def cmd_resync(args):
    """Resync the current branch against an upstream branch.

    Stashes unstaged changes, pulls the upstream branch, merges (never
    rebases) it into the original branch, restores the stash, then pulls
    and pushes the original branch. Defaults to 'master' when no upstream
    branch argument is given.
    """
    if args.get(0):
        # An explicit upstream branch was named; fuzzy-match it to a real one.
        upstream = fuzzy_match_branch(args.get(0))
        if upstream:
            is_external = True  # NOTE(review): set but never read in this function — confirm
            original_branch = repo.head.ref.name
        else:
            print "{0} doesn't exist. Use a branch that does.".format(
                colored.yellow(args.get(0)))
            sys.exit(1)
    else:
        # No argument: resync against master.
        upstream = "master"
        original_branch = repo.head.ref.name
    if repo.is_dirty():
        # Preserve uncommitted work before switching branches.
        status_log(stash_it, 'Saving local changes.', sync=True)
    switch_to(upstream)
    status_log(smart_pull, 'Pulling commits from the server.')
    switch_to(original_branch)
    # Merge-only graft of the freshly pulled upstream commits.
    status_log(smart_merge, 'Grafting commits from {0}.'.format(
        colored.yellow(upstream)), upstream, allow_rebase=False)
    if unstash_index(sync=True):
        status_log(unstash_it, 'Restoring local changes.', sync=True)
    status_log(smart_pull, 'Pulling commits from the server.')
    status_log(push, 'Pushing commits to the server.', original_branch)
def add_entry():
    """Prompt the user for a title, body and tags, then persist a DiaryEntry.

    Reading stops at EOF (finish_key); an empty title aborts back to the
    main menu without saving.
    """
    title_string = "Title (press %s when finished)" % finish_key
    # print(title_string)
    puts(colored.yellow(title_string))
    puts(colored.green("="*len(title_string)))
    title = sys.stdin.read().strip()
    if title:
        entry_string = "\nEnter your entry: (press %s when finished)" % finish_key
        puts(colored.yellow(entry_string))
        puts(colored.green("="*len(entry_string)))
        # reads all the data entered from the user
        data = sys.stdin.read().strip()
        if data:  # if something was actually entered
            puts(colored.yellow(
                "\nEnter comma separated tags(if any!): (press %s when finished) : " % finish_key))
            puts(colored.green("="*(len(title_string)+33)))
            tags = sys.stdin.read().strip()
            tags = processTags(tags)
            puts(colored.green("\n"+"="*len(entry_string)))
            # anything other than 'n' counts as a yes
            if input("\nSave entry (y/n) : ").lower() != 'n':
                DiaryEntry.create(content=data, tags=tags, title=title)
                puts(colored.green("Saved successfully"))
    else:
        puts(
            colored.red("No title entered! Press Enter to return to main menu"))
        input()
        clear()
        return
def check_execution(self, md5):
    """Query Carbon Black for processes matching *md5* as parent and as own hash.

    Prints colored status lines, including links to the matching Carbon
    Black search pages when events are found.
    """
    parentquery = 'parent_md5:' + md5
    md5query = 'md5:' + md5
    if md5query.endswith(" "):
        # NOTE(review): only true when *md5* itself ends with a space —
        # presumably the marker Bit9 leaves for a missing MD5; confirm.
        print colored.red("[-] Bit9 did not capture the MD5 :(\n")
    else:
        print colored.yellow("[*] Checking if Parent MD5 process in Carbon Black...")
        parentresult = self.cb.process_search(parentquery, sort='start desc')
        if parentresult['total_results'] == 0:
            print colored.cyan("[+] Not a Parent MD5 process")
        else:
            # Build a deep link to the Carbon Black search UI for this hash.
            cbparentmd5url = self.parentmd5url + md5 + "&sort=&rows=10&start=0"
            print colored.green("[+] Parent MD5 event found in Carbon Black.")
            print colored.cyan(cbparentmd5url)
        print colored.yellow("[*] Checking if MD5 seen in Carbon Black...")
        md5result = self.cb.process_search(md5query, sort='start desc')
        if md5result['total_results'] == 0:
            print colored.cyan("[+] Not seen in Carbon Black.")
        else:
            cbmd5url = self.md5url + md5 + "&sort=&rows=10&start=0"
            print colored.green("[+] MD5 Found in CB.")
            print colored.cyan(cbmd5url)
#cb=CB()
def Run(report): print colored.yellow("[*] Creating time table.") timelist=[] timetable={} for item in report['process'].get('filemod_complete', []): timelist.append(item.split("|")[1][:-4]) try: for item in report['process'].get('regmod_complete', []): timelist.append(item.split("|")[1][:-4]) except: print colored.red("[-] No registry modifications made.") pass try: for item in report['process'].get('netconn_complete', []): timelist.append(item.split("|")[0][:-4]) except: print colored.red("[-] No network connections found in process report.") pass for item in report['process'].get('modload_complete', []): timelist.append(item.split("|")[0][:-4]) timelist=sorted(set(timelist)) for time in timelist: #print datetime.strptime(time, '%Y-%m-%d %H:%M:%S') timetable[time]=[] print colored.green("[+] Completed.\n") return timetable,timelist
def _watch_unwatch(self, command, short_ids, watch=True):
    """Mark shows/seasons/episodes as seen (watch=True) or unseen.

    *command* is parsed into (show_index, season, episode) triples; -1 in
    the season/episode slot widens the scope to the whole season or show.
    Trakt only supports "unseeing" individual episodes.
    """
    commands = TraktTvController.__parse_command(command)
    if not command:
        return
    # NOTE(review): the loop variable shadows the `command` parameter.
    for command in progress_bar.bar(commands):
        show_id = short_ids[command[0]]
        season_id = command[1]
        episode_id = command[2]
        if season_id == -1 and episode_id == -1:
            # SHOW
            if watch:
                self.api.post_show_seen(tvdb_id=show_id)
            else:
                puts(colored.yellow('Cannot "unsee" a show - Skipping'))
        elif episode_id == -1:
            # SEASON
            if watch:
                self.api.post_show_season_seen(tvdb_id=show_id, season=season_id)
            else:
                puts(colored.yellow('Cannot "unsee" a season - Skipping'))
        else:
            # EPISODE
            if watch:
                self.api.post_show_episode_seen(tvdb_id=show_id, episodes=[{'season':season_id, 'episode':episode_id}])
            else:
                self.api.post_show_episode_unseen(tvdb_id=show_id, episodes=[{'season':season_id, 'episode':episode_id}])
                # NOTE(review): placement reconstructed — assumed to pair with
                # the "unseen" branch (unwatched shows go back on the
                # watchlist); confirm against upstream history.
                self.api.post_show_watchlist(shows=[{"tvdb_id": show_id},])
def tarbell_install_template(args):
    """Install a project template.

    Clones the template repo given as the first positional argument, reads
    its name from ``_base/base.py`` (falling back to the last URL segment),
    and registers it in the Tarbell settings. Exits early if the URL is
    already registered.
    """
    with ensure_settings(args) as settings:
        template_url = args.get(0)
        matches = [template for template in settings.config["project_templates"] if template["url"] == template_url]
        if matches:
            puts("\n{0} already exists. Nothing more to do.\n".format(
                colored.yellow(template_url)
            ))
            sys.exit()
        puts("\nInstalling {0}".format(colored.cyan(template_url)))
        tempdir = tempfile.mkdtemp()
        puts("\n- Cloning repo to {0}".format(colored.green(tempdir)))
        Repo.clone_from(template_url, tempdir)
        base_path = os.path.join(tempdir, "_base/")
        # Load the template's base.py module to read its metadata.
        filename, pathname, description = imp.find_module('base', [base_path])
        base = imp.load_module('base', filename, pathname, description)
        puts("\n- Found _base/base.py")
        try:
            name = base.NAME
            puts("\n- Name specified in base.py: {0}".format(colored.yellow(name)))
        except AttributeError:
            # No NAME constant: derive a name from the repo URL.
            name = template_url.split("/")[-1]
            puts("\n- No name specified in base.py, using '{0}'".format(colored.yellow(name)))
        settings.config["project_templates"].append({"name": name, "url": template_url})
        settings.save()
        _delete_dir(tempdir)
        puts("\n+ Added new project template: {0}".format(colored.yellow(name)))
def tokenExists(self):
    """Check for a stored API token.

    Reads ``self.tokenPath`` when present, validates the ``key=value``
    format and verifies the token with the server; otherwise prompts the
    user for a new token.
    """
    # Token already exists
    if os.path.exists(self.tokenPath):
        if os.path.isfile(self.tokenPath):
            # `with` guarantees the handle is closed on every branch — the
            # original only closed it on the success path, leaking the file
            # when the token was missing or malformed.
            with open(self.tokenPath, 'r') as tokenFile:
                line = tokenFile.readline()
            # Check if file is properly formatted
            if not line.rstrip():
                puts(colored.yellow('No token file detected. Please enter your token: '))
                self.promptNewToken()
            elif len(line.split("=")) != 2:
                puts(colored.red("No valid token found!" + self.tokenPath))
                puts(colored.yellow("Please enter token: "))
                self.promptNewToken()
            else:
                token = line.split("=")[1].rstrip()
                self.checkToken(token)
                # Not sure if needed for pretty output
                # puts(colored.yellow("Found existing token in: \"" + self.tokenPath + "\""))
                self.token = "?token=" + token
    # Create new token in ~/.codeassign
    else:
        puts(colored.yellow('No token file detected. Please enter your token: '))
        self.promptNewToken()
def tarbell_publish(args):
    """Publish a site to Amazon S3.

    The first positional argument selects a bucket configuration from
    ``S3_BUCKETS`` (default: "staging"); credentials come from the
    ``s3_creds`` section of the Tarbell settings.
    """
    with ensure_settings(args) as settings, ensure_project(args) as site:
        bucket_name = list_get(args, 0, "staging")
        bucket_uri = site.project.S3_BUCKETS.get(bucket_name, False)
        creds = settings.config.get('s3_creds')
        # Bail out before slicing: the original computed bucket_uri[5:] even
        # when the lookup failed (bucket_uri is False), raising TypeError
        # instead of the intended configuration error.
        if not bucket_uri:
            show_error(("\nThere's no bucket configuration called '{0}' "
                        "in tarbell_config.py.".format(colored.yellow(bucket_name))))
            return
        root_url = bucket_uri[5:]  # strip the "s3://" scheme prefix
        extra_context = {
            "ROOT_URL": root_url,
        }
        tempdir = "{0}/".format(tarbell_generate(args, extra_context=extra_context, skip_args=True))
        try:
            if creds:
                puts("\nDeploying {0} to {1} ({2})\n".format(
                    colored.yellow(site.project.TITLE),
                    colored.red(bucket_name),
                    colored.green(bucket_uri)
                ))
                s3 = S3Sync(tempdir, bucket_uri, creds['default']['key_id'], creds['default']['key'])
                s3.deploy_to_s3()
            else:
                # Previously this case fell through to the (misleading)
                # missing-bucket message even though the bucket existed.
                show_error("\nNo 's3_creds' found in your Tarbell settings.")
        except KeyboardInterrupt:
            show_error("ctrl-c pressed, bailing out!")
        finally:
            _delete_dir(tempdir)
            puts("\nIf you have website hosting enabled, you can see your project at:")
            puts(colored.green("http://{0}\n".format(root_url)))
def processDroneCommand(input):
    """Parse and dispatch one drone control command string.

    Supported commands: 'SETUP <coords> <sequence>', 'GOTO_NEXT' (only while
    disarmed) and 'MANUAL'. Mutates the module-level mission state
    (sequence position/list, vehicle state, target coordinates).
    """
    # NOTE(review): the parameter shadows the builtin `input`.
    elements = input.split(' ')
    if len(elements) > 0:
        i_command = elements[0]
        global sequencePos, sequenceList, vehicleState
        global targetLat, targetLon, targetAlt
        if i_command == 'SETUP':
            i_coordinates = elements[1]
            i_sequence = elements[2]
            sequencePos = 0  # reset sequence position
            coordinateStringList = i_coordinates.split(';')  # get a list of strings 'lat,lon,alt',...
            for coordinateString in coordinateStringList:
                # NOTE(review): reuses (and clobbers) the outer `elements`.
                elements = coordinateString.split(',')  # get one coordinate as list - lat,lon,alt
                print colored.green('elements' + str(elements))
                coordinateList.append([float(element) for element in elements])
            sequenceList = [int(el) for el in i_sequence.split(',')]
        elif i_command == 'GOTO_NEXT' and vehicleState == 'disarmed':
            if sequencePos < len(sequenceList):  # check if not reached the end of the sequence
                vehicleState = 'ready'
                # print "current sequence position: ", sequencePos
                sequenceItem = sequenceList[sequencePos]
                targetLat, targetLon, targetAlt = coordinateList[sequenceItem]
                sequencePos += 1
                print time.strftime("%H:%M:%S"), colored.yellow(' target location index = ' + str(sequenceItem))
            else:
                print time.strftime("%H:%M:%S"), colored.yellow(' Finished sequence.')
        elif i_command == 'MANUAL':
            vehicleState = 'manual'
        else:
            print time.strftime("%H:%M:%S"), colored.red(' Invalid command: ' + str(input))
def run(): stemmer = Stemmer("english") pages = db.en.find() print colored.yellow("statistic words") wordstatistic = {} for page in progress.bar(pages,size=db.en.count()): data = page.get("data") if not data:continue content = data.get("content") if not content: db.en.remove({"_id":page["_id"]}) continue words = EN_WORD_CUT.split(content) for word in words: w=stemmer.stemWord(word.strip()).lower() if w and len(w)<20 and not w in EN_IGNORE: if wordstatistic.get(w): wordstatistic[w]+=1 else: wordstatistic[w]=1 print colored.yellow("save to en_words_freq") savequene = [] for k,v in progress.bar(wordstatistic.iteritems(),size=len(wordstatistic)): savequene.append({"_id":k,"freq":v}) if len(savequene) >=1000: db.en_words_freq.insert(savequene) savequene=[] if savequene:db.en_words_freq.insert(savequene) print colored.cyan( "count of en_words_freq: %d" % db.en_words_freq.count())
def tarbell_publish(args):
    """Publish a site by calling s3cmd.

    The first positional argument selects a bucket configuration from
    ``S3_BUCKETS`` (default: "staging").
    """
    with ensure_settings(args) as settings, ensure_project(args) as site:
        bucket_name = list_get(args, 0, "staging")
        bucket_uri = site.project.S3_BUCKETS.get(bucket_name, False)
        # Guard before slicing: the original computed bucket_uri[5:] even
        # when the lookup failed (bucket_uri is False), raising TypeError
        # instead of the intended configuration error.
        if not bucket_uri:
            show_error(("\nThere's no bucket configuration called '{0}' "
                        "in tarbell_config.py.".format(colored.yellow(bucket_name))))
            return
        root_url = bucket_uri[5:]  # strip the "s3://" scheme prefix
        extra_context = {
            "ROOT_URL": root_url,
        }
        tempdir = "{0}/".format(tarbell_generate(args, extra_context=extra_context, skip_args=True))
        try:
            puts("\nDeploying {0} to {1} ({2})".format(
                colored.yellow(site.project.TITLE),
                colored.red(bucket_name),
                colored.green(bucket_uri)
            ))
            command = ['s3cmd', 'sync', '--acl-public', '--verbose', tempdir, bucket_uri]
            puts("\nCalling {0}".format(colored.yellow(" ".join(command))))
            call(command)
        except KeyboardInterrupt:
            show_error("ctrl-c pressed, bailing out!")
        finally:
            puts("\nIf you have website hosting enabled, you can see your project at http://{0}".format(root_url))
            puts("\n- Done publishing")
            _delete_dir(tempdir)
def print_shell(self, *args): try: for arg in args: arg = str(arg) if isinstance(type(args), types.NoneType): continue if self.color == "true": if str(arg).count(self.ruler) == len(str(arg)): print colored.green(arg), elif "Error" in arg: print colored.red(arg), elif ":\n=" in arg: print colored.red(arg), elif ":" in arg: print colored.blue(arg), elif "type" in arg: print colored.green(arg), elif "state" in arg or "count" in arg: print colored.magenta(arg), elif "id =" in arg: print colored.yellow(arg), elif "name =" in arg: print colored.cyan(arg), else: print arg, else: print arg, print except Exception, e: print colored.red("Error: "), e
def editXConfig(): print colored.yellow("\nAttempting to edit the /etc/X11/xorg.conf.") xorg_path = "/etc/X11/xorg.conf" if os.path.exists(xorg_path): os.system("sudo vim " + xorg_path) else: xorg_new_path = "/root/xorg.conf.new" if os.path.exists(xorg_new_path): print colored.red("xorg.conf not found, however, xorg.conf.new exists. Would you like to open it for editing (y/n)?") xorg_new_opt = raw_input() if xorg_new_opt == 'y': os.system("sudo vim " + xorg_new_path) else: print colored.red("Option other than \"y\" specified.") else: print colored.red("neither " + xorg_path + " or " + xorg_new_path + " exist, would you like to generate xorg.conf.new (y/n)?") xorg_gen_opt = raw_input() if xorg_gen_opt == 'y': os.system("sudo Xorg :1 -configure") else: print colored.red("Option other than \"y\" specified.") print ##LOG UPDATER os.system("echo >> logs/log.txt") os.system("echo `date` >> logs/log.txt") os.system("echo \"Configure X via MANDY, entry \"xconfig\".\" >> logs/log.txt") os.system("echo \"Commands Executed: sudo vim xorg.conf(.new), Xorg :1 -confugre\" >> logs/log.txt") os.system("echo >> logs/log.txt")
def print_shell(self, *args): try: for arg in args: arg = str(arg) if isinstance(type(args), types.NoneType): continue if self.color == 'true': if str(arg).count(self.ruler) == len(str(arg)): print colored.green(arg), elif 'Error' in arg: print colored.red(arg), elif ":\n=" in arg: print colored.red(arg), elif ':' in arg: print colored.blue(arg), elif 'type' in arg: print colored.green(arg), elif 'state' in arg or 'count' in arg: print colored.magenta(arg), elif 'id =' in arg: print colored.yellow(arg), elif 'name =' in arg: print colored.cyan(arg), else: print arg, else: print arg, print except Exception, e: print colored.red("Error: "), e
def showRepositories(): print print colored.yellow("Listing repositories: ") + "\n" sources_list = "/etc/apt/sources.list" if os.path.exists(sources_list): sources_open = open(sources_list) print sources_open.read() else: print colored.red("sources.list not found.") sources_oprl = "/etc/apt/sources.list.d/official-package-repositories.list" if os.path.exists(sources_oprl): sources_oprl_open = open(sources_oprl) print sources_oprl_open.read() else: print colored.red("official-package-repositiories.list not found.") sources_fedora = "/etc/yum.repos.d/fedora.repo" if os.path.exists(sources_fedora): sources_fedora_open = open(sources_fedora) print colored.yellow("Displaying contents of fedora.repo: ") print sources_fedora_open.read() else: print colored.red("fedora.repo not found.") print ##LOG UPDATER os.system("echo >> logs/log.txt") os.system("echo `date` >> logs/log.txt") os.system("echo \"Show bin and sbin dir via MANDY, entry \"repo\".\" >> logs/log.txt") os.system("echo \"Commands Executed: cat /etc/network/interfaces\" >> logs/log.txt") os.system("echo >> logs/log.txt")
def printFinalResult(self, output, passed, numberOfTests):
    """Summarize the outcome of a test run for the user.

    Exits with status 1 when no test cases exist; otherwise prints an
    all-passed / all-failed / partial summary.
    """
    if numberOfTests == 0:
        # Distinguish "requested cases are missing" from "no cases at all".
        if len(self.testCases) > 0:
            puts(colored.red("Given test cases " + str(self.testCases) + " don't exist for problem with id " + str(
                self.problemId) + "!"))
        else:
            puts(colored.red("No test cases found for problem with id " + str(self.problemId) + "!"))
        sys.exit(1)
    # Point the user at the log file when one was produced and -more is on.
    if self.logFileBool and self.showInfo:
        puts(colored.yellow("Check log file \"cae_log\" in your working directory for more detailed info!"))
    if passed >= numberOfTests:
        puts(colored.green("\nAll (" + str(numberOfTests) + ") test/s passed! Well done!"))
    elif passed <= 0:
        puts(colored.red("\nAll (" + str(numberOfTests) + ") test/s failed! Try again!"))
    else:
        puts("\nNumber of tests passed: " + str(passed) + "/" + str(len(output['testCases'])))
        puts(colored.yellow("Some tests failed! Almost there, keep trying!"))
def checkToken(self, tokenInput):
    """Validate *tokenInput* against the server and print the account info box.

    Exits the process on HTTP or token errors; returns True when valid.
    """
    response = requests.post(self.pathAssociateToken + tokenInput)
    # Check if status not 200
    if response.status_code != requests.codes.ok:
        # Fixed stray debug suffix in the message ("Bad request!2").
        puts(colored.red("Bad request!"))
        sys.exit(1)
    data = response.json()
    # Check if wrong token
    if 'errorMessage' in data.keys():
        puts(colored.red("Invalid token!"))
        sys.exit(1)
    # Modify encoding of account info
    self.modifyEncoding(data)
    # Token is ok — draw the framed account-info box, padding the name row
    # so the right border lines up with the email row.
    if data['success'] and self.showInfo:
        puts(colored.green("Token is valid!\n"))
        formatNum = len(data['email'].decode('utf8')) + 8
        puts(" " * (formatNum / 3) + colored.yellow("Account info"))
        nameLen = len(data['name'].decode('utf8')) + 8
        puts("|" + "=" * formatNum + "|")
        # puts(colored.yellow("\tAccount info"))
        puts("| " + colored.yellow("Name: ") + " " + data['name'] + " " * (
            formatNum - nameLen) + "|\n| " + colored.yellow(
            "Email: ") + data['email'] + "|")
        puts("|" + "=" * formatNum + "|")
        puts()
    return True
def ban_hash(self, hashvalue, rulename): print colored.yellow("[*] Banning "+ hashvalue+"...") data = {'hash': hashvalue, 'fileState': 3, 'policyIds': '0', 'name': rulename} r = requests.post(self.fileruleurl, json.dumps(data), headers=self.authJson, verify=self.b9StrongCert) r.raise_for_status() fileRule = r.json() print colored.green("[+] "+rulename+" "+hashvalue+" Banned!")
def rejar(jarpth, fresh_content_map=None, compression=zipfile.ZIP_DEFLATED):
    """Rewrite the jar at *jarpth*, replacing entries by basename.

    *fresh_content_map* maps entry basenames to replacement bytes; entries
    not in the map are copied through unchanged. The original jar is kept
    next to the rewritten one with a ".backup" suffix.

    Raises IOError when *jarpth* is not a file.
    """
    # Fixed mutable default argument ({} was shared across calls).
    if fresh_content_map is None:
        fresh_content_map = {}
    if not isfile(jarpth):
        raise IOError("No jar: %s" % jarpth)
    puts(colored.cyan("Re-jarring '%s' with %d possible replacements:" % (basename(jarpth), len(fresh_content_map))))
    with indent(3, quote=" *"):
        for fresh_key in fresh_content_map.keys():
            puts(colored.cyan(fresh_key))
    print()
    oldjar = zipfile.ZipFile(jarpth, mode="r")
    # NOTE(review): tempfile.mktemp is race-prone; mkstemp would be safer.
    newjar = zipfile.ZipFile(tempfile.mktemp(suffix=".zip"), mode="w", compression=compression)
    for item in progress.bar(oldjar.infolist(), label=colored.cyan("Re-jar: %s" % basename(jarpth))):
        replace = basename(item.filename) in fresh_content_map
        content = replace and fresh_content_map[basename(item.filename)] or oldjar.read(item.filename)
        replace and puts(colored.yellow("Replaced %s" % item.filename))
        newjar.writestr(item, content)
    print()
    oldjarpth = oldjar.filename
    newjarpth = newjar.filename
    # Verify both archives before swapping them into place.
    oldjar.testzip()
    oldjar.close()
    shutil.move(oldjar.filename, oldjar.filename + ".backup")
    newjar.testzip()
    newjar.close()
    _copy(newjarpth, oldjarpth)
    puts(colored.yellow("Finished restructuring jar: %s" % oldjarpth))
    print()
def add_personal_account():
    """Interactively link a OneDrive Personal account via an OAuth callback URL.

    Loops until the pasted callback URL yields a valid account or the user
    aborts with Ctrl+C.
    """
    puts(colored.green("Link with an OneDrive Personal account:"))
    instructions = (
        "Please use your browser to visit the following URL, sign in with your OneDrive account and "
        "authorize onedrive-d, then copy the callback URL back here. The callback URL is the URL at "
        "which the authorization page goes blank and usually starts with "
        + clients.PersonalClient.DEFAULT_REDIRECT_URI + "."
    )
    puts(colored.cyan(instructions))
    puts()
    puts(colored.yellow("Please visit this URL: "))
    puts(personal_client.get_auth_uri())
    while True:
        try:
            url = prompt.query(str(colored.yellow("\nPlease paste the callback URL or hit [Ctrl+C] to abort:")))
            account = accounts.get_personal_account(personal_client, uri=url)
            profile = account.profile
            account_store.add_account(account)
            puts(colored.green("Success: added account {} ({}).".format(profile.user_id, profile.name)))
        except KeyboardInterrupt:
            puts(colored.red("Aborted."))
        except Exception as ex:
            # Bad URL or API failure: report and re-prompt.
            puts(colored.red("Error: " + str(ex)))
            continue
        return
def prompt_for_platform_info(platform):
    """Prompt for app-store credentials for *platform* (iOS or Android).

    Returns (username, password). Exits cleanly on Ctrl-C; raises
    RuntimeError for an unknown platform.
    """
    platform_username = None
    platform_password = None
    if platform == PLATFORM_IOS:
        puts(colored.yellow('We need some details so that we can build and '\
            'upload your app to iTunes Connect on your behalf:'))
        try:
            while not platform_username:
                platform_username = get_input('Please enter your Apple ID ' \
                    'for iTunes Connect: ')
            while not platform_password:
                platform_password = get_input('Enter your password for ' \
                    'iTunes Connect: ', password=True)
        except KeyboardInterrupt:
            print()
            sys.exit(0)
    elif platform == PLATFORM_ANDROID:
        puts(colored.yellow('We need some details so that we can build and '\
            'upload your app to the Google Developer Console on your behalf:'))
        try:
            while not platform_username:
                platform_username = get_input('Please enter your username ' \
                    'for Google Play: ')
            while not platform_password:
                platform_password = get_input('Enter your password for ' \
                    'Google Play: ', password=True)
        except KeyboardInterrupt:
            print()
            sys.exit(0)
    else:
        # Fixed: the format string had no argument, so the message always
        # showed a literal "%s" instead of the platform name.
        raise RuntimeError('Platform "%s" is not configured!' % platform)
    return platform_username, platform_password
def print_help():
    """Print gen_api.py usage: how to build baancomplete_api.sqlite from docs or a DB."""
    puts('Baancomplete gen_api.py')
    puts('Use this python script to generate a baancomplete_api.sqlite file')
    puts('')
    # Mode 1: parse ttstpbaandoc-generated library documentation.
    puts('Either from library documentation generated with ttstpbaandoc')
    with indent(2):
        puts('{0} {1} {2} {3}'.format(
            colored.green('--doc'),
            colored.yellow('[file or directory (subfolders are searched too)]'),
            colored.green('--out'),
            colored.yellow('[file]'))
        )
    # Mode 2: read table definitions straight from the database.
    puts('Or from table definitions (database credentials required)')
    with indent(2):
        puts('{0} {1} {2} {3}'.format(
            colored.green('--db'),
            colored.yellow('[mssql]'),
            colored.green('--out'),
            colored.yellow('[file]')
        ))
    # NOTE(review): internal line layout of this literal reconstructed from a
    # collapsed source — confirm against the original file.
    puts(colored.red('''
    The output file is a sqlite3 database.
    Copy it into the baancomplete autoload folder and name it baancomplete_api.sqlite
    You can change the path to the folder where baancomplete will look for the api file
    by setting g:baancomplete_path in .vimrc
    But you cannot change the filename itself.
    '''))
def main(self, templates_directory):
    """Discover Vagrantfile and minion-configuration templates under
    *templates_directory* and register them in the parent config."""
    templates_directory = abspath(templates_directory)
    puts(colored.blue("Looking in %s for templates" % templates_directory))
    # Check for VagrantFile template file
    vagrantfile_template = join(templates_directory, 'Vagrantfile.template')
    if not isfile(vagrantfile_template):
        puts(colored.yellow("No Vagrantfile template found: %s" % vagrantfile_template))
    else:
        puts(colored.blue("Found a Vagrantfile template: %s" % vagrantfile_template))
        self.parent.config.setdefault('vagrantfiles', {})['default'] = vagrantfile_template
    # Check for minions configurations
    minions_conf = join(templates_directory, 'minions_configurations')
    if not isdir(minions_conf):
        puts(colored.yellow("No minion configuration directory found: %s" % minions_conf))
    else:
        logging.info("Found a minion configuration directory: %s" % minions_conf)
        for filename in listdir(minions_conf):
            filepath = join(minions_conf, filename)
            puts(colored.blue("Found minion conf: %s" % filepath))
            self.parent.config.setdefault('minion_conf', {})[filename] = filepath
    # Write config file
    self.parent.write_config_file()
def main(): print "\n===========================================" print colored.green("Starting Eueler Solution Generator") print "===========================================\n" euler_dir = os.path.expanduser('~/dev/euler') posts_dir = os.path.join(os.path.dirname(os.path.abspath(__file__))) index = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'index.html') problems = [] print colored.yellow("Progress:") for directory in progress.bar(os.listdir(euler_dir)): path = os.path.join(euler_dir, directory) if is_problem(path): problems.append(Problem(path)) problems = filter(lambda p: p.solutions, problems) problems.sort(key = lambda p: p.number) print "\n===========================================" print colored.green("Processed %d problem(s)" % len(problems)) print "===========================================" for i, problem in enumerate(problems): if i > 0: problem.previous = problems[i-1] if i < len(problems) - 1: problem.next = problems[i+1] generate_posts(problems, posts_dir) generate_index(problems, index)
def select_template():
    """Interactively pick a template, validate it, and launch the task manager.

    Re-prompts (recursively) on invalid input; persists the selection in
    the module-level `configuration`.
    """
    global configuration
    templates = get_templates()  # renamed from `list`, which shadowed the builtin
    puts(colored.green('Please Select The Template:'))
    for index, value in enumerate(templates):
        with indent(2):
            puts('%d) %s' % (index + 1, value.get('title')))
    old = configuration.get('template', 1)
    new = raw_input('[%s] Template: (%s) ' % (colored.green('?'), str(old)))
    # Empty input keeps the previous selection.
    new_value = old if not new else int(new)
    if new_value > len(templates):
        puts(colored.yellow('Warning!') + ' %s is ' % new + colored.yellow('INVALID'))
        select_template()
    else:
        configuration.put('template', new_value)
        templates = get_templates()
        configuration_path = None  # dropped stray trailing semicolon
        for index, value in enumerate(templates):
            if index + 1 == configuration.get('template'):
                path = value.get('directory', None)
                configuration_path = os.path.join(os.path.dirname(__file__), path, TEMPLATES_CONFIGURATION_FILE)
                break
        if not configuration_path or not os.path.exists(configuration_path):
            puts(colored.yellow('Warning!') + ' %s is ' % new + colored.yellow('INVALID'))
            select_template()
        else:
            configuration.put('template_path', get_template_path())
            make_and_run_taskmanager(configuration_path)
def ban_certificate(self, hashstate): print colored.yellow("[*] Banning certificate for "+hashstate[0]['publisher']+"...") data = {'publisherState': 3} r = requests.put(self.certificateurl+str(hashstate[0]['publisherId']), json.dumps(data), headers=self.authJson, verify=self.b9StrongCert) r.raise_for_status() fileRule = r.json() print colored.green("[+] "+hashstate[0]['publisher']+" certificate has been Banned!")
def run(self):
    """Worker loop: pull app ids off the queue and download their APKs from Google Play.

    Re-queues an app when Google appears to rate-limit us (DecodeError);
    records permanently failing ids in the module-level `errors` queue.
    """
    while self.running:
        if self.queue.empty():
            print "waiting...."
            time.sleep(5)
        else:
            id = self.queue.get()  # NOTE(review): shadows the builtin `id`
            try:
                print "[%s] downloading: %s" % (self.ident, id)
                details = self.playapi.details(id)
                # print details
                version = details.docV2.details.appDetails.versionCode
                print id + " " + str(version)
                apk = self.playapi.download(id, version)
                print colored.green("finished dowloading ") + id
                f = open("../playapks/" + id + ".apk", 'w')
                f.write(apk)
                f.close()
                print "written " + id + " to file"
            except message_mod.DecodeError, e:
                print colored.red("DecodeError on downloading :") + id
                print "Probably google blocked the IP"
                print colored.yellow("waiting 2:30 minutes for google to calm down")
                # NOTE(review): uses module-level `appQueue` here but
                # `self.queue` above — presumably the same queue; confirm.
                appQueue.put(id)  # insert this app into the queue again (there was no principal error with the app, google blocked us)
                time.sleep(150)
                continue
            except IndexError:
                print colored.red("IndexError") + " on downloading :" + id
                print "Probably app is not found in app store"
                continue
            except Exception, e:
                print colored.red("Error on downloading :") + id
                print e
                errors.put(id)
                continue
def notification(flag):
    """Print the colored status message matching *flag* (1-6).

    Notifications tell us what is going on behind the scenes. Unknown
    flags print nothing. The original also rebound the local `flag` to 0
    after each message — a no-op for the caller — which is dropped here.
    """
    messages = {
        1: colored.red("error : wrong value entered! input correct value."),
        2: colored.yellow("success : username successfully added to data file."),
        3: colored.red("error : invalid username entered."),
        4: colored.red("error : username already present."),
        5: colored.red("error : username not present in list."),
        6: colored.yellow("success : username deleted successfully."),
    }
    if flag in messages:
        puts(messages[flag])
        space()
def list_existing_accounts():
    """Show all linked onedrive-d accounts in a table and offer interactive deletion."""
    col = 20
    all_accounts = account_store.get_all_accounts()
    if not all_accounts:
        puts(colored.red("There is no linked account yet."))
        return
    puts(colored.green("You have linked the following account(s) to onedrive-d:\n"))
    header = columns(
        [(colored.red("Index")), 10],
        [(colored.magenta("Account ID")), col],
        [(colored.cyan("Account Type")), col],
        [(colored.green("Name")), None],
    )
    puts(header)
    account_list = []
    for id, account in all_accounts.items():
        row = columns(
            [str(len(account_list)), 10],
            [account.profile.user_id, col],
            [account.TYPE, col],
            [account.profile.name, None],
        )
        puts(row)
        account_list.append(account)
    puts(colored.yellow("\nTo delete an account, type the index and hit [Enter]. Otherwise hit [Ctrl+C] to break."))
    puts(colored.yellow("Note: all the Drives belonging to the account will also be deleted."))
    puts()
    try:
        prompt_delete_account(account_list)
    except KeyboardInterrupt:
        puts(colored.green("Aborted."))
def Build(): try: year = int(sys.argv[4]) month = int(sys.argv[5]) day = int(sys.argv[6]) timestamp = datetime(year, month, day) except IndexError: print colored.red("Date missing: python main.py build yyyy mm dd") print colored.yellow("Asumming current date:") timestamp = datetime.today() print "BUILDING Database and content" # Generate Users #user_generator = UserGenerator() #user_generator.run() # Generate Places #place_generator = PlaceGenerator() #place_generator.run() # Generate Friendships #fs_generator = FriendshipGenerator() #fs_generator.run() # Generate Topics #tp_generator = TopicGenerator(amount=1000) #tp_generator.run() # Generate Circuits #circuit_generator = CircuitGenerator() #circuit_generator.run() # Generate places per circuit #pl_ct_generator = Place_circuit_generator() #pl_ct_generator.run() # Generate topics per circuit #tc_ct_generator = Topic_circuit_generator() #tc_ct_generator.run() # Generate explicit_interests #exp_generator = ExplicitGenerator(date=timestamp) #exp_generator.run() # Generate Visits #visit_generator = VisitGenerator(date=timestamp) #visit_generator.run(min_equiv=360) # 360 mins = 6 hours # Generate topic follows #follow_generator = FollowGenerator(date=timestamp) #follow_generator.run() # Generate Ratings #rating_generator = RatingsGenerator() #rating_generator.run() # Generate category follow #category_follow = CategoryFollowGenerator() #category_follow.run() # Generate Place.done pc_done = PlaceDoneGenerator() pc_done.run()
def _format_re_match(self, m, text):
    """Return a one-line context snippet for regex match *m* in *text*,
    with the matched span highlighted in yellow.

    The snippet extends up to 10 characters on either side of the match
    but never crosses the enclosing line boundaries.
    """
    start_of_sentence = max(text.rfind('\n', 0, m.start()) + 1, 0)
    # Fixed: the original wrote `text.find('\n', m.end()), len(text)`,
    # accidentally building a tuple — the -1 check below could never fire
    # and min() ended up comparing a tuple with an int.
    end_of_sentence = text.find('\n', m.end())
    if end_of_sentence == -1:
        end_of_sentence = len(text)
    a_string_start = max(start_of_sentence, m.start() - 10)
    a_string_end = min(end_of_sentence, m.end() + 10)
    a_string = text[a_string_start : m.start()]
    a_string += colored.yellow(text[m.start() : m.end()])
    a_string += text[m.end() : a_string_end]
    to_return = a_string.split('\n', 1)[0]
    return to_return.replace('\r', ' ').replace('\n', ' ')
def end_downtime(environment, ansible_context):
    """Interactively end a downtime: restart CommCare services and, when a
    downtime record exists, cancel it."""
    downtime = get_downtime_record(environment)
    if downtime:
        proceed = ask("Do you want to start all CommCare services?")
    else:
        puts(colored.yellow('Downtime record not found.'))
        proceed = ask("Do you want to continue?")
    if not proceed:
        return
    supervisor_services(environment, ansible_context, 'start')
    if downtime:
        cancel_downtime_record(environment, downtime)
def do_help(self, arg, opts=None):
    """Print usage guidance for the games and standings commands."""
    help_lines = (
        "To view scores, type in games --choice [option]. In place of [option], type in yesterday, tomorrow, or today. For example, an example of a command you could run would be: games --choice yesterday",
        "Doing this will show games that have been played/are currently on for that day. If the game has ended, a 'game id' will be available",
        "To view the boxscore of a certain game that has ended, type in games --choice [option] --gameid [id number]",
        "In [id number] above, type in the game id that you want, which will be provided for you! An example of this command would be: games --choice today --gameid 1",
        "We also have created the standings command. Simply type standings into the prompt, and it will return eastern and western conference rankings!",
    )
    for line in help_lines:
        puts(colored.yellow(line))
def __enter__(self):
    """Return Settings for self.path, bootstrapping configuration when absent."""
    # Guard clause: no config directory yet means we must configure first.
    if not os.path.isdir(self.path):
        puts("\n{0}: {1}".format(
            colored.red("Warning:"),
            "No Tarbell configuration found, running {0}.".format(
                colored.green("tarbell configure"))))
        settings = tarbell_configure(self.args)
        puts("\n\n Trying to run {0} again".format(
            colored.yellow("tarbell {0}".format(self.args.get(0)))))
        return settings
    return Settings(self.path)
def start(self):
    """Connect to the chat server, authenticate, and spawn send/receive threads."""
    print("host:", self.host, "port:", self.port)
    self.sock.connect((self.host, self.port))
    self.sendMsg(self.sock, self.password)
    message = self.recvMsg(self.sock, '\n')
    print(message)
    # '~q' from the server means we should quit — presumably the password
    # was rejected (NOTE: confirm against the server protocol).
    if message.replace('\n', '') == '~q':
        sys.exit(0)
    username = input(colored.yellow("Enter your username: "))
    for target, thread_args in ((self.sends, (username,)), (self.recvs, ('\n',))):
        Thread(target=target, args=thread_args).start()
def __init__(self, *args, **kwargs):
    """Initialize the apply loop, warning when the region lacks Lambda support.

    Recognized kwargs: stage, timeout_in_minutes (default 15), region.
    """
    # NOTE(review): kwargs are popped *after* being forwarded to the parent,
    # so the parent also sees stage/timeout/region — preserved as-is;
    # confirm the parent tolerates (or expects) them.
    super(ProjectApplyLoopBase, self).__init__(*args, **kwargs)
    self.stage = kwargs.pop('stage', None)
    self.timeout_in_minutes = kwargs.pop('timeout_in_minutes', 15)
    self.region = utils.setup_region(kwargs.pop('region', None), self.settings)
    if self.region not in AWS_LAMBDA_REGIONS:
        self.puts(
            colored.yellow(
                # Fixed typo in the warning text: "were" -> "where".
                ("Note: You are trying to use gordon in a region "
                 "where Lambdas are not supported. This might not end nicely!")
            )
        )
def format_prompt(self):
    """Build the coloured interactive prompt and install it as sys.ps1."""
    sink_labels = ['sink:{}'.format(sink) for sink in self.enabledsinks]
    if sink_labels:
        sinks_part = colored.green(', '.join(sink_labels))
    else:
        sinks_part = colored.red('No sinks set')
    prompt = colored.magenta(self.source)
    prompt += ' ▶ ['
    prompt += sinks_part
    prompt += '] '
    if self.since or self.until:
        # Show the active time window; ∞ marks an unbounded end.
        lower = self.since.isoformat(' ') if self.since else '∞'
        upper = self.until.isoformat(' ') if self.until else '∞'
        prompt += '|{}↦{}| '.format(lower, upper)
    prompt += colored.yellow('({} cached) '.format(self.count_cached_files()))
    prompt += colored.red('*{} errors* '.format(len(self.errors)))
    prompt += '\n > '
    sys.ps1 = prompt
def table():
    """Show the table menu, read a choice, and dispatch the matching action."""
    puts(colored.blue('1. 抓取所有資料'))
    puts(colored.blue('2. 閱讀資料'))
    puts(colored.blue('3. 刪除資料'))
    puts(colored.yellow('任何時候輸入 0 回到根目錄'))
    choice = input_check_int('請輸入選項: ')
    # 0 loops back into this same menu.
    dispatch = {
        1: run_crawler,
        2: read_table,
        3: delete_table,
        0: table,
    }
    dispatch[choice]()
def cmd_sprout(args):
    """Creates a new branch of given name from given branch.
    Defaults to current branch.
    """
    source = args.get(0)
    target = args.get(1)

    if (source is None) and (target is None):
        # The new branch name is mandatory.
        show_error('Please pass new branch name to create.')
        help('sprout', to_stderr=True)
        sys.exit(1)

    if target is None:
        # Single argument: it is the new branch; sprout off the current one.
        target = args.get(0)
        source = get_current_branch_name()
    else:
        source = fuzzy_match_branch(source)

    existing = get_branch_names()
    if source not in existing:
        print("{0} doesn't exist. Use a branch that does.".format(
            colored.yellow(source)))
        sys.exit(1)
    if target in existing:
        print("{0} already exists. Use a unique name.".format(
            colored.yellow(target)))
        sys.exit(1)

    if repo.is_dirty():
        status_log(stash_it, 'Saving local changes.')

    status_log(
        sprout_branch,
        'Branching {0} to {1}.'.format(colored.yellow(source),
                                       colored.yellow(target)),
        source, target)
def scrape_mixcloud_url(mc_url, num_tracks=sys.maxsize, folders=False, redownload=False):
    """
    Returns filenames to open.
    """
    try:
        data = get_mixcloud_data(mc_url)
    except Exception as e:
        puts(colored.red("Problem downloading ") + mc_url)
        print(e)
        return []

    artist_clean = sanitize_filename(data['artist'])
    title_clean = sanitize_filename(data['title'])
    filepath = get_path(folders, artist_clean, title_clean,
                        album_name=None, track_number=None,
                        file_ext=data['mp3_url'][-3:])

    label = colored.green("Downloading")
    if exists(filepath):
        if not redownload:
            # Already on disk and no redownload requested: skip entirely.
            puts(colored.yellow("Track already downloaded: ") +
                 colored.white(data['title']))
            return []
        label = colored.yellow("Redownloading")

    puts(label + colored.white(': ' + data['artist'] + " - " + data['title'] +
                               " (" + filepath[-4:] + ")"))
    download_file(data['mp3_url'], filepath)

    if filepath.endswith('.mp3'):
        tag_file(filepath,
                 artist=data['artist'],
                 title=data['title'],
                 year=data['year'],
                 genre="Mix",
                 artwork_url=data['artwork_url'])

    return [filepath]
def get_acc_balance(self): query_result = self.get_data(self.balance_url) # print json.dumps(query_result['response'], indent=2) print colored.blue('Account Balance') print 'Account value: \t' + query_result['response']['accountbalance'][ 'accountvalue'] print colored.yellow('securities:') # print json.dumps(query_result['response']['accountbalance']['securities'], indent=2) securities = query_result['response']['accountbalance']['securities'] print 'total: \t' + securities['total'] print 'stocks: \t' + securities['stocks'] print 'options: \t' + securities['options'] print colored.yellow('money:') # print json.dumps(query_result['response']['accountbalance']['money'], indent=2) money = query_result['response']['accountbalance']['money'] print 'total: \t' + money['total'] print 'cash: \t' + money['cash'] print 'marginbalance: \t' + money['marginbalance'] print colored.yellow('buyingpower:') if 'buyingpower' in query_result['response']['accountbalance']: # print json.dumps(query_result['response']['accountbalance']['buyingpower'], indent=2) buyingpower = query_result['response']['accountbalance'][ 'buyingpower'] print 'cash: \t' + buyingpower['cashavailableforwithdrawal'] print 'stock: \t' + buyingpower['stock'] print 'options: \t' + buyingpower['options'] print '----------------------------------------------------------------' return
def get_acc_holdings(self):
    """Fetch current holdings and print per-position profit/loss.

    For each holding prints cost basis, market value and the unrealized
    gain/loss (green for gains, red for losses), then a coloured total.
    Returns None; output is side-effect printing.  Python 2 only.
    """
    query_result = self.get_data(self.holdings_url)
    holdings = query_result['response']['accountholdings']['holding']
    print colored.blue('Account Holdings')
    unrealized_total_gl = 0
    # TODO bundle the data in JSON
    for holding in holdings:
        symbol = holding['instrument']['sym']
        costbasis = float(holding['costbasis'])
        marketvalue = float(holding['marketvalue'])
        # qty arrives as a float-like string; truncate to whole shares.
        quantity = int(float(holding['qty']))
        price = float(holding['price'])
        # NOTE(review): raises ZeroDivisionError when qty is 0 — confirm
        # the API never reports zero-share holdings.
        base_price = round((costbasis / quantity), 2)
        unrealized_gl = marketvalue - costbasis
        print colored.yellow(symbol)
        print 'cost-basis: ', costbasis, 'base price: ', base_price, 'shares:', quantity
        print 'market-value:', marketvalue, 'current price:', price
        if (unrealized_gl >= 0):
            print 'gain/loss: ', colored.green(unrealized_gl)
        else:
            print 'gain/loss: ', colored.red(unrealized_gl)
        unrealized_total_gl += unrealized_gl
    if (unrealized_total_gl >= 0):
        print colored.yellow('Total unrealized gain/loss: '
                             ), colored.green(unrealized_total_gl)
    else:
        print colored.yellow('Total unrealized gain/loss: '), colored.red(
            unrealized_total_gl)
    print '----------------------------------------------------------------'
    return
def display_services_ports(self):
    """Print the URL of every configured auxiliary service."""
    dns_started = docker.container_running('docker_dns')

    def service_host(service):
        # Prefer the ".docker" DNS name when the dns container is up,
        # otherwise fall back to the raw container IP.
        if dns_started is True:
            return '{}.docker'.format(self.get_vm_item(service, 'name'))
        return self.get_vm_item(service, 'ip')

    puts('{} URL : http://{}'.format(colored.yellow('Web server'), service_host('apache')))

    if self.get_vm_item('mailcatcher', 'ip') != '':
        puts('For {} use : http://{}'.format(colored.yellow('mailcatcher'), service_host('mailcatcher')))
        puts(' '*16 + 'and in your VM use the server "{}" with the port 25\n'.format(colored.yellow('mailcatcher')))

    if self.get_vm_item('maildev', 'ip') != '':
        puts('For {} use : http://{}'.format(colored.yellow('maildev'), service_host('maildev')))
        puts(' '*12 + 'and in your VM use the server "{}" with the port 25\n'.format(colored.yellow('maildev')))

    if self.get_vm_item('mongoclient', 'ip') != '':
        puts('For {} use : http://{}:3000\n'.format(colored.yellow('mongoclient'), service_host('mongoclient')))

    if self.get_vm_item('phpmyadmin', 'ip') != '':
        puts('For {} use : http://{}\n'.format(colored.yellow('phpMyAdmin'), service_host('phpmyadmin')))

    if self.get_vm_item('xhgui', 'ip') != '':
        puts('For {} use : http://{}\n'.format(colored.yellow('xhgui'), service_host('xhgui')))
def tarbell_install(command, args):
    """
    Install a project.

    Clones the repository URL given as the first CLI argument, verifies it
    is a Tarbell project (contains tarbell_config.py), installs its
    requirements and runs the project's "install" hook.
    """
    with ensure_settings(command, args) as settings:
        project_url = args.get(0)
        puts("\n- Getting project information for {0}".format(project_url))
        project_name = project_url.split("/").pop()
        error = None

        # Create a tempdir and clone
        # A throwaway shallow bare clone is made first, purely to check the
        # repository is a real Tarbell project before the full install.
        tempdir = tempfile.mkdtemp()
        try:
            testgit = sh.git.bake(_cwd=tempdir, _tty_in=True, _tty_out=False)  # _err_to_out=True)
            testclone = testgit.clone(project_url, '.', '--depth=1', '--bare')
            puts(testclone)
            config = testgit.show("HEAD:tarbell_config.py")
            puts("\n- Found tarbell_config.py")
            path = _get_path(_clean_suffix(project_name, ".git"), settings)
            _mkdir(path)
            git = sh.git.bake(_cwd=path)
            clone = git.clone(project_url, '.', _tty_in=True, _tty_out=False,
                              _err_to_out=True)
            puts(clone)
            puts(
                git.submodule.update('--init', '--recursive', _tty_in=True,
                                     _tty_out=False, _err_to_out=True))
            _install_requirements(path)

            # Get site, run hook
            with ensure_project(command, args, path) as site:
                site.call_hook("install", site, git)
        except sh.ErrorReturnCode_128 as e:
            # NOTE(review): e.message is Python 2 only — confirm before
            # porting this file to Python 3.
            if e.message.endswith('Device not configured\n'):
                error = 'Git tried to prompt for a username or password.\n\nTarbell doesn\'t support interactive sessions. Please configure ssh key access to your Git repository. (See https://help.github.com/articles/generating-ssh-keys/)'
            else:
                error = 'Not a valid repository or Tarbell project'
        finally:
            # Always drop the throwaway verification clone.
            _delete_dir(tempdir)

        if error:
            show_error(error)
        else:
            puts("\n- Done installing project in {0}".format(
                colored.yellow(path)))
def download_file(url, destination, download_msg=None):
    """Download `url` to `destination`, with a progress bar when possible.

    The payload is written to a temporary file and only copied to
    `destination` after the transfer completes, so a failed download never
    leaves a partial file behind.

    Raises SiphonCommandException when the server responds with a non-2xx
    status code.
    """
    # Fixed: the original tested destination[0] == '/', which raises
    # IndexError on an empty string and is wrong on Windows paths.
    if not os.path.isabs(destination):
        destination = os.path.join(os.getcwd(), destination)
    response = requests.get(url, stream=True, timeout=(10, None))
    content_length = response.headers.get('content-length')
    if not response.ok:
        err = 'Could not download file %s (server responded with status ' \
              'code %s)' % (url, response.status_code)
        response.close()
        raise SiphonCommandException(err)
    tmp = tempfile.mkdtemp()
    tmp_dest = os.path.join(tmp, os.path.basename(destination))
    try:
        with open(tmp_dest, 'w+b') as f:
            if download_msg:
                puts(colored.yellow(download_msg))
            if not content_length:
                # No Content-Length header: no progress bar, one-shot write.
                # Fixed: the original returned here, skipping the copy to
                # `destination` below — the downloaded file was then deleted
                # together with the tempdir in the finally block.
                f.write(response.content)
            else:
                content_length = int(content_length)
                progress = 0
                bar_width = 50  # Length in chars
                for data in response.iter_content(chunk_size=1024):
                    progress += len(data)
                    f.write(data)
                    percentage = round((progress / content_length) * 100, 1)
                    bar = int(bar_width * (progress / content_length))
                    stats = '%s%% (%s/%s)' % (percentage,
                                              format_size(progress),
                                              format_size(content_length))
                    # Include spaces at the end so that if the stat string
                    # shortens, previously printed text isn't visible.
                    sys.stdout.write('\r[%s%s] %s ' %
                                     ('=' * bar, ' ' * (bar_width - bar),
                                      stats))
                    sys.stdout.flush()
        response.close()
        dest_dir = os.path.dirname(destination)
        ensure_dir_exists(dest_dir)
        copyfile(tmp_dest, destination)
        puts(colored.green('\nDownload complete.'))
    except KeyboardInterrupt:
        puts(colored.red('\nDownload interrupted.'))
        raise
    finally:
        # Clean up the tempdir; the extra close() is a harmless no-op
        # when the response was already closed above.
        shutil.rmtree(tmp)
        response.close()
def get_issues(user, repo, assigned=None):
    """List open GitHub issues for user/repo, printed in columns.

    Pull requests are skipped.  When `assigned` is truthy, only issues
    assigned to `user` are requested.  Python 2 only (xrange/unicode).
    """
    github_issues_url = 'https://api.github.com/repos/%s/%s/issues' % (user, repo)
    params = None
    if assigned:
        params = {'assignee': user}
    # A HEAD request exposes the pagination 'Link' header; the fallback
    # value makes `last` evaluate to a single page when it is missing.
    link = requests.head(github_issues_url).headers.get(
        'Link', '=1>; rel="last"')
    # Extract the last page number from the Link header (exclusive bound).
    last = lambda url: int(
        re.compile('=(\d+)>; rel="last"$').search(url).group(1)) + 1
    for pagenum in xrange(1, last(link)):
        connect = requests.get(github_issues_url + '?page=%s' % pagenum,
                               params=params)
        try:
            data = json.loads(connect.content)
        except ValueError:
            raise ValueError(connect.content)
        if not data:
            # Empty page: no (more) issues for this repository.
            puts('{0}. {1}'.format(
                colored.blue('octogit'),
                colored.cyan(
                    'Looks like you are perfect welcome to the club.')))
            break
        elif 'message' in data:
            # GitHub returned an error payload (rate limit, bad repo, ...).
            puts('{0}. {1}'.format(colored.blue('octogit'),
                                   colored.red(data['message'])))
            sys.exit(1)
        for issue in data:
            #skip pull requests
            try:
                # NOTE(review): a plain issue has no 'pull_request' key,
                # which raises KeyError — but only IndexError is caught
                # below.  Confirm whether that is intended.
                if issue['pull_request']['html_url']:
                    continue
                width = [
                    [colored.yellow('#' + str(issue['number'])), 4],
                ]
                if isinstance(issue['title'], unicode):
                    issue['title'] = issue['title'].encode('utf-8')
                width.append([issue['title'], 80])
                width.append(
                    [colored.red('(' + issue['user']['login'] + ')'), None])
                print columns(*width)
            except IndexError as err:
                puts('{0}.Error: {1} triggered -- {2}'.format(
                    colored.blue('octogit'),
                    colored.red('Keyerror'),
                    colored.red(err)))
def _create_spreadsheet(name, title, path, settings):
    """Create Google spreadsheet

    Uploads the project's _base/_spreadsheet.xlsx to Google Drive as a
    converted spreadsheet and shares it with the chosen Google account.
    Returns the new file id, or None on error / when client secrets are
    missing.  Python 2 only (raw_input, old except syntax).
    """
    if not settings.client_secrets:
        return None
    create = raw_input(
        "{0} found. Would you like to create a Google spreadsheet? [Y/n] ".
        format(colored.cyan("client_secrets")))
    # Any non-empty answer other than "y" declines.
    if create and not create.lower() == "y":
        return puts("Not creating spreadsheet...")
    email_message = ("What Google account should have access to this "
                     "this spreadsheet? (Use a full email address, such as "
                     "[email protected] or the Google account equivalent.) ")
    # Offer the configured account as default; otherwise require an answer.
    if settings.config.get("google_account"):
        email = raw_input("\n{0}(Default: {1}) ".format(
            email_message, settings.config.get("google_account")))
        if not email:
            email = settings.config.get("google_account")
    else:
        email = None
        while not email:
            email = raw_input(email_message)
    try:
        media_body = _MediaFileUpload(os.path.join(path,
                                                   '_base/_spreadsheet.xlsx'),
                                      mimetype='application/vnd.ms-excel')
    except IOError:
        show_error("_base/_spreadsheet.xlsx doesn't exist!")
        return None
    service = get_drive_api(settings.path)
    body = {
        'title': '{0} (Tarbell)'.format(title),
        'description': '{0} ({1})'.format(title, name),
        'mimeType': 'application/vnd.ms-excel',
    }
    try:
        # convert=True turns the uploaded .xlsx into a native Google Sheet.
        newfile = service.files()\
            .insert(body=body, media_body=media_body, convert=True).execute()
        _add_user_to_file(newfile['id'], service, user_email=email)
        puts("\n{0}! View the spreadsheet at {1}".format(
            colored.green("Success"),
            colored.yellow(
                "https://docs.google.com/spreadsheet/ccc?key={0}".format(
                    newfile['id']))))
        return newfile['id']
    except errors.HttpError, error:
        show_error('An error occurred creating spreadsheet: {0}'.format(error))
        return None
def tarbell_update(command, args):
    """
    Update the current tarbell project.
    """
    with ensure_settings(command, args) as settings, ensure_project(command, args) as site:
        puts("Updating to latest blueprint\n")

        blueprint_git = sh.git.bake(_cwd=site.base.base_dir)

        # Park local edits so the pull applies cleanly.
        puts(colored.yellow("Stashing local changes"))
        puts(blueprint_git.stash())

        puts(colored.yellow("Pull latest changes"))
        puts(blueprint_git.pull())

        # Put parked edits back on the original branch; this may behave
        # oddly if older changes were already sitting in the stash.
        if blueprint_git.stash.list():
            puts(blueprint_git.stash.pop())
def options_nmap():
    """Render the nmap sub-menu between two separator rules."""
    separator = "".center(50, "▬")
    print(colored.magenta(separator))
    menu_entries = (
        "\t▬▬▬▬▬▬▬▬ [1] - [*] DNS BRUTE ",
        "\t▬▬▬▬▬▬▬▬ [2] - [*] TCP SCANNER ",
        "\t▬▬▬▬▬▬▬▬ [3] - [*] UDP SCANNER ",
        "\t▬▬▬▬▬▬▬▬ [4] - [*] PING ",
        "\t▬▬▬▬▬▬▬▬ [5] - [*] SUBNET SCANNER ",
        "\t▬▬▬▬▬▬▬▬ [6] - [*] FAKE IP SCANNER ",
        "\t▬▬▬▬▬▬▬▬ [99] - [*] Exit ",
    )
    for entry in menu_entries:
        print(colored.yellow(entry))
    print(colored.magenta(separator))
def main() -> None:
    """Entry point: show the banner, pick a platform, build and install."""
    # Terminal width drives the figlet rendering.
    _, width = os.popen('stty size', 'r').read().split()
    banner = Figlet(font='slant', width=int(width))
    print(colored.yellow(banner.renderText(' Mighty')))
    print(colored.yellow(banner.renderText('Card Game')))
    print('Mighty Card Game Web Server: version 1.0.0-dev\n\n')

    answer = prompt([{
        'name': 'platform',
        'type': 'list',
        'message': 'Select platform to run',
        'choices': [
            'docker',
            'native',
        ],
    }])

    if answer['platform'] == 'docker':
        # TODO: add docker configuration
        pass
    else:
        cargo_path = install_cargo()
        wasm_path = install_wasm()
        sass_path = install_sass()
        install_bulma()
        compile_sass_files(sass_path)

        # Sanity-check we are at the project root before building.
        if not os.path.isfile('public/Cargo.toml') or not os.path.isfile(
                'server/Cargo.toml'):
            print_error('Wrong directory; please run this in root of project')
            exit(1)

        print_info('building wasm')
        os.system('cd public && {} build --target web'.format(wasm_path))
        print_info('building server')
        os.system(
            'cd server && {} install --root build --path .'.format(cargo_path))
        minify_files()
def unbind_template(z, hi, template_name):
    """Unbind `template_name` from the device identified by `hi`.

    Resolves the device UUID, deletes the device-local template copy
    (best effort), then unbinds the template.  Returns 1 on failure,
    None on success.  Python 2 only (print statement, old except syntax).
    """
    try:
        print colored.yellow(hi)
        hi_uid = z.import_get_device_uuid(hi_nr=hi)
        # import template
        try:
            z.delete_local_template(device_uid=hi_uid,
                                    template_name=template_name)
        except:
            # NOTE(review): bare except swallows every error here; it is
            # presumably meant to mean "no local copy existed" — confirm.
            print colored.yellow("No local copy for %s on this CI" % template_name)
        z.unbind_template(
            template_name=template_name,
            hi_uid=hi_uid,
        )
    except Exception, e:
        print colored.red(e)
        return 1
def cmd_publish(args):
    """Pushes an unpublished branch to a remote repository."""
    requested = args.get(0)
    branch = fuzzy_match_branch(requested)
    if not branch:
        # No fuzzy match: fall back to the branch currently checked out.
        branch = get_current_branch_name()
        display_available_branches()
        if requested is None:
            print("Using current branch {0}".format(colored.yellow(branch)))
        else:
            print("Branch {0} not found, using current branch {1}".format(colored.red(requested), colored.yellow(branch)))

    # A branch already present on the remote cannot be (re)published.
    remote_names = get_branch_names(local=False)
    if branch in remote_names:
        print("{0} is already published. Use a branch that isn't.".format(
            colored.yellow(branch)))
        sys.exit(1)

    status_log(publish_branch, 'Publishing {0}.'.format(
        colored.yellow(branch)), branch)
def reload(self, arguments):
    """
    Restarts Mech machine, loads new Mechfile configuration.

    Usage: mech reload [options] [<instance>]

    Options:
            --provision                  Enable provisioning
        -h, --help                       Print this help
    """
    # NOTE: the docstring above doubles as the CLI usage/help text — do
    # not reword it without checking the argument parser.
    instance_name = arguments['<instance>']
    instance_name = self.activate(instance_name)

    vmrun = VMrun(self.vmx, user=self.user, password=self.password)
    puts_err(colored.blue("Reloading machine..."))
    # Judging by the branches below: None means the reset failed; a falsy
    # non-None value means the VM was already running — TODO confirm.
    started = vmrun.reset()
    if started is None:
        puts_err(colored.red("VM not restarted"))
    else:
        # Give the guest a moment to boot before querying its IP.
        time.sleep(3)
        puts_err(colored.blue("Getting IP address..."))
        lookup = self.get("enable_ip_lookup", False)
        ip = vmrun.getGuestIPAddress(lookup=lookup)
        if ip:
            if started:
                puts_err(colored.green("VM started on {}".format(ip)))
            else:
                puts_err(
                    colored.yellow(
                        "VM already was started on {}".format(ip)))
        else:
            if started:
                puts_err(
                    colored.green("VM started on an unknown IP address"))
            else:
                puts_err(
                    colored.yellow(
                        "VM already was started on an unknown IP address"))
def __str__(self):
    """Summarise the squad: total cost, projected points, and the roster."""
    total_cost = sum(p.now_cost / 10 for p in self.players)
    # First player's points count double; the last four players are
    # excluded from the total (presumably the bench — see slicing).
    projected_points = sum([self.players[0].total_points * 2] +
                           [p.total_points for p in self.players[1:-4]])
    header = [str(colored.yellow('Total Cost: {:.1f}, Points: {}'.format(
        total_cost, projected_points)))]
    roster = [str(colored.red('Players'))]
    for position, member in enumerate(self.players, start=1):
        roster.append("{:>2}. {}".format(position, member))
    return '\n'.join(header + roster)
def confirm_locations(self):
    '''Confirm with the user the locations to process.

    When self.prompt is set, asks for confirmation and exits the program
    if the user declines.  Python 2 only (print statement).
    '''
    print "confirming locations", self.locations
    # NOTE(review): new_locations is never used in this method — dead
    # variable, or leftover from a removed per-location loop?
    new_locations = []
    if self.prompt:
        question = colored.yellow(
            "Do you want to process the following %d locations: %s" % (
                len(self.locations),
                self.locations,
            ))
        # confirm(question, True) defaults to "yes" — declining aborts.
        if not confirm(question, True):
            sys.exit("User aborted, no locations to process. Exiting.")
def sends(self, username):
    """Read lines from stdin and ship them to the server until '~q'."""
    while True:
        text = input()
        # Cursor-up escape overwrites the local echo of the typed line.
        print("\033[A \033[A")
        print(colored.yellow("<me>"), colored.magenta(text))
        if text == '~q':
            # Quit sentinel: send it, close our write side, stop the loop.
            self.sendMsg(self.sock, text + '\n')
            self.sock.shutdown(socket.SHUT_WR)
            break
        self.sendMsg(self.sock, '<' + username + '>' + text + "\n")
def get_board(self):
    """Interactively pick a Trello board and store it on the instance."""
    boards = self.trello_client.list_boards()
    for index, board in enumerate(boards):
        print(str(index) + ': ' + board.name)
    while True:
        selection = int(prompt.query('Board number: '))
        puts(colored.yellow('Confirm board: ' + boards[selection].name))
        if prompt.query('Confirm: y/n: ') == 'y':
            self.board = boards[selection]
            self.board_id = self.board.id
            # (typo "confimed" kept — it is the program's runtime output)
            puts(colored.green(self.board.name + ' confimed'))
            break
def status(self):
    """Returns a nice table with the list of started containers"""
    try:
        docker_actions.check_cts_are_running(self.project_name)
    except SystemError:
        # Whole stack is down: nothing to tabulate.
        puts(colored.yellow('[INFO]') + ' stakkr is currently stopped')
        sys.exit(0)

    dns_up = docker_actions.container_running('docker_dns')
    self._print_status_headers(dns_up)
    self._print_status_body(dns_up)
def cmd_graft(args):
    """Merges an unpublished branch into the given branch, then deletes it.

    arg 0: branch to graft (required); arg 1: target branch, defaulting
    to the currently checked-out branch.  Python 2 only (print statement).
    """
    branch = args.get(0)
    into_branch = args.get(1)

    if not branch:
        print 'Please specify a branch to graft:'
        display_available_branches()
        sys.exit()

    if not into_branch:
        # Default target: the branch HEAD currently points at.
        into_branch = repo.head.ref.name

    branch_names = get_branch_names(local=True, remote=False)
    remote_branch_names = get_branch_names(local=False, remote=True)

    if branch not in branch_names:
        print "{0} doesn't exist. Use a branch that does.".format(
            colored.yellow(branch))
        sys.exit(1)

    if branch in remote_branch_names:
        # Grafting deletes the branch, so it must not exist on the remote.
        print "{0} is published. To graft it, unpublish it first.".format(
            colored.yellow(branch))
        sys.exit(1)

    if into_branch not in branch_names:
        print "{0} doesn't exist. Use a branch that does.".format(
            colored.yellow(into_branch))
        sys.exit(1)

    # Go to new branch.
    switch_to(into_branch)

    status_log(
        graft_branch,
        'Grafting {0} into {1}.'.format(colored.yellow(branch),
                                        colored.yellow(into_branch)),
        branch)
def bulk_just_share():
    """Schedule every link from the CSV for posting at spaced intervals."""
    rows = read_csv(CSV_FILE)
    posts = process_links(rows)
    # One time slot per post, spaced INTERVAL * 15 apart.
    slots = generate_schedule(len(posts), INTERVAL * 15)

    batch = []
    for post, slot in zip(posts, slots):
        post['time'] = slot
        batch.append(post)

    puts('INFO: Post schedule that has been generated is %s' % batch)
    puts(colored.yellow('INFO: Dispatching batch to be scheduled'))
    post_scheduled_message(batch)