def prompt_edit_drive(drive_list):
    """Interactive loop to edit or delete a Drive chosen by index.

    A plain index edits the Drive; a leading minus sign deletes it.
    Deleted entries are replaced by None in drive_list so the indices
    of the remaining Drives stay stable. Ctrl+C returns to the caller.
    """
    try:
        while True:
            puts()
            target_id = prompt.query("Please enter command: ", validators=[validators.RegexValidator("-?[0-9]+")])
            try:
                if target_id[0] == "-":
                    # Negative index: delete the Drive at that position.
                    target_id = int(target_id[1:])
                    drive = drive_list[target_id]
                    if drive is None:
                        raise ValueError("the Drive has been deleted.")
                    drive_store.delete_record(drive)
                    # Keep the slot so other Drives keep their indices.
                    drive_list[target_id] = None
                    puts(colored.green('Successfully deleted Drive "%s"' % drive.drive_id))
                else:
                    # Plain index: edit the Drive's configuration and persist it.
                    target_id = int(target_id)
                    drive = drive_list[target_id]
                    if drive is None:
                        raise ValueError("the Drive has been deleted.")
                    prompt_drive_config(drive)
                    drive_store.add_record(drive)
                    puts(colored.green('Successfully edited Drive "%s"' % drive.drive_id))
            except ValueError as ex:
                # Bad number or already-deleted Drive: report and re-prompt.
                # NOTE(review): an out-of-range index raises IndexError, which is
                # not caught here — confirm whether that is intended.
                puts(colored.red("Error: " + str(ex)))
    except KeyboardInterrupt:
        puts(colored.green("Aborted."))
def add_entry():
    """Adds an entry to the diary"""
    title_string = "Title (press %s when finished)" % finish_key
    # print(title_string)
    puts(colored.yellow(title_string))
    puts(colored.green("="*len(title_string)))
    # Each field is read until EOF (finish_key, presumably Ctrl+D/Ctrl+Z —
    # TODO confirm what finish_key is bound to).
    title = sys.stdin.read().strip()
    if title:
        entry_string = "\nEnter your entry: (press %s when finished)" % finish_key
        puts(colored.yellow(entry_string))
        puts(colored.green("="*len(entry_string)))
        # reads all the data entered from the user
        data = sys.stdin.read().strip()
        if data:  # if something was actually entered
            puts(colored.yellow(
                "\nEnter comma separated tags(if any!): (press %s when finished) : " % finish_key))
            puts(colored.green("="*(len(title_string)+33)))
            tags = sys.stdin.read().strip()
            tags = processTags(tags)
            puts(colored.green("\n"+"="*len(entry_string)))
            # anything other than 'n'
            if input("\nSave entry (y/n) : ").lower() != 'n':
                DiaryEntry.create(content=data, tags=tags, title=title)
                puts(colored.green("Saved successfully"))
    else:
        puts(
            colored.red("No title entered! Press Enter to return to main menu"))
        input()
    clear()
    return
def status(arguments):
    """Print a status table for each requested environment (Python 2 code).

    Output rows are accumulated in a StringIO buffer and printed once at
    the end; verbose server details go straight to stdout via puts().
    """
    output = StringIO()
    verbose = initialise(arguments)
    environments = arguments['<environment>']
    if environments:
        # Table header, written only when at least one environment was given.
        puts(columns(
            [(colored.green('Nickname')), 15],
            [(colored.green('Instance Type')), 10],
            [(colored.green('Status')), 20],
            [(colored.green('Instance href')), 60],
        ), stream=output.write)
    for environment in environments:
        server = righteous.find_server(environment)
        if server:
            settings = righteous.server_settings(server['href'])
            if verbose:
                server_info = righteous.server_info(server['href'])
                puts('Server Info:\n' + colored.cyan(pformat(server_info)))
                puts('Server Settings:\n' + colored.cyan(pformat(settings)))
            # The conditional expressions below are vestigial: server is
            # always truthy on this branch.
            puts(columns(
                [environment, 15],
                [settings['ec2-instance-type'], 10],
                [server['state'] if server else 'Found', 20],
                [server['href'] if server else 'Not', 60],
            ), stream=output.write)
        else:
            puts(colored.red('%s: Not Found' % environment), stream=output.write)
    print output.getvalue()
def add_personal_account():
    """Link a OneDrive Personal account via the OAuth callback-URL flow.

    Prints the authorization URL, then loops asking the user to paste the
    callback URL until an account is successfully added or Ctrl+C aborts.
    """
    puts(colored.green("Link with an OneDrive Personal account:"))
    puts(
        colored.cyan(
            "Please use your browser to visit the following URL, sign in with your OneDrive account and "
            "authorize onedrive-d, then copy the callback URL back here. The callback URL is the URL at "
            "which the authorization page goes blank and usually starts with "
            + clients.PersonalClient.DEFAULT_REDIRECT_URI + "."
        )
    )
    puts()
    puts(colored.yellow("Please visit this URL: "))
    puts(personal_client.get_auth_uri())
    while True:
        try:
            url = prompt.query(str(colored.yellow("\nPlease paste the callback URL or hit [Ctrl+C] to abort:")))
            account = accounts.get_personal_account(personal_client, uri=url)
            profile = account.profile
            account_store.add_account(account)
            puts(colored.green("Success: added account {} ({}).".format(profile.user_id, profile.name)))
            return
        except KeyboardInterrupt:
            puts(colored.red("Aborted."))
            return
        except Exception as ex:
            # Any other failure (bad URL, network, API error): report and re-prompt.
            puts(colored.red("Error: " + str(ex)))
def Run(report): print colored.yellow("[*] Creating time table.") timelist=[] timetable={} for item in report['process'].get('filemod_complete', []): timelist.append(item.split("|")[1][:-4]) try: for item in report['process'].get('regmod_complete', []): timelist.append(item.split("|")[1][:-4]) except: print colored.red("[-] No registry modifications made.") pass try: for item in report['process'].get('netconn_complete', []): timelist.append(item.split("|")[0][:-4]) except: print colored.red("[-] No network connections found in process report.") pass for item in report['process'].get('modload_complete', []): timelist.append(item.split("|")[0][:-4]) timelist=sorted(set(timelist)) for time in timelist: #print datetime.strptime(time, '%Y-%m-%d %H:%M:%S') timetable[time]=[] print colored.green("[+] Completed.\n") return timetable,timelist
def run(self): while self.running: if self.queue.empty(): print "waiting...." time.sleep(5) else: id = self.queue.get() try: print "[%s] downloading: %s" % (self.ident, id) details = self.playapi.details(id) #print details version = details.docV2.details.appDetails.versionCode print id + " " + str(version) apk = self.playapi.download(id, version) print colored.green("finished dowloading ") + id f = open("../playapks/"+id+".apk", 'w') f.write(apk) f.close() print "written " + id + " to file" except message_mod.DecodeError, e: print colored.red("DecodeError on downloading :") + id print "Probably google blocked the IP" print colored.yellow("waiting 2:30 minutes for google to calm down") appQueue.put(id) # insert this app into the queue again (there was no principal error with the app, google blocked us) time.sleep(150) continue except IndexError: print colored.red("IndexError") + " on downloading :" + id print "Probably app is not found in app store" continue except Exception, e: print colored.red("Error on downloading :") + id print e errors.put(id) continue
def get_block_js(func, **modifiers):
    """Возвращает js для блока с заданными модификаторами.

    (Returns the JS for a block with the given modifiers: reads the base
    <block>.js next to the block's module and, if different, the
    modifier-specific file, concatenating whichever exist.)
    """
    from importlib import import_module
    import os.path
    # Resolve the directory of the module that defines the block.
    mo = import_module(func.__module__)
    dir_name = os.path.dirname(mo.__file__)
    main_js_filename = os.path.join(
        dir_name, 'js', get_block_name(func) + '.js')
    js_filename_with_modifiers = os.path.join(
        dir_name, 'js', get_blockname_with_modifiers(func, **modifiers) + '.js')
    result = u''
    # Python 2: the trailing comma keeps 'found'/'missing' on the same line.
    print 'checking', main_js_filename,
    if os.path.exists(main_js_filename):
        print colored.green('found')
        with open(main_js_filename) as f:
            result += f.read().decode('utf-8') + u'\n'
    else:
        print colored.red('missing')
    # Only look for the modifier file when it differs from the base file
    # (i.e. when modifiers actually change the name).
    if js_filename_with_modifiers != main_js_filename:
        print 'checking', js_filename_with_modifiers,
        if os.path.exists(js_filename_with_modifiers):
            print colored.green('found')
            with open(js_filename_with_modifiers) as f:
                result += f.read().decode('utf-8') + u'\n'
        else:
            print colored.red('missing')
    return result
def check_execution(self, md5):
    """Check whether an MD5 appears in Carbon Black, as parent or child process.

    Prints console URLs for any matches. NOTE(review): the
    `md5query.endswith(" ")` guard can never be true — md5query is built
    without trailing whitespace — so the "Bit9 did not capture" branch
    looks dead; confirm the intended check.
    """
    parentquery = 'parent_md5:'+md5
    md5query = 'md5:'+md5
    if md5query.endswith(" "):
        print colored.red("[-] Bit9 did not capture the MD5 :(\n")
    else:
        print colored.yellow("[*] Checking if Parent MD5 process in Carbon Black...")
        parentresult = self.cb.process_search(parentquery, sort='start desc')
        if parentresult['total_results'] == 0:
            print colored.cyan("[+] Not a Parent MD5 process")
        else:
            # Build a deep link into the Carbon Black console for this hash.
            cbparentmd5url = self.parentmd5url+md5+"&sort=&rows=10&start=0"
            print colored.green("[+] Parent MD5 event found in Carbon Black.")
            print colored.cyan(cbparentmd5url)
        print colored.yellow("[*] Checking if MD5 seen in Carbon Black...")
        md5result = self.cb.process_search(md5query, sort='start desc')
        if md5result['total_results'] == 0:
            print colored.cyan("[+] Not seen in Carbon Black.")
        else:
            cbmd5url = self.md5url+md5+"&sort=&rows=10&start=0"
            print colored.green("[+] MD5 Found in CB.")
            print colored.cyan(cbmd5url)
#cb=CB()
def ban_hash(self, hashvalue, rulename): print colored.yellow("[*] Banning "+ hashvalue+"...") data = {'hash': hashvalue, 'fileState': 3, 'policyIds': '0', 'name': rulename} r = requests.post(self.fileruleurl, json.dumps(data), headers=self.authJson, verify=self.b9StrongCert) r.raise_for_status() fileRule = r.json() print colored.green("[+] "+rulename+" "+hashvalue+" Banned!")
def print_shell(self, *args):
    """Print arguments, colourising each by simple substring heuristics (Python 2).

    NOTE(review): a near-identical print_shell definition exists later in
    this file (differing only in quote style) — consider deduplicating.
    NOTE(review): `isinstance(type(args), types.NoneType)` tests the type
    of the args tuple, not the current arg, and is always False — confirm
    whether the intent was to skip None arguments.
    """
    try:
        for arg in args:
            arg = str(arg)
            if isinstance(type(args), types.NoneType):
                continue
            if self.color == "true":
                # A line made entirely of the ruler character is a separator.
                if str(arg).count(self.ruler) == len(str(arg)):
                    print colored.green(arg),
                elif "Error" in arg:
                    print colored.red(arg),
                elif ":\n=" in arg:
                    print colored.red(arg),
                elif ":" in arg:
                    print colored.blue(arg),
                elif "type" in arg:
                    print colored.green(arg),
                elif "state" in arg or "count" in arg:
                    print colored.magenta(arg),
                elif "id =" in arg:
                    print colored.yellow(arg),
                elif "name =" in arg:
                    print colored.cyan(arg),
                else:
                    print arg,
            else:
                print arg,
        print
    except Exception, e:
        print colored.red("Error: "), e
def main(): print "\n===========================================" print colored.green("Starting Eueler Solution Generator") print "===========================================\n" euler_dir = os.path.expanduser('~/dev/euler') posts_dir = os.path.join(os.path.dirname(os.path.abspath(__file__))) index = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'index.html') problems = [] print colored.yellow("Progress:") for directory in progress.bar(os.listdir(euler_dir)): path = os.path.join(euler_dir, directory) if is_problem(path): problems.append(Problem(path)) problems = filter(lambda p: p.solutions, problems) problems.sort(key = lambda p: p.number) print "\n===========================================" print colored.green("Processed %d problem(s)" % len(problems)) print "===========================================" for i, problem in enumerate(problems): if i > 0: problem.previous = problems[i-1] if i < len(problems) - 1: problem.next = problems[i+1] generate_posts(problems, posts_dir) generate_index(problems, index)
def tarbell_publish(args):
    """Publish a site by calling s3cmd"""
    with ensure_settings(args) as settings, ensure_project(args) as site:
        # Bucket alias defaults to "staging" when no positional arg is given.
        bucket_name = list_get(args, 0, "staging")
        bucket_uri = site.project.S3_BUCKETS.get(bucket_name, False)
        creds = settings.config.get('s3_creds')
        # NOTE(review): if the bucket alias is unknown, bucket_uri is False and
        # this slice raises TypeError before the friendly error below is ever
        # reached — confirm and guard earlier if so.
        root_url = bucket_uri[5:]  # strip the "s3://" scheme prefix
        extra_context = {
            "ROOT_URL": root_url,
        }
        # Generate the static site into a temp dir, then sync it to S3.
        tempdir = "{0}/".format(tarbell_generate(args, extra_context=extra_context, skip_args=True))
        try:
            if bucket_uri and creds:
                puts("\nDeploying {0} to {1} ({2})\n".format(
                    colored.yellow(site.project.TITLE),
                    colored.red(bucket_name),
                    colored.green(bucket_uri)
                ))
                if creds:
                    s3 = S3Sync(tempdir, bucket_uri, creds['default']['key_id'], creds['default']['key'])
                    s3.deploy_to_s3()
            else:
                show_error(("\nThere's no bucket configuration called '{0}' "
                            "in tarbell_config.py.".format(colored.yellow(bucket_name))))
        except KeyboardInterrupt:
            show_error("ctrl-c pressed, bailing out!")
        finally:
            # Always clean up the generated temp dir, even on abort.
            _delete_dir(tempdir)
        puts("\nIf you have website hosting enabled, you can see your project at:")
        puts(colored.green("http://{0}\n".format(root_url)))
def get_hash_state(): print colored.magenta("[?] Paste in the MD5, SHA-1 or SHA256 hash:") hashvalue=raw_input().strip(" ") if len(hashvalue)==32: print colored.yellow("[*] MD5 Detected.") hashtype="md5" if len(hashvalue)==40: print colored.yellow("[*] SHA-1 Detected.") hashtype="sha1" if len(hashvalue)==64: print colored.yellow("[*] SHA-256 Detected.") hashtype="sha256" print colored.yellow("[*] Checking "+str(hashvalue)) hashstate=bit9.check_hash(hashtype, hashvalue) if len(hashstate)==0: print colored.yellow("[-] Hash does not exist in our envirnment") print colored.magenta("[?] Would you like to ban this Hash?") userinput=get_user_input() if userinput==True: bit9.ban_hash(hashvalue,"Pre-emptive Hash Ban") if userinput==False: print colored.yellow("[*] Okay, not banning the Hash.") else: bit9.eval_hash_state(hashstate) print colored.green("[+] Hash checking complete.")
def ban_certificate(self, hashstate): print colored.yellow("[*] Banning certificate for "+hashstate[0]['publisher']+"...") data = {'publisherState': 3} r = requests.put(self.certificateurl+str(hashstate[0]['publisherId']), json.dumps(data), headers=self.authJson, verify=self.b9StrongCert) r.raise_for_status() fileRule = r.json() print colored.green("[+] "+hashstate[0]['publisher']+" certificate has been Banned!")
def select_template():
    """Prompt the user to choose a template by number and store the selection.

    Recurses until a valid index whose template configuration file exists
    is chosen, then launches the task manager for that template.
    """
    global configuration
    # Renamed from 'list' — the original shadowed the builtin.
    available = get_templates()
    puts(colored.green('Please Select The Template:'))
    for index, value in enumerate(available):
        with indent(2):
            puts('%d) %s' % (index+1, value.get('title')))
    old = configuration.get('template', 1)
    new = raw_input('[%s] Template: (%s) ' % (colored.green('?'), str(old)))
    # Empty input keeps the previously configured template.
    new_value = old if not new else int(new)
    if new_value > len(available):
        puts(colored.yellow('Warning!') + ' %s is ' % new + colored.yellow('INVALID'))
        select_template()
    else:
        configuration.put('template', new_value)
        templates = get_templates()
        configuration_path = None
        # Resolve the chosen template's configuration file path.
        for index, value in enumerate(templates):
            if index+1 == configuration.get('template'):
                path = value.get('directory', None)
                configuration_path = os.path.join(os.path.dirname(__file__), path, TEMPLATES_CONFIGURATION_FILE)
                break
        if not configuration_path or not os.path.exists(configuration_path):
            puts(colored.yellow('Warning!') + ' %s is ' % new + colored.yellow('INVALID'))
            select_template()
        else:
            configuration.put('template_path', get_template_path())
            make_and_run_taskmanager(configuration_path)
def tarbell_configure(args):
    """Tarbell configuration routine"""
    puts("Configuring Tarbell. Press ctrl-c to bail out!")
    path = get_config_from_args(args)
    _get_or_create_config_dir(path)

    # Run each interactive setup step in order, merging its answers
    # into a single settings dict.
    settings = {}
    for step in (_setup_google_spreadsheets,
                 _setup_s3,
                 _setup_tarbell_project_path,
                 _setup_default_templates):
        settings.update(step(path))

    settings_path = os.path.join(path, "settings.yaml")
    _backup(path, "settings.yaml")
    with open(settings_path, "w") as f:
        puts("\nCreating {0}".format(colored.green(settings_path)))
        yaml.dump(settings, f, default_flow_style=False)

    puts("\n- Done configuring Tarbell. Type `{0}` for help.\n"
         .format(colored.green("tarbell")))
    return Settings(path)
def enter_movie():
    """Prompt for a movie name and category, then register it with the storekeeper (Python 2 code)."""
    validinput = 0
    name = raw_input("Enter the movie name: ")
    print colored.green("\nChoose category\n1. new release @ 1000 per day\n2. Children's @ 300 per day\n3. Regular @ 500 per day" )
    # validate user input
    while not validinput:
        try:
            categorychoice = int(raw_input('Enter your choice [1-3] : '))
            if categorychoice < 1 or categorychoice > 3:
                print colored.red("Choice out of range. Try again")
            else:
                if categorychoice == 1:
                    # new release has a price of 1000 per day
                    category = "new release"
                    price = 1000
                elif categorychoice == 2:
                    # children's has a price of 300 per day
                    category = "Children's"
                    price = 300
                else:
                    # regular has aprice of 500 per day
                    category = "Regular"
                    price = 500
                movie = Movie(name, category, price)
                # Keziah is an instance of Storekeeper, add the movie to Storekeeper's movies
                Keziah.movies.append(movie)
                # help storekeeper know movie is well saved
                print "Movie is saved"
                validinput = 1  ## set it to 1 to validate input and to terminate the while..not loop
        except ValueError:
            # Non-integer input: re-prompt.
            print colored.red("This is not an integer. Try again")
def get_dados(): global cache try: data="1" entrada = {'info': data} r = requests.get("http://cafeteiraonline.rsilveira.info/api.php", params=entrada) parser = json.loads(r.content) print (colored.green('Café:')), print parser['cafe'][0] print (colored.green('Hora:')), print parser['hora'][0] print (colored.green('Data:')), print parser['data'][0] cafeaux=parser['cafe'][0] if cafeaux =="1" and cache =="0": cache ="1" return True elif cafeaux =="1" and cache =="1": cache ="1" return False elif cafeaux =="0" and cache =="1": cache ="0" return False elif cafeaux =="0" and cache =="0": cache ="0" return False except: print(colored.red('Erro'))
def processDroneCommand(input):
    """Dispatch a drone ground-station command string (Python 2 code).

    Commands: SETUP <coords> <sequence> loads waypoints; GOTO_NEXT advances
    to the next waypoint when disarmed; MANUAL switches to manual control.
    NOTE: the parameter name shadows the builtin `input`.
    """
    elements = input.split(' ')
    if len(elements) > 0:
        i_command = elements[0]
        global sequencePos, sequenceList, vehicleState
        global targetLat, targetLon, targetAlt
        if i_command == 'SETUP':
            i_coordinates = elements[1]
            i_sequence = elements[2]
            sequencePos = 0  #reset sequence position
            coordinateStringList = i_coordinates.split(';')  #get a list of strings 'lat,lon,alt',...
            for coordinateString in coordinateStringList:
                # NOTE: this rebinds the outer 'elements' variable.
                elements = coordinateString.split(',')  # get one coordinate as list - lat,lon,alt
                print colored.green('elements' + str(elements))
                coordinateList.append([float(element) for element in elements])
            sequenceList = [int(el) for el in i_sequence.split(',')]
        elif i_command == 'GOTO_NEXT' and vehicleState == 'disarmed':
            if sequencePos < len(sequenceList):  #check if not reached the end of the sequence
                vehicleState = 'ready'
                #print "current sequence position: ", sequencePos
                sequenceItem = sequenceList[sequencePos]
                targetLat, targetLon, targetAlt = coordinateList[sequenceItem]
                sequencePos += 1
                print time.strftime("%H:%M:%S"), colored.yellow(' target location index = ' + str(sequenceItem))
            else:
                print time.strftime("%H:%M:%S"), colored.yellow(' Finished sequence.')
        elif i_command == 'MANUAL':
            vehicleState = 'manual'
        else:
            print time.strftime("%H:%M:%S"), colored.red(' Invalid command: ' + str(input))
def create(app_name, app_path): auth = Auth() # Create the app server-side siphon = Siphon(auth.auth_token) obj = siphon.create(app_name) app_id = obj['id'] # server gives us back our internal app ID # Populate our new directory with template files copy_app_template(app_name, app_path) # Write out a .siphon configuration to the new direcotry conf = Config(directory=app_name) conf.app_id = app_id # Copy our .siphonignore file over siphon_ignore = os.path.join(CLI_RESOURCES, '.siphonignore') shutil.copyfile(siphon_ignore, os.path.join(app_path, '.siphonignore')) puts(colored.green('Siphon app created at %s' % app_path)) # Register Mixpanel event username = auth.username mixpanel_event(MIXPANEL_EVENT_CREATE, username, {'app_id': app_id, 'existing_app': False}) # Write out the Siphonfile with open(os.path.join(app_name, SIPHON_USER_CONFIG), 'w') as fp: json.dump({'base_version': obj['base_version']}, fp, indent=2) # Implicitly do a push too with cd(app_path): from siphon.cli.commands.push import push push(track_event=False) puts(colored.green('Done.'))
def list_existing_drives():
    """List all registered Drives in a table and hand off to the edit/delete prompt."""
    puts(colored.green('List registered Drives for editing / deleting...\n'))
    with indent(4, quote=' >'):
        puts('To edit a Drive, type the index of the Drive in the table.')
        puts('To delete a Drive, type a minus sign followed by the index of the Drive.')
        puts('To abort and return to main menu, hit [Ctrl+C].')
        puts('For example, type "1" to edit the Drive indexed 1, and type "-1" to delete it.')
    puts()
    # Ensure accounts are loaded before resolving each Drive's profile.
    account_store.get_all_accounts()
    drive_list = []
    for key, drive in drive_store.get_all_drives().items():
        drive_id, account_id, account_type = key
        with indent(4):
            # NOTE(review): this header row is emitted inside the loop, so it
            # repeats before every Drive — confirm whether it should be
            # printed once before the loop instead.
            puts(columns(
                [(colored.green('Index')), 8],
                [(colored.magenta('Drive ID')), 17],
                [(colored.magenta('Drive Type')), 12],
                [(colored.cyan('Account')), 20],
                [(colored.yellow('Local Root')), None]))
            profile = drive.root.account.profile
            puts(columns(
                [str(len(drive_list)), 8],
                [drive_id, 17],
                [drive.type, 12],
                ["{} ({})".format(account_id, profile.name), 20],
                [drive.config.local_root, None]))
        drive_list.append(drive)
    prompt_edit_drive(drive_list)
def print_path_info(address, path, coin_symbol, wif=None):
    """Print one derivation path with its address (and optional WIF) and,
    when online, the address balance formatted in the configured unit."""
    assert path, path
    assert coin_symbol, coin_symbol
    assert address, address

    # Append the private key (WIF) after the address when one was supplied.
    address_formatted = '%s/%s' % (address, wif) if wif else address

    if not USER_ONLINE:
        # Offline: no balance lookup is possible.
        with indent(2):
            puts(colored.green('%s (%s)' % (path, address_formatted)))
        return

    addr_balance = get_total_balance(
        address=address,
        coin_symbol=coin_symbol,
    )
    balance_display = format_crypto_units(
        input_quantity=addr_balance,
        input_type='satoshi',
        output_type=UNIT_CHOICE,
        coin_symbol=coin_symbol,
        print_cs=True,
    )
    with indent(2):
        puts(colored.green('%s (%s) - %s' % (path, address_formatted, balance_display)))
def scp(self, src, dst):
    """Copy a file between the host and the VM over scp.

    A leading ":" marks the VM side of the transfer: ":path" as dst sends
    src to the VM; ":path" as src fetches it from the VM. Exactly one of
    src/dst may carry the ":" prefix.
    """
    vm = Vmrun(self.vmx)
    ip = vm.ip()
    user = self.user
    if ip:
        src_is_host = src.startswith(":")
        dst_is_host = dst.startswith(":")
        if src_is_host and dst_is_host:
            puts(colored.red("Both src and host are host destinations"))
            exit()
        if dst_is_host:
            # Host -> VM: strip the ":" marker and push the file.
            dst = dst[1:]
            puts("Sending {src} to {user}@{ip}:{dst}".format(
                user=colored.green(user),
                ip=colored.green(ip),
                src=src,
                dst=dst,
            ))
            os.system('scp {} {}@{}:{}'.format(src, user, ip, dst))
        else:
            # VM -> host: strip the ":" marker and pull the file.
            src = src[1:]
            puts("Getting {user}@{ip}:{src} and saving in {dst}".format(
                user=colored.green(user),
                ip=colored.green(ip),
                src=src,
                dst=dst,
            ))
            os.system('scp {}@{}:{} {}'.format(user, ip, src, dst))
    else:
        puts(colored.red("IP not found"))
    return
def __init__(self):
    # Subcommand metadata, filled in by subclasses.
    self.command = ''
    self.help = ''
    self.description = ''
    # Add a default additional description of the program
    self.tuttleWebSiteUserDoc = colored.green('http://www.tuttleofx.org/user-documentation')
    self.tuttleWebSiteSequences = colored.green('http://www.tuttleofx.org/user-documentation/command-line-examples')
    # colored strings gained a .bold attribute in clint 0.3.3.
    if clintVersion >= '0.3.3':
        self.tuttleWebSiteUserDoc.bold = True
        self.tuttleWebSiteSequences.bold = True
    self.epilog = '''
    See the online documentation for more details:
    General user documentation
    ''' + self.tuttleWebSiteUserDoc + '''
    How to manipulate sequences
    ''' + self.tuttleWebSiteSequences + '''
    '''
    # create logger
    self.logger = logging.getLogger('SAM')
    self.logger.setLevel(logging.DEBUG)
    # Add a console handler
    self.addConsoleHandler()
    # Set default log level of all sam tools to warning
    self.setLogLevel(2)
def test_regex(self, line):
    '''Test a regex to see how many actions match.

    Compiles the pattern, runs it against every unmatched action, reports
    the matches, and stores them in self.matched for later inspection.
    NOTE(review): self.current_rgx is first set to the compiled pattern
    and later overwritten with the raw string — confirm which one the
    rest of the class expects.
    '''
    try:
        rgx = re.compile(line)
    except sre_constants.error as e:
        msg = red('Bad regex: ') + green(repr(line)) + ' You have failed the bat-test.'
        puts(msg)
        print e
        return
    self.current_rgx = rgx
    puts('Testing ' + colored.green(line))
    matched = []
    # Collect every action the pattern matches, along with its named groups.
    for action in self.actions.unmatched:
        m = re.search(line, action)
        if m:
            matched.append([action, m.groupdict()])
    if not matched:
        with indent(4, quote=' >'):
            puts(red('Aw, snap!') + ' ' + cyan('No matches found!'))
        return
    self.current_rgx = line
    self.show_matches_start = 0
    with indent(4, quote=' >'):
        puts('Found ' + colored.red(len(matched)) + ' matches:')
        # Only display the first self.show matches; the rest stay available.
        self._print_matches(matched[:self.show])
    self.matched = matched
def list_existing_accounts():
    """Print all linked accounts in a table and offer interactive deletion."""
    col = 20  # column width shared by the ID/type columns
    all_accounts = account_store.get_all_accounts()
    if len(all_accounts) == 0:
        puts(colored.red("There is no linked account yet."))
        return
    puts(colored.green("You have linked the following account(s) to onedrive-d:\n"))
    # Table header.
    puts(
        columns(
            [(colored.red("Index")), 10],
            [(colored.magenta("Account ID")), col],
            [(colored.cyan("Account Type")), col],
            [(colored.green("Name")), None],
        )
    )
    account_list = []
    for id, account in all_accounts.items():
        # Index shown to the user is the position in account_list.
        puts(
            columns(
                [str(len(account_list)), 10],
                [account.profile.user_id, col],
                [account.TYPE, col],
                [account.profile.name, None],
            )
        )
        account_list.append(account)
    puts(colored.yellow("\nTo delete an account, type the index and hit [Enter]. Otherwise hit [Ctrl+C] to break."))
    puts(colored.yellow("Note: all the Drives belonging to the account will also be deleted."))
    puts()
    try:
        prompt_delete_account(account_list)
    except KeyboardInterrupt:
        puts(colored.green("Aborted."))
def __format_gist(gist):
    """ Formats the output for a Gist metadata object.

    :param gist: :class: `Gist <Gist>` instance.
    """
    from shutil import get_terminal_size  # stdlib; local import keeps the module header untouched

    # Width of the current terminal. Unlike the previous `stty size` popen,
    # this also works when stdout is not attached to a tty (falls back to 80).
    columns = get_terminal_size().columns

    # Prepare the Header
    gists_string = colored.cyan('-' * int(columns)) + "\n"
    gists_string += colored.cyan("Gist [" + gist.identifier + "]") + '\n'
    gists_string += colored.cyan('-' * int(columns)) + "\n"

    # Format Gist data
    gists_string += colored.green('Description:\t')
    if gist.description:
        gists_string += gist.description + '\n'
    gists_string += colored.green('Url:\t\t')
    gists_string += gist.url + '\n'
    gists_string += colored.green('Html Url:\t')
    gists_string += gist.html_url + '\n'
    gists_string += colored.green('Private:\t')
    gists_string += str(not gist.public) + '\n'
    gists_string += colored.green('Files:\t\t')
    gist_names = [gistfile.filename for gistfile in gist.files]
    stringfiles = "[" + ", ".join(gist_names) + "]"
    gists_string += colored.red(stringfiles) + '\n'

    # Prepare the Footer
    gists_string += colored.cyan('-' * int(columns)) + "\n"

    return gists_string
def __format_file(file_gist):
    """ Formats the output for a GistFile object.

    :param gist: :class: `GistFile <GistFile>` instance.
    """
    from shutil import get_terminal_size  # stdlib; local import keeps the module header untouched

    # Width of the current terminal. Unlike the previous `stty size` popen,
    # this also works when stdout is not attached to a tty (falls back to 80).
    columns = get_terminal_size().columns

    # Prepare the Header
    gist_string = colored.cyan('-' * int(columns)) + "\n"
    gist_string += colored.cyan("File [" + file_gist.filename + "]\n")
    gist_string += colored.cyan('-' * int(columns)) + "\n"

    # Format Gist data
    gist_string += (colored.green("Language:") + " " +
                    colored.red(file_gist.language) + "\n")
    gist_string += (colored.green("Size:") + " " +
                    colored.red(file_gist.size) + "\n")
    gist_string += (colored.green("Raw Url:") + " " +
                    colored.red(file_gist.raw_url + "\n"))
    gist_string += (colored.green("Content:\n\n") + file_gist.content + "\n\n")

    # Prepare the Footer
    gist_string += colored.cyan('-' * int(columns)) + "\n"

    return gist_string
def print_help():
    """Print usage instructions for the gen_api.py generator script."""
    for header_line in ('Baancomplete gen_api.py',
                        'Use this python script to generate a baancomplete_api.sqlite file',
                        ''):
        puts(header_line)
    puts('Either from library documentation generated with ttstpbaandoc')
    with indent(2):
        puts('{0} {1} {2} {3}'.format(
            colored.green('--doc'),
            colored.yellow('[file or directory (subfolders are searched too)]'),
            colored.green('--out'),
            colored.yellow('[file]')))
    puts('Or from table definitions (database credentials required)')
    with indent(2):
        puts('{0} {1} {2} {3}'.format(
            colored.green('--db'),
            colored.yellow('[mssql]'),
            colored.green('--out'),
            colored.yellow('[file]')))
    # Closing note about where the generated database must live.
    puts(colored.red('''
The output file is a sqlite3 database.
Copy it into the baancomplete autoload folder and name it baancomplete_api.sqlite
You can change the path to the folder where baancomplete will look for the api file by setting g:baancomplete_path in .vimrc
But you cannot change the filename itself.
'''))
def print_shell(self, *args):
    """Print arguments, colourising each by simple substring heuristics (Python 2).

    NOTE(review): a near-identical print_shell definition exists earlier in
    this file (differing only in quote style) — consider deduplicating.
    NOTE(review): `isinstance(type(args), types.NoneType)` tests the type
    of the args tuple, not the current arg, and is always False — confirm
    whether the intent was to skip None arguments.
    """
    try:
        for arg in args:
            arg = str(arg)
            if isinstance(type(args), types.NoneType):
                continue
            if self.color == 'true':
                # A line made entirely of the ruler character is a separator.
                if str(arg).count(self.ruler) == len(str(arg)):
                    print colored.green(arg),
                elif 'Error' in arg:
                    print colored.red(arg),
                elif ":\n=" in arg:
                    print colored.red(arg),
                elif ':' in arg:
                    print colored.blue(arg),
                elif 'type' in arg:
                    print colored.green(arg),
                elif 'state' in arg or 'count' in arg:
                    print colored.magenta(arg),
                elif 'id =' in arg:
                    print colored.yellow(arg),
                elif 'name =' in arg:
                    print colored.cyan(arg),
                else:
                    print arg,
            else:
                print arg,
        print
    except Exception, e:
        print colored.red("Error: "), e
def delete(self, arguments):
    """
    Delete a snapshot taken previously with snapshot save.

    Usage: mech snapshot delete [options] <name> [<instance>]

    Options:
        -h, --help                       Print this help
    """
    name = arguments['<name>']
    instance_name = arguments['<instance>']
    instance_name = self.activate(instance_name)

    vmrun = VMrun(self.vmx)
    # deleteSnapshot returns None on failure.
    if vmrun.deleteSnapshot(name) is None:
        # Include the snapshot name in the error; the original printed the
        # literal string "Cannot delete name".
        puts_err(colored.red("Cannot delete snapshot {}".format(name)))
    else:
        puts_err(colored.green("Snapshot {} deleted".format(name)))
def _render_category_choice_view(self, paginator, current_page, info, highlighted_item):
    """Render one page of the category chooser: header, key bindings,
    the paginated category list (with the highlighted row marked '*'),
    and a pagination footer."""
    self._clear_screen()
    puts_header("Choose a category")
    puts_key_value("Current category", colored.yellow(info['Category']))
    puts(colored.green("-" * 80))
    puts_key_value("Main categories", "[A]ll, [I]nstalled")
    puts_key_value("Order by", "Na[m]e, Pac[K]ages")
    categories = paginator.page(current_page)
    # Absolute index of the first item on this page, for 1-based numbering.
    starting_index = paginator.pagination * (current_page - 1)
    pagination_tpl = "Page %s of %s" % (current_page, paginator.num_pages)
    puts(colored.green("-" * 80), newline=False)
    puts(pagination_tpl)
    puts(colored.green("-" * 80))
    for index, category in enumerate(categories):
        quote = "%s)" % str(starting_index + index + 1)
        # highlighted_item is 1-based within the current page.
        if index + 1 == highlighted_item:
            quote += " * "
        with indent(indent=6, quote=quote):
            title = colored.green(category.title)
            #title += "[%s]" %len(category.packages)
            puts(columns([title, 40], [
                colored.yellow("[%s packages]" % len(category.packages)), 40
            ]), newline=False)
            #puts("%s" %category.description or "")
            puts()
            #with indent(indent=6):
            #    puts_key_value("Packages", "%s" %len(category.packages))
            #puts("%s" %category.description)
    puts(colored.green("-" * 80), newline=False)
    puts(pagination_tpl)
    puts(colored.green("-" * 80))
def getTokenInfo(state):
    """Resolve token metadata (symbol, address, decimals) for the configured token.

    Returns a dict with the token name/address/decimals plus the transfer
    amount converted into the token's smallest unit. Raises UsageError when
    a custom token address has no contract behind it; exits when the token
    is unknown.
    """
    # get token address and decimal places from symbol
    info = name_to_token(state.get('token'))

    # user wants to use custom token if address != 0x0
    if (state.get('token_address') != '0x0000000000000000000000000000000000000000'):
        try:
            token = getTokenContract(
                state.get('network'),
                to_checksum_address(state.get('token_address')))
        except BadFunctionCallOutput:
            raise UsageError(
                'there doesn\'t seem to be a token at that address...')
        print('Getting token info... ', end='', flush=True)
        info.update({
            #'name' : t.functions.name().call()),
            'addr': state.get('token_address'),
            'name': token.functions.symbol().call(),
            'decimals': token.functions.decimals().call()
        })
        puts(colored.green('done.'))

    # fail if no token info was provided
    if (not info):
        # Fixed NameError: 'data' was undefined here; report the token
        # requested in state instead.
        print(f'Error {state.get("token")} is not supported.')
        exit(1)

    return {
        'token': info.get('name'),
        'token_address': info.get('addr'),
        'token_decimals': int(info.get('decimals')),
        # set amount to correct unit by multiplying the human-readable unit by
        # the number of decimals.
        'amount': int(state.get('amount') * pow(10, int(info.get('decimals'))))
    }
def microsoft(phone_number):
    """Probe whether a phone number is linked to a Microsoft account.

    Drives a headless Chrome through the login.live.com flow with a
    deliberately wrong password and inspects the resulting error text.
    Sets the module-level `name` (result message) and
    `microsoft_load_balancer` (busy flag) globals.
    """
    global name
    global microsoft_load_balancer
    # Mark this checker as busy until the probe finishes.
    microsoft_load_balancer = True
    options = webdriver.ChromeOptions()
    options.add_argument('--headless')
    options.add_argument('--no-sandbox')
    options.add_argument('disable-infobars')
    # Force English so the error-message comparison below matches.
    options.add_experimental_option('prefs', {'intl.accept_languages': 'en,en_US'})
    options.add_argument("--lang=en")
    prefs = {
        "translate_whitelists": {"ru": "en"},
        "translate": {"enabled": "true"}
    }
    options.add_experimental_option("prefs", prefs)
    options.add_argument("user-agent=Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.129 Safari/537.36")
    loc = os.getcwd()
    driver = uc.Chrome(options=options)
    driver.get("https://login.live.com/")
    try:
        # Enter the phone number and submit.
        WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.NAME, 'loginfmt'))).send_keys(phone_number)
        WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.XPATH, "/html/body/div/form[1]/div/div/div[2]/div/div/div[1]/div[2]/div[2]/div/div/div/div[4]/div/div/div/div/input"))).click()
        #/html/body/div/form[1]/div/div/div[1]/div[2]/div[2]/div/div[2]/div/div[2]/div/div[2]/input
        # Submit an intentionally wrong password; a "password incorrect"
        # error implies the account exists.
        WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.NAME, "passwd"))).send_keys("QWKEQĞPWEQWE")
        WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.XPATH, "/html/body/div/form[1]/div/div/div[2]/div/div/div[1]/div[2]/div[2]/div/div[2]/div/div[3]/div[2]/div/div/div/div/input"))).click()
        name = WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.ID, "passwordError"))).text
        if name == "Your account or password is incorrect. If you don't remember your password, reset it now.":
            name = "This Phone Number Is Connected To A Micr0s0ft Account!"
            print(colored.green("[+]")+colored.blue(name))
        else:
            name = "This Phone Number Is Not Connected To Any Micr0s0ft Account!"
            print(colored.magenta("[-]")+colored.red(name))
    except:
        # NOTE(review): bare except — any selenium timeout (or Ctrl+C) is
        # treated as "not connected"; consider narrowing to TimeoutException.
        name = "This Phone Number Is Not Connected To Any Micr0s0ft Account!"
        print(colored.magenta("[-]")+colored.red(name))
    microsoft_load_balancer = False
    driver.close()
def scrape_audiomack_url(mc_url, num_tracks=sys.maxsize, folders=False):
    """Download a single Audiomack track and tag the resulting mp3.

    Returns:
        list: filenames to open (empty when the download failed or the
        file already exists locally).
    """
    try:
        data = get_audiomack_data(mc_url)
    except Exception as e:
        puts(colored.red("Problem downloading ") + mc_url)
        print(e)
        # Fixed: the original fell through here and raised NameError on
        # 'data' below; bail out with an empty result instead.
        return []

    filenames = []
    track_artist = sanitize_filename(data['artist'])
    track_title = sanitize_filename(data['title'])
    track_filename = track_artist + ' - ' + track_title + '.mp3'

    if folders:
        # Group tracks into per-artist directories.
        if not exists(track_artist):
            mkdir(track_artist)
        track_filename = join(track_artist, track_filename)

    if exists(track_filename):
        puts(
            colored.yellow("Skipping")
            + colored.white(': ' + data['title'] + " - it already exists!"))
        return []

    puts(
        colored.green("Downloading")
        + colored.white(': ' + data['artist'] + " - " + data['title']))
    download_file(data['mp3_url'], track_filename)
    tag_file(track_filename,
             artist=data['artist'],
             title=data['title'],
             year=data['year'],
             genre=None,
             artwork_url=data['artwork_url'])
    filenames.append(track_filename)
    return filenames
def download(url, destination_dir, file_name, file_size):
    """
    Downloads a file.

    :param url: remote location of the file
    :param destination_dir: target directory where the file will be download
    :param file_name: name of the target file
    :param file_size: size of the file to download (used only for the message).
    """
    target = os.path.join(destination_dir, file_name)
    print(colored.green(literals.DOWNLOADING % (url, target, file_size)))
    # Fetch the whole remote resource, then persist it in one write.
    response = requests.get(url)
    with open(target, 'wb') as out_file:
        out_file.write(response.content)
async def save_signal(strategy, df, i, perf, direction):
    """Persist one trading signal; on a duplicate, refresh returns instead.

    :param strategy: object carrying broker/symbol/period/system attributes.
    :param df: frame whose row label at position ``i`` is the signal timestamp.
        NOTE(review): uses the deprecated ``df.ix`` accessor — behavior kept
        as-is; confirm pandas version before migrating to .iloc/.loc.
    :param i: positional index of the signal row in *df*.
    :param perf: performance frame (may be None) used to update returns
        when the signal already exists.
    :param direction: signal direction value stored on the record.
    """
    try:
        signl = Signals.objects.create(broker=strategy.broker,
                                       symbol=strategy.symbol,
                                       period=strategy.period,
                                       system=strategy.system,
                                       direction=direction,
                                       date_time=df.ix[i].name,
                                       returns=None)
        signl.save()
        print(
            colored.green("Signal saved for {0} -> {1}.".format(
                df.ix[i].name, strategy.symbol)))
    except IntegrityError:
        # Signal already recorded: update its returns from the shifted perf
        # series instead. (Idiom fix: `not perf is None` -> `perf is not None`,
        # nested ifs flattened.)
        if perf is not None and len(perf.index) > 0:
            await update_returns(strategy=strategy, direction=direction,
                                 date_time=df.ix[i].name, perf=perf.shift(-1))
    except Exception as err:
        print(colored.red("At save_signal {}".format(err)))
def img_writer(info: dict, pdfm: list, df: PandasDF, job) -> None:
    """Render the Monte Carlo chart for a strategy and record its public URL.

    Plots *df* plus every frame in *pdfm* into one image, saves it, and
    stores the resulting image URL on the matching Stats record.
    Errors are reported to stdout rather than raised.
    """
    try:
        out_image = filename_constructor(info=info, folder="mc", mc=True)
        df.plot(lw=3, color='r')
        for curve in pdfm:
            curve.plot()
        plt.savefig(out_image)
        plt.close()
        img = "{0}=={1}=={2}=={3}=={4}.png".format(
            info["broker"], info["symbol"], info["period"],
            info["system"], info["direction"])
        stats = Stats.objects.get(broker__slug=info["broker"],
                                  symbol__symbol=info["symbol"],
                                  period__period=info["period"],
                                  system__title=info["system"],
                                  direction=job.direction)
        stats.mc = "https://quantrade.co.uk/static/collector/images/mc/" + img
        stats.save()
        print(colored.green("Image saved {}.".format(img)))
    except Exception as err:
        print(colored.red("img_writer {}".format(err)))
def run_by_id(qids):
    """ command: `leetcode-dl -q qid`"""
    puts('====== Leetcode-dl ======')
    # No config yet: walk the user through first-time setup and stop.
    if not os.path.exists(CONFIG_FILE):
        puts(
            colored.yellow(
                'Can\'t load config.\nPlease Set config file First'))
        cli_set_config()
        puts()
        puts(
            colored.green(
                'Now you can start download, just type `leetcode-dl` again'))
        return
    # Config exists: download the requested questions.
    config = get_config_from_file(CONFIG_FILE)
    puts('The solutions will download to folder:' + config['dist'])
    puts()
    leetcode = Leetcode(config)
    download(leetcode, qids)
def apply(self):
    """Apply every template step of the project, then print its CLI outputs."""
    self.puts(colored.blue("Applying project..."))
    context = self.get_initial_context()
    context.update(self.collect_parameters())
    for number, name, filename, template_type in self.steps():
        with indent(2):
            puts(colored.cyan("{} ({})".format(filename, template_type)))
        with indent(4):
            if self.debug:
                puts(colored.white(u"✸ Applying template {} with context {}".format(filename, context)))
            # Dispatch to the type-specific handler, e.g. apply_xyz_template.
            handler = getattr(self, 'apply_{}_template'.format(template_type))
            handler(name, filename, context)
    self.puts(colored.blue("Project Outputs:"))
    for key, value in six.iteritems(context):
        if not key.startswith('Clioutput'):
            continue
        with indent(2):
            puts(colored.cyan(key[9:]))  # strip the 'Clioutput' prefix
        with indent(4):
            puts(colored.green(value))
def init(contestName, fileNames):
    """Scaffold a contest directory with one C++ file per name plus input.txt.

    :param contestName: directory to create under the current working dir.
    :param fileNames: filenames (expected ~6 .cpp files) to create from the
        C++ template.
    """
    contest_dir = os.path.join(os.getcwd(), contestName)
    print(colored.green('Make some files and folders for ' + colored.magenta(contestName)))
    # create a directory with contest name
    try:
        os.mkdir(contest_dir)
    except OSError:
        print("Failed! This directory already exists.")
    else:
        print(colored.yellow('Directory is made'))
        # create files for contest (should be 6 cpp files);
        # the template is loop-invariant, so fetch it once.
        template = template_cpp()
        for source_name in fileNames:
            # `with` guarantees the handle is closed even on write errors
            # (the originals were opened with open()/close() pairs).
            with open(os.path.join(contest_dir, source_name), "w+") as f:
                f.write(template)
        # create input file
        with open(os.path.join(contest_dir, "input.txt"), "w+"):
            pass
        print(colored.cyan('Files have been created'))
def convert_mt_one(sym):
    """Convert MetaTrader CSV exports for *sym* into pickled OHLCV frames.

    One file per standard period (minutes): 5, 15, 30, 60, 240, 1440,
    10080, 43200. Failures for individual periods are reported and skipped.
    """
    for period in (5, 15, 30, 60, 240, 1440, 10080, 43200):
        try:
            source = join(META_PATHS[0],
                          'DATA_MODEL_Ava Trade EU Ltd._{}_{}.csv'.format(sym, period))
            frame = get_mt(source, which=0)
            # Normalize MetaTrader's upper-case column names.
            frame.rename(columns={
                'OPEN': 'Open',
                'HIGH': 'High',
                'LOW': 'Low',
                'CLOSE': 'Close',
                'VOLUME': 'Volume'
            }, inplace=True)
            frame.to_pickle(join(STORAGE_PATH, 'mt', '0',
                                 '{}_{}.p'.format(sym, period)))
            print(colored.green('Converted for {} {}'.format(sym, period)))
        except Exception as err:
            print(colored.red(err))
def watchAnime(name):
    """Increment the stored episode counter for an anime and print the update.

    :param name: (possibly partial) anime name resolved via getEntry.
    """
    entry = getEntry(name, ANIME_DB)
    name = entry[0]
    episode = entry[1] + 1
    conn = sqlite3.connect(DATABASE_URL)
    # Bug fix: the connection previously leaked if execute/commit raised;
    # the finally block now guarantees it is closed.
    try:
        c = conn.cursor()
        t = (episode, name,)
        # Table name comes from the ANIME_DB constant, not user input;
        # the values themselves are bound with placeholders.
        sql = "UPDATE {tn} SET Episode = ? WHERE Name = ?".format(tn=ANIME_DB)
        c.execute(sql, t)
        conn.commit()
    finally:
        conn.close()
    print("")
    puts(colored.yellow("Update:\n"))
    with indent(4):
        puts(colored.magenta("Name: %s" % (name)))
        puts(colored.green("Episode: %s\n" % (episode)))
def handle_REGISTER(self, name):
    """Register *name* in the chat, rejecting names already in use."""
    if name in self.factory.users:
        self.sendLine("Sorry, %r is taken. Try something else." % name)
        return
    # Greet the newcomer privately, announce them to everyone else.
    self.sendLine('Welcome to the chat, %s.' % (name, ))
    self.broadcastMessage(colored.green('%s has joined the chanel.' % (name, )))
    self.name = name
    self.factory.users[name] = self
    self.state = "CHAT"
    self.updateSessionInfo()
    if len(self.factory.users) > 1:
        self.sendLine('Participants in chat: %s ' % (", ".join(self.factory.users)))
    else:
        self.sendLine("You're the only one here, %r" % name)
def testPublicReadBucketACL(idx, bucket, region):
    """Check whether anonymous users can read the bucket's ACL.

    :param idx: test number (string) used in the progress banner.
    :param bucket: bucket name under test.
    :param region: AWS region of the bucket.
    :return: dict with "testresult" ("pass"/"fail") and "testdata"
        (the ACL grants when readable, else None).
    """
    puts(
        colored.white('[Test ' + idx +
                      '] Checking bucket ACL read permission (public)'))
    with indent(4, quote=' '):
        bucketACL = unauth.getBucketACL(bucket, region)
        # Guard clause: no ACL back means the read was denied -> pass.
        if bucketACL is None:
            puts(
                colored.green(
                    'Permission denied for reading public bucket ACL'))
            return {"testresult": "pass", "testdata": None}
        url = "https://" + region + ".amazonaws.com/" + bucket + "/?acl"
        puts(colored.red("Public users may read this bucket's ACL"))
        puts(
            colored.red(
                str(len(bucketACL)) +
                " ACL grants found attached to this bucket"))
        puts(colored.red("Nav here to see the ACL: " + url))
        return {"testresult": "fail", "testdata": bucketACL}
def createnewnote(docopt_args):
    """Create a note from interactive input and sync it to the cloud.

    Requires both a <note_title> argument and the -m flag in *docopt_args*;
    reads body lines until the '/pq' sentinel. (Python 2: uses raw_input.)
    """
    notebody = ""
    if docopt_args["<note_title>"] and docopt_args["-m"]:
        with indent(4, quote=' >'):
            print(Back.YELLOW + Fore.RED + 'Type the body of the notes' + Back.RESET + Fore.RESET + Style.BRIGHT + ' (Press ' + Back.YELLOW + Fore.RED + "/pq" + Back.RESET + Fore.RESET + ' to save & exit)' + Style.NORMAL + Fore.GREEN)
            sentinel = '/pq'  # ends when this string is seen
            # Accumulate body lines; join keeps the trailing newline per line.
            notebody = "".join(line + "\n" for line in iter(raw_input, sentinel))
            print(Fore.RESET)
        notetitle = docopt_args["<note_title>"]
        note = NoteOperations()
        note.save(title=notetitle, body=notebody)
        with indent(4, quote='√ '):
            puts(colored.green("Successfully saved"))
        note.synctocloud()
def get_anime_episode_urls(url, data):
    """POST *data* to *url* and return the first usable episode download URL.

    Filters "none" placeholders out of the JSON response values, follows a
    single redirect via a HEAD probe when present, and prints the resolved
    URL. Returns None (after printing a failure message) when the response
    cannot be parsed.
    """
    links = requests.post(url, data=data)
    try:
        # Single-pass filter replaces the O(n^2) `while "none" in links:
        # links.remove("none")` loop; relative order is preserved.
        candidates = [link for link in links.json().values() if link != "none"]
        resp_url = candidates[0]
        episode_download_url = resp_url
        redirect_message = ""
        # Grab redirect url if it exists — best effort only.
        try:
            headers = requests.head(resp_url).headers
            if 'Location' in headers:
                episode_download_url = headers['Location']
                redirect_message = " -> " + colored.green("Redirect") + ": " + episode_download_url
        except Exception:
            # HEAD probe failed; fall back to the original URL.
            pass
        print(">> " + colored.yellow("URL") + ": " + resp_url + redirect_message)
        return episode_download_url
    except Exception:
        # Narrowed from a bare except: bad JSON / empty candidate list.
        print("Failed: {}".format(data))
def init(contestName, fileNames):
    """Scaffold a contest directory with template files plus input/output files.

    :param contestName: directory to create under the current working dir.
    :param fileNames: filenames (expected ~6 .cpp files) seeded with the
        competitive-programming template via write_to_file.
    """
    # create a directory with contest name
    try:
        print(
            colored.blue('Making files and folders for ' +
                         colored.magenta(contestName)))
        os.makedirs(os.path.join(os.getcwd(), contestName))
    except OSError:
        print(
            colored.red(
                "Failed! This directory cannot be created as it exists."))
        return
    print(colored.yellow('Directory is made'))
    # create files for contest (should be 6 cpp files);
    # idiom fix: iterate the names directly instead of range(len(...)).
    for source_name in fileNames:
        write_to_file(source_name, template_cp(), contestName)
    # create empty input/output files
    write_to_file('input.txt', '', contestName)
    write_to_file('output.txt', '', contestName)
    print(colored.green('Files have been created. Happy Coding!'))
def fix_check(klass, buckets, dry_run, fix_args=None):
    """Run a *klass* check over each bucket and (optionally) apply the fix.

    :param klass: check class instantiated per bucket (must expose
        perform()/fix()/status).
    :param buckets: bucket name filter passed to fetch_buckets.
    :param dry_run: when True, report what would be fixed without fixing.
    :param fix_args: extra kwargs for check.fix(). Bug fix: the default was
        a mutable `{}` literal shared across calls; now a None sentinel.
    """
    if fix_args is None:
        fix_args = {}
    for bucket in fetch_buckets(buckets):
        check = klass(bucket)
        check.perform()
        if check.status == 'passed':
            message = colored.green('already enabled')
        elif check.status == 'denied':
            message = colored.red('access denied')
        else:
            if dry_run:
                message = colored.yellow('to be enabled')
            else:
                try:
                    check.fix(fix_args)
                    message = colored.blue('just enabled')
                except botocore.exceptions.ClientError as e:
                    message = colored.red(str(e))
        puts(bucket.name + ' ' + message)
def stream_logs():
    """Stream the app's logs over a websocket until the user hits ctrl-c."""
    conf = Config()
    # Request the correct streamer URL from siphon-web
    auth = Auth()
    siphon = Siphon(auth.auth_token)
    # Track usage analytics for this command.
    mixpanel_event(MIXPANEL_EVENT_LOGS, properties={'app_id': conf.app_id})
    streamer_url = siphon.get_streamer_url(conf.app_id, 'log_reader')
    puts(colored.yellow('Connecting...'))
    connection = websocket.create_connection(streamer_url)
    puts(colored.green('Streaming logs and errors... (ctrl-c to stop)\n'))
    try:
        for message in connection:
            print(message)
    except KeyboardInterrupt:
        puts(colored.yellow('\nClosing the connection.'))
        connection.close()
def delete_unencrypted_version(bucket_name, key, id, dry_run):
    """Delete one unencrypted S3 object version (or report what would happen).

    NOTE(review): the parameter name `id` shadows the builtin; kept because
    it is part of the public signature.

    :return: one of 'encrypted', 'to be deleted', 'deleted', 'error'.
    """
    object_version = s3().ObjectVersion(bucket_name, key, id)
    label = key + ' ' + id + ' '
    try:
        obj = object_version.get()
        # Either server-side or customer-supplied encryption counts.
        if obj.get('ServerSideEncryption') or obj.get('SSECustomerAlgorithm'):
            puts(label + colored.green('encrypted'))
            return 'encrypted'
        if dry_run:
            puts(label + colored.blue('to be deleted'))
            return 'to be deleted'
        puts(label + colored.blue('deleted'))
        object_version.delete()
        return 'deleted'
    except (botocore.exceptions.ClientError,
            botocore.exceptions.NoCredentialsError) as e:
        puts(label + colored.red(str(e)))
        return 'error'
def describe(migration):
    """Print membership, DB info, plan diff, and shard table for *migration*.

    :return: 0 (CLI exit code).
    """
    target = migration.target_couch_config
    puts(u'\nMembership')
    with indent():
        puts(get_membership(target).get_printable())
    puts(u'\nDB Info')
    print_db_info(target)
    puts(u'\nShard allocation')
    diff_with_db = diff_plan(migration)
    if not diff_with_db:
        puts(colored.green('DB allocation matches plan.'))
    else:
        puts(colored.yellow('DB allocation differs from plan:\n'))
        puts("{}\n\n".format(diff_with_db))
    db_names = sorted(get_db_list(target.get_control_node()))
    print_shard_table(
        [get_shard_allocation(target, db_name) for db_name in db_names])
    return 0
def selenium_subdomain(self):
    """Drive the site's subdomain-finder tool against self.address and print results."""
    self.driver.get(f"{self.url}/sys/tools/subdomainfinder/")
    time.sleep(1)
    self.driver.find_element_by_xpath("//*[@id='Enumhost']").send_keys(f"{self.address}")
    time.sleep(1)
    self.driver.find_element_by_xpath("//*[@id='start-scan']").click()
    time.sleep(2)  # give the scan time to populate the results table
    for row in self.driver.find_elements_by_id("subdomain-tbody"):
        if "OOps nothing found" in row.text:
            sys.stdout.write(str(colored.red("[-] Sub Domain Not Found !")))
            sys.stdout.flush()
        else:
            header = (colored.red("[ + HOST ]") + " "
                      + colored.yellow("[ + SubDomain ]") + " "
                      + colored.green("[ + IP ]") + " "
                      + colored.magenta("[ + ASN ]"))
            print(header)
            print(colored.green(row.text))
    time.sleep(1)
    self.driver.close()
def scanBucket(bucket, invasive, fileSearch, objectScan):
    """Run the full unauthenticated test battery against one S3 bucket.

    :param bucket: bucket name to scan.
    :param invasive: forwarded to the public-write test; gates the one
        invasive check.
    :param fileSearch: forwarded to the file-search test.
    :param objectScan: forwarded to the object read / object-ACL tests.
    :return: dict aggregating every sub-test's result; entries stay None
        when the region lookup fails and no tests run.
    """
    puts(colored.white('Scan Target: ') + colored.green('s3://' + bucket))
    with indent(4, quote=colored.cyan(' |')):
        # Pre-seed all results so the summary dict is always complete,
        # even when no test runs.
        bucketObjects, bucketACL, readableObjects, objectACLs, writeResults, fileSearchResults = (
            None, None, None, None, None, None)
        region = unauth_noninvasive.getBucketRegion(bucket)
        if region is not None:
            # Tests are numbered sequentially for display. Order matters:
            # the later object tests consume bucketObjects["testdata"]
            # produced by the first list-bucket test.
            idx = 1
            bucketObjects = unauth_noninvasive.testPublicListBucket(
                str(idx), bucket, region)
            idx += 1
            bucketACL = unauth_noninvasive.testPublicReadBucketACL(
                str(idx), bucket, region)
            idx += 1
            readableObjects = unauth_noninvasive.testPublicReadObject(
                str(idx), bucket, region, bucketObjects["testdata"], objectScan)
            idx += 1
            objectACLs = unauth_noninvasive.testPublicReadObjectACL(
                str(idx), bucket, region, bucketObjects["testdata"], objectScan)
            idx += 1
            writeResults = unauth_invasive.testPublicObjectWrite(
                str(idx), bucket, region, invasive)
            idx += 1
            fileSearchResults = filesearch.fileSearchTest(
                str(idx), bucket, region, bucketObjects["testdata"], fileSearch)
    puts(colored.white('Finished scanning bucket s3://' + bucket))
    return {
        "bucket": bucket,
        "region": region,
        "objects": bucketObjects,
        "bucketacl": bucketACL,
        "readableObjects": readableObjects,
        "objectACLs": objectACLs,
        "writeResults": writeResults,
        "fileSearchResults": fileSearchResults
    }
def tarbell_install_template(args):
    """Install a project template.

    Clones the template repo given as the first positional arg, reads its
    _base/base.py for a NAME, registers the template in settings, and
    removes the temporary clone. Exits early if the URL is already
    registered.
    """
    with ensure_settings(args) as settings:
        template_url = args.get(0)
        # Skip installation if this URL is already registered.
        matches = [
            template
            for template in settings.config["project_templates"]
            if template["url"] == template_url
        ]
        if matches:
            puts("\n{0} already exists. Nothing more to do.\n".format(
                colored.yellow(template_url)))
            sys.exit()
        puts("\nInstalling {0}".format(colored.cyan(template_url)))
        tempdir = tempfile.mkdtemp()
        puts("\n- Cloning repo to {0}".format(colored.green(tempdir)))
        Repo.clone_from(template_url, tempdir)
        # Dynamically import _base/base.py from the fresh clone to read
        # its metadata (uses the legacy `imp` module).
        base_path = os.path.join(tempdir, "_base/")
        filename, pathname, description = imp.find_module('base', [base_path])
        base = imp.load_module('base', filename, pathname, description)
        puts("\n- Found _base/base.py")
        try:
            name = base.NAME
            puts("\n- Name specified in base.py: {0}".format(
                colored.yellow(name)))
        except AttributeError:
            # No NAME constant: fall back to the last URL path segment.
            name = template_url.split("/")[-1]
            puts("\n- No name specified in base.py, using '{0}'".format(
                colored.yellow(name)))
        settings.config["project_templates"].append({
            "name": name,
            "url": template_url
        })
        settings.save()
        # The clone was only needed for metadata; discard it.
        _delete_dir(tempdir)
        puts("\n+ Added new project template: {0}".format(
            colored.yellow(name)))
def category_selection():
    """
    Asks the user to choose in which category to look for a product.
    If the user enters a wrong choice, asks to choose again (recursively).
    :return categories[category_number]: chosen category.
    """
    puts(
        colored.green(
            "Rentrez le numéro de la catégorie choisie pour accéder aux produits : "
        ))
    # getting categories from the database
    categories = []
    for element in Category.objects.get_categories():
        categories.append(element['name'])
    # asking the user to choose
    for i in range(len(categories)):
        with indent(4):
            puts(colored.blue(str(i + 1) + ' : ' + categories[i]))
    # NOTE(review): a non-numeric answer makes int() raise ValueError here,
    # uncaught — behavior kept as in the original.
    category_number = int(prompt.query(" "))
    try:
        category_number = category_number - 1
    except TypeError:
        # asking again in case of wrong choice.
        puts(
            colored.red(
                "Attention : vous devez rentrer un nombre de la liste de catégories"
            ))
        # Bug fix: the recursive retry's result was previously discarded,
        # so the caller received None; propagate it with `return`.
        return category_selection()
    else:
        if category_number in range(len(categories)):
            return categories[category_number]
        # asking again in case of wrong choice
        puts(
            colored.red(
                "Attention : vous devez rentrer un nombre de la liste de catégories"
            ))
        # Bug fix: same as above — return the retried selection.
        return category_selection()
def start(self):
    """Process every block of the document, then prompt the user to save.

    Iterates paragraphs and tables, applies the corresponding processor,
    and loops on save until the file is writable or the user gives up.
    (User-facing prompt strings are intentionally left in Chinese.)
    """
    # Run the document-processing pass; save once processing has finished.
    for block in self.iter_block_items(self.document):
        if isinstance(block, Paragraph):
            self.process_paragraph(block)
        elif isinstance(block, Table):
            self.process_table(block)
    # Prompt: 1 = confirm save, 2 = quit without saving.
    inst_options = [{
        'selector': '1',
        'prompt': '确认保存',
        'return': False
    }, {
        'selector': '2',
        'prompt': '退出/不保存',
        'return': True
    }]
    puts(colored.yellow('=' * 50))
    quit = prompt.options('以上内容将发生改变,请确认:', inst_options)
    if quit:
        puts(colored.green('变更未作保存。'))
        exit(0)
    # Retry loop: saving fails with PermissionError while the document is
    # open in another program; ask the user to close it and retry.
    while True:
        try:
            self.document.save(self.file)  # save the processed file
            puts(colored.red('变更已保存。'))
            exit(0)
        except PermissionError as e:
            print(e)
            inst_options = [{
                'selector': '1',
                'prompt': '重试',
                'return': False
            }, {
                'selector': '2',
                'prompt': '退出/不保存',
                'return': True
            }]
            quit = prompt.options('文件是否已经打开?请关闭后重试。', inst_options)
            if quit:
                exit(0)
def tarbell_list(command, args):
    """
    List tarbell projects.

    Scans every directory under the configured projects_path, dynamically
    imports its tarbell_config module to read the project title, and prints
    an aligned title/name table.
    """
    with ensure_settings(command, args) as settings:
        projects_path = settings.config.get("projects_path")
        if not projects_path:
            show_error("{0} does not exist".format(projects_path))
            sys.exit()
        puts("Listing projects in {0}\n".format(colored.yellow(projects_path)))
        # Collect (directory, title) pairs; track the longest title so the
        # output columns can be aligned.
        longest_title = 0
        projects = []
        for directory in os.listdir(projects_path):
            project_path = os.path.join(projects_path, directory)
            try:
                filename, pathname, description = imp.find_module(
                    'tarbell_config', [project_path])
                config = imp.load_module(directory, filename, pathname,
                                         description)
                title = config.DEFAULT_CONTEXT.get("title", directory)
                projects.append((directory, title))
                if len(title) > longest_title:
                    longest_title = len(title)
            except ImportError:
                # Not a tarbell project directory — skip it silently.
                pass
        if len(projects):
            # Left-pad titles to the widest one plus a space.
            fmt = "{0: <" + str(longest_title + 1) + "} {1}"
            puts(fmt.format('title', 'project name'))
            for projectname, title in projects:
                title = codecs.encode(title, 'utf8')
                puts(
                    colored.yellow(fmt.format(title,
                                              colored.cyan(projectname))))
            puts("\nUse {0} to switch to a project".format(
                colored.green("tarbell switch <project name>")))
        else:
            puts("No projects found")
def issueAndActivateBounty(state, ipfsHash):
    """Issue and activate a StandardBounties bounty funded from state's wallet.

    Builds, signs, and submits the issueAndActivateBounty transaction, then
    waits for the receipt.

    :param state: dict-like carrying network, wallet (address/private_key),
        amount, token_address, gas_price, gas_limit.
    :param ipfsHash: IPFS hash of the bounty data payload.
    :return: (success, old_id) where success means the on-chain bounty count
        increased and old_id is the count before submission (the new bounty's
        id on success).
    """
    web3 = web3_client(state.get('network'))
    bountiesContract = getBountiesContract(state.get('network'))
    # build transaction
    tx = bountiesContract.functions.issueAndActivateBounty(
        state.get('wallet').get('address'),
        9999999999,  # 11/20/2286, https://github.com/Bounties-Network/StandardBounties/issues/25
        ipfsHash,
        state.get('amount'),
        '0x0000000000000000000000000000000000000000',
        # True when paying in an ERC-20 token rather than native ETH.
        state.get('token_address') != '0x0000000000000000000000000000000000000000',
        to_checksum_address(state.get('token_address')),
        state.get('amount')).buildTransaction({
            'from': state.get('wallet').get('address'),
            # Attach ETH value only when the zero address (native token) is used.
            'value': state.get('amount') if state.get('token_address') == '0x0000000000000000000000000000000000000000' else 0,
            'gasPrice': web3.toWei(state.get('gas_price'), 'gwei'),
            'gas': state.get('gas_limit'),
            'nonce': web3.eth.getTransactionCount(state.get('wallet').get('address'))
        })
    signed = web3.eth.account.signTransaction(
        tx, private_key=state.get('wallet').get('private_key'))
    # Bounty count before submission; compared after to detect success.
    old_id = bountiesContract.functions.getNumBounties().call()
    # send transaction and wait for receipt
    print('Funding bounty... ', end='', flush=True)
    receipt = web3.eth.waitForTransactionReceipt(
        web3.eth.sendRawTransaction(signed.rawTransaction))
    new_id = bountiesContract.functions.getNumBounties().call()
    puts(colored.green(web3.toHex(receipt.transactionHash)))
    return old_id < new_id, old_id
def status_daemon():
    """
    Method for showing the status of the daemon at the daemon CLI
    :return: True when the daemon is running, False on interface/network
        errors (None when the daemon is simply not running).
    """
    llogger = local_logger.LocalLogger()
    llogger.log_call(sys._getframe().f_code.co_name)
    params = CMDParser(program_path="daemon status",
                       description="Print the daemon status.",
                       arguments=[Argument.INTERFACE]).parse_arguments()
    # Resolve the host interface first; bail out with guidance if invalid.
    try:
        network_info = network.NetworkInfo(params.interface)
    except NetworkException:
        puts(
            colored.red(
                "Host interface not valid. Specify a different host interface."
            ))
        puts(
            colored.red("Possible options are: " +
                        " ".join(network.get_interface_list())))
        llogger.debug(
            "Missing host interface. Add one with option --interface.")
        return False
    # Query the daemon over Pyro and report its mode.
    try:
        proxy = pyro_interface.get_daemon_proxy(network_info.ip_address)
        if not proxy.is_daemon_running():
            puts(colored.red("Daemon is not running"))
            llogger.debug("Status Daemon: Daemon is not running")
        else:
            daemon_mode = jsonpickle.decode(proxy.get_mode())
            puts(
                colored.green("Daemon is running and in mode: " +
                              str(daemon_mode.value)))
            llogger.debug("Status Daemon: Daemon is running in mode: %s",
                          str(daemon_mode.value))
            return True
    except NetworkException as e:
        puts(colored.red(str(e)))
        return False