def branch():
    # Load the branch-set definitions from the user-configured YAML file.
    branches_yaml_path = path(ctx.user_config.inputs['branches_file'])
    branches_yaml = yaml.safe_load(branches_yaml_path.text())
    # Find the active branch set recorded on any node instance (first hit wins).
    for instance in ctx.env.storage.get_node_instances():
        active_branch_set = instance.runtime_properties.get(
            'active_branch_set')
        if active_branch_set:
            active_branch_set = active_branch_set['name']
            break
    else:
        active_branch_set = None
    # Print each branch set, marking the active one with '*'.
    for name, branch_set in branches_yaml.items():
        indicator = '*' if name == active_branch_set else ' '
        print '{} {}:'.format(colors.red(indicator), colors.magenta(name))
        indent = ' ' * 4
        branch = branch_set.get('branch')
        if branch:
            print '{}branch: {}'.format(indent, colors.green(branch))
        base = branch_set.get('base')
        if base:
            print '{}base: {}'.format(indent, colors.green(base))
        repos = branch_set.get('repos')
        if repos:
            print '{}repos:'.format(indent)
            # repos may be a plain list or a repo -> branch mapping.
            if isinstance(repos, list):
                for repo in repos:
                    print '{}- {}'.format(indent, colors.green(repo))
            else:
                for repo, branch in repos.items():
                    print '{} {}: {}'.format(indent,
                                             colors.green(repo),
                                             colors.green(branch))
        print
def print_msg(code, msg): if code == -1: print red("[ERROR]\t" + msg) elif code == 1: print yellow("[WARNING]\t"+msg) else: print green("[INFO]\t" + msg)
def test_print_log_entry(self):
    # Feed three representative kernel-log lines through print_log_entry
    # while capturing stdout, then compare against the expected colourised
    # output: timestamps green, error lines red/bold, dates yellow and the
    # process name grey.
    stdout = StringIO()
    with patch('sys.stdout', stdout):
        bunga.client.print_log_entry(
            '[ 0.141804] imx-sdma 20ec000.sdma: Direct firmware load for '
            'imx/sdma/sdma-imx6q.bin failed with error -2',
            sys.stdout)
        bunga.client.print_log_entry(
            '[ 0.141826] imx-sdma 20ec000.sdma: external firmware not '
            'found, using ROM firmware',
            sys.stdout)
        bunga.client.print_log_entry(
            '[ 2.497619] 1970-01-01 00:00:02 INFO dhcp_client State change '
            'from INIT to SELECTING.',
            sys.stdout)
    self.assertEqual(
        stdout.getvalue(),
        green('[ 0.141804]') + red(
            ' imx-sdma 20ec000.sdma: Direct firmware load for '
            'imx/sdma/sdma-imx6q.bin failed with error -2',
            style='bold') + '\n' +
        green('[ 0.141826]') +
        (' imx-sdma 20ec000.sdma: external firmware not found, using ROM '
         'firmware\n') +
        green('[ 2.497619]') + yellow(' 1970-01-01 00:00:02') + ' INFO' +
        color(' dhcp_client', 'grey') +
        ' State change from INIT to SELECTING.\n')
def test_tresholds(interpreter, cases):
    # For every utterance, verify intent detection and the ner_crf entity
    # predictions against the expected values and confidence thresholds.
    # NOTE(review): "treshold" misspellings below are runtime strings and
    # are intentionally left untouched here.
    for utter, rules in cases.items():
        print(yellow("case: %s" % rules['case']))
        result = interpreter.parse(utter)
        intent, _, result_entities = extract(result)
        # Intent must exist and match the expected name.
        if not intent:
            print(
                red("INTENT DETECTION FAILED: expecting %s, found %s" %
                    (rules['intent'], None)))
        elif intent['name'] != rules['intent']:
            print(
                red("INTENT DETECTION FAILED: expecting %s, found %s" %
                    (rules['intent'], intent['name'])))
        print(green('result intents: %s' % intent))
        print(green('result entities: %s' % result_entities))
        # Each expected entity must be predicted with the right value and
        # at least the configured confidence.
        for test_entity_data in rules['entity']['ner_crf']:
            print('searching entity: %s with treshold %s' %
                  (test_entity_data['name'], test_entity_data['confidence']))
            for res_ent in result_entities:
                if res_ent['entity'] == test_entity_data['name']:
                    if res_ent['value'] != test_entity_data['value']:
                        print(
                            red("FAILED PREDICTION: expecting %s -> %s, found %s -> %s"
                                % (test_entity_data['name'],
                                   test_entity_data['value'],
                                   res_ent['entity'], res_ent['value'])))
                    elif res_ent['confidence'] < test_entity_data['confidence']:
                        print(
                            red("FAILED PREDICTION: %s prediction is %s that is below treshold %s"
                                % (test_entity_data['name'],
                                   res_ent['confidence'],
                                   test_entity_data['confidence'])))
def restcall(**kwargs): print colors.faint('=' * 120) r = request(**kwargs) print colors.green(r.request.method + ' ' + r.request.url) print_headers(r.request.headers) print if 'data' in kwargs: # print colors.faint('Form data:') # print_headers(kwargs['data']) print kwargs['data'] elif 'json' in kwargs: print_json(kwargs['json']) else: print r.request.body print print colors.faint('({:.3f} seconds)'.format(r.elapsed.total_seconds())) print if r.status_code < 300: color = colors.green elif r.status_code < 400: color = colors.yellow else: color = colors.red print color('{} {}'.format(r.status_code, r.reason)) print_headers(r.headers, format_names=True) print try: print_json(r.json()) except: print r.text return r
def webNMAP(): infile = raw_input("Name of IP file: ") scanner = nmap.PortScanner() with open(infile, 'r') as ips: for ip in ips: print green("[+] Scanning for web ports listed in /setup/web_ports.txt") with open('setup/web_ports.txt','r') as ports: for port in ports: print "[+] Scanning for port %s" % port scanner.scan(hosts=ip,ports=port,arguments="-sS -sV -T4 --open") for host in scanner.all_hosts(): print('\nHost: %s (%s)' % (host, scanner[host].hostname())) print('State: %s' % scanner[host].state()) for proto in scanner[host].all_protocols(): print('----------') print('Protocol: %s' % proto) lport = scanner[host][proto].keys() lport.sort() for port in lport: print ('Port: %s\tstate: %s' % (port, scanner[host][proto][port]['state'])) banner = retBanner(host,port) try: print ('Banner: %s' % banner.rstrip('\n')) except: pass
def version_exacta67(req, target, version, verbose, reporte, archivo): try: objetivo = target + "/modules/system/system.info" texto = req.get(objetivo).text regex = 'version = "(\d+\.\d+)"' pattern = re.compile(regex) version_x = re.findall(pattern, texto) if version_x: print colors.green( '[*] ' ) + "Version expecifica: %s\n" % version_x[0] if verbose else '', reporte.append("Version expecifica: %s<br/>" % version_x[0]) actualizado(req, version_x[0], verbose, reporte, archivo) return True objetivo = target + "/CHANGELOG.txt" texto = req.get(objetivo).text regex = 'Drupal (\d+\.\d+),' pattern = re.compile(regex) version_x = re.findall(pattern, texto) if version_x: print colors.green( '[*] ' ) + "Version expecifica: %s\n" % version_x[0] if verbose else '', reporte.append("Version expecifica: %s<br/>" % version_x[0]) actualizado(req, version_x[0], verbose, reporte, archivo) return True if version == 7: versiones_posibles(req, target, verbose, reporte, archivo) elif version == 6: return False except: print colors.red( '[*] ' ) + "No se pudo obtener la version del core\n" if verbose else '', reporte.append("No se pudo obtener la version del core<br/>")
def shodan(self, scope_file, output):
    """Check Shodan for the scoped domains/IPs and build a report.

    Parses *scope_file* into IP and domain lists, then runs the Shodan
    table report in a child process, writing results to *output*.
    A Shodan API key is required.
    """
    asciis.print_art()
    print(
        green(
            "[+] Shodan Module Selected: O.D.I.N. will check Shodan for the provided domains \
and IPs."))
    # NOTE(review): this __name__ guard inside a method tests the module
    # that defines the class, not the calling script — when this module is
    # imported, everything below never runs.  Looks suspect; confirm intent.
    if __name__ == "__main__":
        report = reporter.Reporter(output)
        scope, ip_list, domains_list = report.prepare_scope(scope_file)
        # Create empty job queue
        jobs = []
        shodan_report = multiprocess.Process(name="Shodan Report",
                                             target=report.create_shodan_table,
                                             args=(ip_list, domains_list))
        jobs.append(shodan_report)
        for job in jobs:
            print(green("[+] Starting new process: {}".format(job.name)))
            job.start()
        for job in jobs:
            job.join()
        report.close_out_reporting()
        print(green("[+] Job's done! Your results are in {}.".format(output)))
def set_perms(header): """ Hardcoded groupIDs and perms """ payload = [{ 'group_id': '36124', 'permission': 'admin' }, { 'group_id': '41497', 'permission': 'write' }, { 'group_id': '45777', 'permission': 'read' }, { 'group_id': '36180', 'permission': 'write' }] for group_perms in payload: try: r = requests.post(base_url + "/repositories/" + org + "/" + new_repo + "/groups/", json=group_perms, headers=header) r.raise_for_status() except requests.exceptions.HTTPError as err: print red("Setting perms failed: {}").format(err) sys.exit(1) print green("All perms set.")
def execute(self, **kwargs):
    # NB: kwargs are for testing and pass-through to underlying subprocess process spawning.
    go_targets = OrderedSet(target for target in self.context.target_roots
                            if self.is_go(target))
    args = self.get_passthru_args()
    # Both go targets and pass-through args are required; otherwise build a
    # coloured usage message that highlights whichever part is missing.
    if not go_targets or not args:
        msg = (yellow(
            'The pants `{goal}` goal expects at least one go target and at least one '
            'pass-through argument to be specified, call with:\n') +
               green(' ./pants {goal} {targets} -- {args}').format(
                   goal=self.options_scope,
                   targets=(green(' '.join(t.address.reference()
                                           for t in go_targets))
                            if go_targets else red('[missing go targets]')),
                   args=green(' '.join(args))
                   if args else red('[missing pass-through args]')))
        raise self.MissingArgsError(msg)
    # Aggregate each target's GOPATH and import path, then invoke the tool
    # once with the combined GOPATH.
    go_path = OrderedSet()
    import_paths = OrderedSet()
    for target in go_targets:
        self.ensure_workspace(target)
        go_path.add(self.get_gopath(target))
        import_paths.add(target.import_path)
    self.execute_with_go_env(os.pathsep.join(go_path), list(import_paths),
                             args, **kwargs)
def run_urlvoid_lookup(self, domain):
    """Collect reputation data from URLVoid for the target domain.

    Returns an ElementTree object, or None when the lookup is skipped
    (IP target, missing API key) or fails.  A free API key is required.
    """
    if helpers.is_ip(domain):
        print(
            red("[!] Target is not a domain, so skipping URLVoid queries.")
        )
        return None
    try:
        if self.urlvoid_api_key == "":
            print(
                green(
                    "[-] No URLVoid API key, so skipping this test."))
            return None
        print(
            green("[+] Checking reputation for {} with URLVoid".
                  format(domain)))
        url = "http://api.urlvoid.com/api1000/{}/host/{}"\
            .format(self.urlvoid_api_key, domain)
        response = requests.get(url)
        return ET.fromstring(response.content)
    except Exception as error:
        print(red("[!] Could not load URLVoid for reputation check!"))
        print(red("L.. Details: {}".format(error)))
        return None
def spammerv2():
    """Keyboard spammer: types each line of spammer.txt followed by Enter,
    one line every 0.5 s.  Creates an empty spammer.txt when missing."""
    time.sleep(2)
    print(red())
    print(""" █▀ █▀█ ▄▀█ █▀▄▀█ █▀▄▀█ █▀▀ █▀█   █░█ ▀█ ▄█ █▀▀ █▀█ █░▀░█ █░▀░█ ██▄ █▀▄   ▀▄▀ █▄ \n\nby Alfo """)
    print("[+] The spam is starting...", white())
    print("[?] If you want to change the word write them in spammer.txt", white())
    print("Good Trolling, by alfo", green())
    try:
        f = open("spammer.txt", 'r')
    except IOError:
        print("[-] No file named spammer.txt creating new", red())
        f = open("spammer.txt", "x")
    # Fix: ensure the file handle is closed (it previously leaked).
    try:
        for word in f:
            keyboard.write(word, 0.01)
            keyboard.press('enter')
            time.sleep(0.5)
            print("[+] Succesfully Writed" + " " + word, green())
    finally:
        f.close()
def sshdo(args, sshdo_dir): servers = raw_input('Servers: ') command = raw_input('Command: ') #if not os.path.exists('logs'+datetime): os.makedirs('logs/'+datetime) if not os.path.exists("{0}/logs/{1}".format(sshdo_dir, log_dir)): os.makedirs("{0}/logs/{1}".format(sshdo_dir, log_dir)) list = server_list(servers) fail_to_connect = [] for host in list: try: ssh.connect(host, username='******', allow_agent=True, timeout=5) except Exception: print red(host + "\n Could not connect \n") fail_to_connect.append(host) else: # log_f = open('logs/'+datetime+'/'+host+'.log', 'w') log_f = open( "{0}/logs/{1}/{2}.log".format(sshdo_dir, log_dir, host), 'w') log_f.write(command + "/n/n") if args.debug: paramiko.util.log_to_file( "{0}/logs/debug.log".format(sshdo_dir)) print green("\n" + host + "\n") stdin, stdout, stderr = ssh.exec_command(command) for data_line in stdout: print data_line.rstrip() log_f.write(data_line) log_f.close() ssh.close() if len(fail_to_connect) > 0: print red("Failed to connect to these hosts %s" % (fail_to_connect)) print "Log files are in logs/%s" % (log_dir) return
def sshget(): #prompt for info servers = raw_input('Servers: ') remote_file = raw_input('Remote File: ') local_dir = raw_input('Local Drop Directory: ') remote_base_file = os.path.basename(remote_file) list = server_list(servers) fail_to_connect = [] for host in list: try: ssh.connect(host, username='******', allow_agent=True, timeout=5) except Exception: print red(host + "\n Could not connect \n") fail_to_connect.append(host) else: print green("\n" + host + "\n") local_file = local_dir + host + "." + remote_base_file sftp = ssh.open_sftp() sftp.get(remote_file,local_file) ssh.close() if len(fail_to_connect) > 0: print red("Failed to connect to these hosts %s" % (fail_to_connect)) return "sshget"
def get_chest():
    # Puzzle: press the four switches in the right order to open the chest,
    # then pick a key.  A wrong order shocks the player and returns to room2.
    global triangle
    global square
    global healthp
    global chest
    comb = ['2', '4', '1', '3']  # the correct switch order
    ans = []
    print "The chest is locked and has four colored switches on it:\n"
    print green("[1GREEN]"), yellow("[2YELLOW]"), red("[3RED]"), blue("[4BLUE]\n")
    for nums in comb:
        press = raw_input('Which switch should %s press? ' % name)
        ans.append(press)
    if ans == comb:
        pause('+++')
        print "The chest suddenly pops open! There is a hexagon key and a triangle key...\n"
        choo = raw_input('Which one should %s take? ' % name)
        if choo in ('t', 'triangle', 'the triangle'):
            pause('...')
            print "%s takes the triangle shaped key and the chest snaps shut!\n" % name
            triangle = True
            chest = False
        elif choo in ('h', 'hexagon', 'the hexagon'):
            pause('...')
            print "%s takes the hexagon shaped key and the chest snaps shut!\n" % name
            # NOTE(review): taking the hexagon key sets `square`, not a
            # hexagon flag — confirm this is intentional.
            square = True
            chest = False
        else:
            # Any other answer re-runs the puzzle.
            pause('...')
            print "The chest snaps shut!\n"
            get_chest()
    else:
        print "\nThe chest shocks %s!" % name
        health(-10)
        room2()
def getDataSparse(self, file):
    """Load a sparse matrix from a .npz *file* and split it into X, Y.

    The last column is taken as the label vector Y; the remaining columns
    form X.  Optionally L2-normalises X column-wise when Cfg.Normalize is
    set.  Returns (X, Y) as sparse matrices.
    """
    from sklearn import preprocessing
    self.logger.info(' Load in data from .npz file "' + str(file) + '"')
    t0 = perf_counter()  # timer
    X = sparse.load_npz(file)
    self.logger.info(' Loading data is finished with success.')
    self.logger.info(green(' Time taken : ') +
                     red(str(round(perf_counter() - t0, 5))) + " " +
                     green("Sec.") + ", X.shape =" + red(str(X.shape)))
    print(" +++++++++ ", X.shape)
    print(X[0, :].todense())
    # Last column is the target; the rest are features.
    Y = X[:, X.shape[1] - 1]
    X = X[:, 0:(X.shape[1] - 1)]
    # input normalization
    if Cfg.Normalize:
        X = preprocessing.normalize(X, axis=0, norm='l2')
        self.logger.info('Using data normalization...')
    return X, Y
def print_response(self, res, res_body):
    """Pretty-print an HTTP response: green headers, then a body preview.

    JSON bodies are re-indented and truncated to 50 lines; text bodies are
    printed decoded; other content types are skipped.
    """
    header_text = "%s %d %s\n%s" % (res.response_version, res.status,
                                    res.reason, res.headers)
    print('\n----------------------------\n')
    print(green(header_text))
    body_text = ' '
    if res_body is not None:
        content_type = res.headers.get('Content-Type', '')
        if content_type.startswith('application/json'):
            try:
                pretty = json.dumps(json.loads(res_body.decode()), indent=2)
                if pretty.count('\n') < 50:
                    body_text = pretty
                else:
                    lines = pretty.splitlines()
                    body_text = "%s\n(%d lines)" % ('\n'.join(lines[:50]),
                                                    len(lines))
            except ValueError:
                body_text = res_body.decode()
        elif content_type.startswith('text/'):
            body_text = res_body.decode()
    if body_text != '':
        print(green(body_text))
def sshdo(args, sshdo_dir): servers = raw_input('Servers: ') command = raw_input('Command: ') #if not os.path.exists('logs'+datetime): os.makedirs('logs/'+datetime) if not os.path.exists("{0}/logs/{1}".format(sshdo_dir, log_dir)): os.makedirs("{0}/logs/{1}".format(sshdo_dir, log_dir)) list = server_list(servers) fail_to_connect = [] for host in list: try: ssh.connect(host, username='******', allow_agent=True, timeout=5) except Exception: print red(host + "\n Could not connect \n") fail_to_connect.append(host) else: # log_f = open('logs/'+datetime+'/'+host+'.log', 'w') log_f = open("{0}/logs/{1}/{2}.log".format(sshdo_dir, log_dir, host), 'w') log_f.write(command+"/n/n") if args.debug: paramiko.util.log_to_file("{0}/logs/debug.log".format(sshdo_dir)) print green("\n" + host + "\n") stdin, stdout, stderr = ssh.exec_command(command) for data_line in stdout: print data_line.rstrip() log_f.write(data_line) log_f.close() ssh.close() if len(fail_to_connect) > 0: print red("Failed to connect to these hosts %s" % (fail_to_connect)) print "Log files are in logs/%s" % (log_dir) return
def sshput(): print "sshput block" #prompt for info servers = raw_input('Servers: ') local_file = raw_input('Local File or Direcotry: ') remote_dir = raw_input('Remote Location: ') remote_base_file = os.path.basename(local_file) remote_file = remote_dir + remote_base_file list = server_list(servers) fail_to_connect = [] for host in list: try: ssh.connect(host, username='******', allow_agent=True, timeout=5) except Exception: print red(host + "\n Could not connect \n") fail_to_connect.append(host) else: print green("\n" + host + "\n") sftp = ssh.open_sftp() sftp.put(local_file,remote_file,confirm=True) ssh.close() if len(fail_to_connect) > 0: print red("Failed to connect to these hosts %s" % (fail_to_connect)) return "sshput"
def singleivpsmipipsettinggroup(ivpid): ipgroup = [] for ge in range(4): #print('ge is '+str(ge)+r.get(str(ivpid)+'smipge'+str(ge+1)+'ip')) ipgroup.append(r.get(str(ivpid) + 'smipge' + str(ge + 1) + 'ip')) print green(str(ipgroup)) return ipgroup
def get_problem_input(n):
    """Download the Advent of Code input for day *n* and scaffold TS files.

    Fetches the puzzle input (unless a non-empty n.in already exists) and
    creates nA.ts / nB.ts from ../template.js when missing.
    """
    in_file = n + '.in'
    # Download only when the input file is missing or empty.
    if not os.path.exists(in_file) or not open(in_file).read().strip():
        link = 'https://adventofcode.com/' + event + '/day/' + n + '/input'
        data = requests.get(link, cookies=cookie).text
        if "Please don't repeatedly request this endpoint before it unlocks!" in data:
            print(red('Input not available!', bg='black'))
        else:
            with open(in_file, 'w') as f:
                f.write(data.strip('\n'))
            print(
                green('Data saved in Input file ' + in_file + '!', bg='black'))
    else:
        # Typo fix: "exits" -> "exists".
        print(red('Input file ' + in_file + ' already exists!', bg='black'))
    template = open('../template.js', 'r').read()
    for l in ['A', 'B']:
        ts_file = n + l + '.ts'
        if not os.path.exists(ts_file):
            with open(ts_file, 'w') as f:
                f.write(template.format(n))
            print(
                green('Typescript file ' + ts_file + ' generated!',
                      bg='black'))
        else:
            # Typo fix: "exits" -> "exists".
            print(
                red('Typescript file ' + ts_file + ' already exists!',
                    bg='black'))
def check_config(self):
    """Ensure config.json exists, creating a fresh one when it is absent."""
    if not os.path.isfile("config.json"):
        print(blue("> Config not found! Creating one.."))
        self.create_config()
        print(green("> Config created!"))
    else:
        print(green("> Config found!"))
def post_process(self, this, num, max_length):
    """Colorize a listing entry for print display.

    Returns a (num, item) pair of display strings:
      - unmounted entries: number struck out, item green
      - default entries: both cyan
      - directories: number unchanged, item yellow
      - symlinks and plain files: number unchanged, item green
    """
    item = str(this)
    if not this.is_mounted:
        strikeout = '☓' * len(str(num))
        return (strikeout, green(item))
    # Fix: the original had two identical `this.default` branches
    # (`default and force_default`, then bare `default`) returning the
    # same value; collapsed into one.
    if this.default:
        return (cyan(num), cyan(item))
    if this.is_directory:
        return (num, yellow(item))
    # Symlinks and plain files both render green.  The original's
    # splitext split returned green(ffile + ext) == green(item) in every
    # case, so the distinction was a no-op and is dropped.
    return (num, green(item))
def sshput(): print "sshput block" #prompt for info servers = raw_input('Servers: ') local_file = raw_input('Local File or Direcotry: ') remote_dir = raw_input('Remote Location: ') remote_base_file = os.path.basename(local_file) remote_file = remote_dir + remote_base_file list = server_list(servers) fail_to_connect = [] for host in list: try: ssh.connect(host, username='******', allow_agent=True, timeout=5) except Exception: print red(host + "\n Could not connect \n") fail_to_connect.append(host) else: print green("\n" + host + "\n") sftp = ssh.open_sftp() sftp.put(local_file, remote_file, confirm=True) ssh.close() if len(fail_to_connect) > 0: print red("Failed to connect to these hosts %s" % (fail_to_connect)) return "sshput"
def libs():
    """Prompt for four words, show a colour legend, and print a mad-lib."""
    words = []
    for prompt in ("Type in a verb: ", "Type in a noun: ",
                   "Type in a pronoun: ", "Type in an adjective: "):
        word = input(prompt)
        word_list.append(word)
        words.append(word)
    verb, noun, pronoun, adjective = words
    print(red('verb:red'))
    print(green('noun:green'))
    print(blue('pronoun:blue'))
    print(yellow('adjective:yellow'))
    print("I {} some {} then I've seen {}. I didn't realize it's {}".format(
        red(verb), green(noun), blue(pronoun), yellow(adjective)))
    inputs = input('Enter R to show results: ')
    if inputs in ('r', 'R'):
        for list_item in word_list:
            print(magenta(list_item))
def sshget(): #prompt for info servers = raw_input('Servers: ') remote_file = raw_input('Remote File: ') local_dir = raw_input('Local Drop Directory: ') remote_base_file = os.path.basename(remote_file) list = server_list(servers) fail_to_connect = [] for host in list: try: ssh.connect(host, username='******', allow_agent=True, timeout=5) except Exception: print red(host + "\n Could not connect \n") fail_to_connect.append(host) else: print green("\n" + host + "\n") local_file = local_dir + host + "." + remote_base_file sftp = ssh.open_sftp() sftp.get(remote_file, local_file) ssh.close() if len(fail_to_connect) > 0: print red("Failed to connect to these hosts %s" % (fail_to_connect)) return "sshget"
def check_config_file(path):
    """Scan the pants config file at *path* for unmigrated option keys
    and for unparseable JSON-ish list/dict values."""
    cp = Config.create_parser()
    with open(path, 'r') as ini:
        cp.readfp(ini)
    print('Checking config file at {0} for unmigrated keys.'.format(path),
          file=sys.stderr)

    def section(s):
        # Render a section name as a coloured [section] tag.
        return cyan('[{0}]'.format(s))

    # Flag every straight src -> dst key migration.
    for src, dst in migrations.items():
        check_option(cp, src, dst)

    # Special-case handling of per-task subsystem options, so we can sweep them up in all
    # sections easily.
    def check_task_subsystem_options(subsystem_sec, options_map, sections=None):
        sections = sections or cp.sections()
        for src_sec in ['DEFAULT'] + sections:
            dst_sec = subsystem_sec if src_sec == 'DEFAULT' else '{}.{}'.format(
                subsystem_sec, src_sec)
            for src_key, dst_key in options_map.items():
                check_option(cp, (src_sec, src_key), (dst_sec, dst_key))

    artifact_cache_options_map = {
        'read_from_artifact_cache': 'read',
        'write_to_artifact_cache': 'write',
        'overwrite_cache_artifacts': 'overwrite',
        'read_artifact_caches': 'read_from',
        'write_artifact_caches': 'write_to',
        'cache_compression': 'compression_level',
    }
    check_task_subsystem_options('cache', artifact_cache_options_map)

    jvm_options_map = {
        'jvm_options': 'options',
        'args': 'program_args',
        'debug': 'debug',
        'debug_port': 'debug_port',
        'debug_args': 'debug_args',
    }
    jvm_options_sections = [
        'repl.scala', 'test.junit', 'run.jvm', 'bench', 'doc.javadoc',
        'doc.scaladoc'
    ]
    check_task_subsystem_options('jvm', jvm_options_map,
                                 sections=jvm_options_sections)

    # Check that all values are parseable.
    for sec in ['DEFAULT'] + cp.sections():
        for key, value in cp.items(sec):
            value = value.strip()
            if value.startswith('['):
                try:
                    custom_types.list_type(value)
                except ParseError:
                    print('Value of {key} in section {section} is not a valid '
                          'JSON list.'.format(key=green(key),
                                              section=section(sec)))
            elif value.startswith('{'):
                try:
                    custom_types.dict_type(value)
                except ParseError:
                    print('Value of {key} in section {section} is not a valid '
                          'JSON object.'.format(key=green(key),
                                                section=section(sec)))
def get_repos(): """ Checks if $newrepo exists and bails out if it does, otherwise returns all the repos""" page = 1 repos = [] while (page != 0): param = {'page_size': 1000, 'page': page} try: r = requests.get(base_url + "/repositories/" + org + "/", headers=token, params=param) r.raise_for_status() except requests.exceptions.HTTPError as err: print red("Retrieving repos failed: {}").format(err) sys.exit(1) readable_json = r.json() next_page = str(r.json()['next']) for i in readable_json['results']: repos.append(str(i['name'])) if (next_page != "None"): page += 1 else: page = 0 print "Found %s repositories" % (len(repos)) if new_repo in repos: print green("Repository {0} already exists.").format(new_repo) sys.exit() else: print blue("Repo doesn't exist.") return repos
def decodersource(ivpid='test'):
    # Query the board controller of the IVP at *ivpid* for its decoder
    # routing table and cache the decoder <-> SMIP-port mapping in redis.
    if ivpid == 'test':
        # Fall back to the ivpid passed as an HTTP query parameter.
        ivpid = request.args.get('ivpid')
    ip = parserip(str(ivpid))
    '''
    http://192.168.0.181/cgi-bin/boardcontroller.cgi?action=get&object=router&slotid=slot4&slotport=sub_in_0&id=0.0852252272940579
    '''
    ivpdecodergroup = ['slot4']
    infogroup = []
    for decoder in ivpdecodergroup:
        info = requests.get(
            'http://' + str(ip) +
            '/cgi-bin/boardcontroller.cgi?action=get&object=router&slotid=' +
            str(decoder) + '&slotport=sub_in_0').text
        print 'http://' + str(
            ip
        ) + '/cgi-bin/boardcontroller.cgi?action=get&object=router&slotid=' + str(
            decoder) + 'slotport=sub_in_0'
        # The CGI reply is a Python-literal dict; parse it without eval.
        finalinfo = ast.literal_eval(info)
        print red(str(finalinfo))
        try:
            print green('what happen=====================================>')
            lenoflist = len(finalinfo['Body']['Route_records'])
            for k in range(lenoflist):
                # Records sourced from slot6 carry the SMIP port mapping.
                if 'slot6' in finalinfo['Body']['Route_records'][k]["src_id"]:
                    print red('i am ther-------------------->')
                    r.set(
                        str(decoder) + 'correspondingsmip',
                        finalinfo['Body']['Route_records'][k]["src_port"])
                    r.set(
                        str(ivpid) +
                        finalinfo['Body']['Route_records'][k]["src_port"],
                        decoder)
        except:
            # Best-effort: blank the cache entry when the reply is malformed.
            r.set('ivpidencodersmip' + str(decoder), '')
def search_best_model(self, data):
    """Search for the best model with hyperas/hyperopt and keep the winner.

    data: (X_train, X_val, X_test, y_train, y_val, y_test) tuple, passed
    straight to hyperas' optim.minimize.  Stores the winning model on
    self.model and its hyper-parameters on self.best_run.
    """
    from hyperopt import Trials, tpe, rand
    from hyperas import optim
    t0 = perf_counter()  # timer
    best_run, best_model, space = optim.minimize(model=create_model,
                                                 data=data,
                                                 rseed=Cfg.RandomState,
                                                 algo=tpe.suggest,
                                                 max_evals=Cfg.NbTrials,
                                                 trials=Trials(),
                                                 eval_space=True,
                                                 verbose=False,
                                                 return_space=True)
    self.model = best_model
    self.best_run = best_run
    self.logger.info(' best-run: ' + cyan(str(best_run)))
    # NOTE(review): "serach" typo below is a runtime string, left untouched.
    self.logger.info(green(' The serach of best model took: ') +
                     red(str(round(perf_counter() - t0, 5))) + " " +
                     green(".s"))
def get_command(): print green("\nWhat do you do now? ") command = input_command() while command not in ['look', 'open', 'help', 'quit']: command = input_command() return command
def check_option(cp, src, dst):
    """Report whether the (section, key) pair *src* is still present and,
    when *dst* is known, where it should move to.  Also prints any
    migration note registered for *src*.
    """
    def has_explicit_option(section, key):
        # David tried to avoid poking into cp's guts in https://rbcommons.com/s/twitter/r/1451/ but
        # that approach fails for the important case of boolean options. Since this is a ~short term
        # tool and its highly likely its lifetime will be shorter than the time the private
        # ConfigParser_sections API we use here changes, it's worth the risk.
        if section == 'DEFAULT':
            # NB: The 'DEFAULT' section is not tracked via `has_section` or `_sections`, so we use a
            # different API to check for an explicit default.
            return key in cp.defaults()
        else:
            return cp.has_section(section) and (key in cp._sections[section])

    def sect(s):
        return cyan('[{}]'.format(s))

    src_section, src_key = src
    if has_explicit_option(src_section, src_key):
        if dst is not None:
            dst_section, dst_key = dst
            print('Found {src_key} in section {src_section}. Should be {dst_key} in section '
                  '{dst_section}.'.format(src_key=green(src_key),
                                          src_section=sect(src_section),
                                          dst_key=green(dst_key),
                                          dst_section=sect(dst_section)),
                  file=sys.stderr)
        elif src not in notes:
            # Fix: the two concatenated fragments previously rendered
            # "migration pathfor this option" — a space was missing.
            print('Found {src_key} in section {src_section} and there is no automated migration '
                  'path for this option. Please consult the '
                  'codebase.'.format(src_key=red(src_key),
                                     src_section=red(src_section)))
        if (src_section, src_key) in notes:
            print('  Note for {src_key} in section {src_section}: {note}'
                  .format(src_key=green(src_key),
                          src_section=sect(src_section),
                          note=yellow(notes[(src_section, src_key)])))
def test_req_two_six(driver):
    """Exercise the three 'Shows Paths' menu flows (short/med/long) and
    report pass/fail based on returning to the main page."""
    print("Test: Shows Paths test.")
    driver.get("http://" + ip + ":12345/main/")
    flows = [
        (1, "finish_reg_med", "short"),
        (2, "finish_reg_med", "med"),
        (3, "finish_reg_long", "long"),
    ]
    for child, button_id, label in flows:
        driver.find_element(
            By.CSS_SELECTOR,
            "#sideMenu > div.sidenav > button:nth-child(3)").click()
        driver.find_element(
            By.CSS_SELECTOR,
            "#sideMenu > div.sidenav > div:nth-child(4) > button:nth-child(3)"
        ).click()
        driver.find_element(
            By.CSS_SELECTOR,
            "#sideMenu > div.sidenav > div:nth-child(4) > div:nth-child(4) > a:nth-child({})".format(child)
        ).click()
        driver.execute_script(
            "document.getElementById('{}').click();".format(button_id))
        if driver.current_url == 'http://' + ip + ':12345/main/':
            print(green('--- test passed {}!!! ---'.format(label)))
        else:
            print(red('--- test failed {}!!! ---'.format(label)))
    return
def randomList(listA): print green("[+] Creating a random list of targets...") listB = [] for i in range(len(listA)): element = random.choice(listA) listA.remove(element) listB.append(element) return listB
def listContainers(self): print "\n Containers:\n" for container in self.orc.xmlcmdbobserver.getContainerlist(): container_type_name = container.attrib['type'] container_version = container.attrib['version'] container_UID = container.attrib['ResourceUID'] print "typename: " + green(container_type_name) + " version: " + green(container_version) + " UID: " + green(container_UID) print "count: " + str(len(self.orc.xmlcmdbobserver.getContainerlist()))
def extract(self): extract_folder = os.path.join(self.server_info['remote_path'], self.server_info['Extract_directory']) command = "cd %s && unzip -o %s" % (extract_folder, self.server_info['package_name']) stdin, stdout, stderr = self.connection.exec_command(command) for line in stdout.readlines(): print green(line) self.connection.close()
def main(): print("start") try: cl.green() except cl.MyGreenError as err: print(err) print(type(err).__name__) print("done")
def main(): print("start") try: cl.green() except Exception as err: print(err) print(type(err).__name__) print("done")
def run(self, *args): servers = self.nova.servers() for server in self.nova.servers(): # TODO: Add the IPs of each instance. if server.status == u"ACTIVE": print green(server.name) else: print red(server.name) return servers
def run(self):
    # Watch loop: poll the market price from redis every 0.5 s and print a
    # line whenever the price changes, coloured by the percentage move
    # relative to the price captured at watch start.
    green('Starting watch for {0}'.format(market))
    yellow(
        'Checked every 0.5 seconds, only price/percentage changes will be shown'
    )
    yellow(
        'Price/Percentage changes are based on the moment the watch is started'
    )
    yellow(
        'Please note the Timestamp is taken from Bittrex, can be different then yours'
    )
    red('q+enter to return to Main Menu')
    lastprice = '0'
    try:
        values = r.hmget(market, 'Ask')
        start_price = float(values[0])
    except:
        white(
            'Currency not available... or unable to retrieve data from redis.pontstrader.com'
        )
    else:
        while not self._stopevent.isSet():
            try:
                values = r.hmget(market, 'Ask', 'TimeStamp')
                price = float(values[0])
                timestamp = values[1]
            except:
                red('Unable to retrieve data from redis.pontstrader.com, trying to recover...'
                    )
            else:
                # Percentage change versus the start-of-watch price.
                percent = 100 * (float(price) - float(start_price)
                                 ) / float(start_price)
                if price != lastprice:
                    if percent < 0.00:
                        white(
                            '{0} - The {1} price for 1 {2} is {3:.8f} {4}'
                            .format(timestamp, trade, currency, price,
                                    trade) +
                            (Fore.RED + ' ({0:.2f})%'.format(percent)))
                        lastprice = price
                    elif percent > 0.00:
                        white(
                            '{0} - The {1} price for 1 {2} is {3:.8f} {4}'
                            .format(timestamp, trade, currency, price,
                                    trade) +
                            (Fore.GREEN + ' ({0:.2f})%'.format(percent)))
                        lastprice = price
                    else:
                        white(
                            '{0} - The {1} price for 1 {2} is {3:.8f} {4} ({5:.2f}%)'
                            .format(timestamp, trade, currency, price,
                                    trade, percent))
                        lastprice = price
            time.sleep(0.5)
            self._stopevent.wait(self._sleepperiod)
    white('Returning to Main Menu')
def __print_cross(results, encoding="gbk"): for (num, instance, result) in results: print "Num:%5s [%40s] Elapsed:%20s" % ( num, instance if result.succeed else colors.red(str(instance)), datetime.timedelta(seconds=result.elapsed)) print colors.green(str( result.result)) if result.succeed else colors.red( str(result.result))
def handleUrls(year, urls): for url in urls: print green("URL: " + url) r = requests.get(BASE_URL + url) s = BeautifulSoup(r.text) links = s.find("div", {"id": "tab-debate"}) if links: links = links.find_all("a") handleRosters(year, [a.get('href') for a in links])
def close_out_reporting(self):
    """Offer to open the HTML report, then close the database connection.

    Exits the process without closing the connection when the user
    declines (preserved from the original flow).
    """
    # Typo fix in the prompt: "wan" -> "want".
    confirm = input(
        green("[+] Job's done! Do you want to view the HTML report now? (Y\\N) "))
    if confirm in ("Y", "y"):
        os.system("open '{}/report.html'".format(self.report_path))
    else:
        print(green("[+] Exiting..."))
        exit()
    # Close the connection to the database
    self.conn.close()
def listContainers(self): print "\n Containers:\n" for container in self.orc.xmlcmdbobserver.getContainerlist(): container_type_name = container.attrib['type'] container_version = container.attrib['version'] container_UID = container.attrib['ResourceUID'] print "typename: " + green( container_type_name) + " version: " + green( container_version) + " UID: " + green(container_UID) print "count: " + str(len(self.orc.xmlcmdbobserver.getContainerlist()))
def __print_cross(results, encoding="gbk"): num = 0 for result in results: num += 1 if result is None or not isinstance(result, ExecuteOut): continue print "Num:%5s [%40s] Elapsed:%20s" % (num, result.instance if result.succeed else colors.red( str(result.instance)), datetime.timedelta(seconds=result.elapsed)) print colors.green(str(result.result)) if result.succeed else colors.red(str(result.result))
def unlink(self, link_target): try: sftp = self.connection.open_sftp() try: sftp.unlink(link_target) print green("unlink success") except IOError: print red("assume path is a folder(directory).") except EOFError: print red("open sftp failed.")
def parseName(file): outputFile = "ParsedNames.txt" # Read all of the names line by line with open(file,"r") as f: print green("[+] Parsing names and outputting to %s" % outputFile) names = [line.rstrip('\n').split(" ") for line in f] output = open(outputFile,"w") for name in names: output.write(str(name).replace("]","").replace("[","").replace("'","") + '\n') output.close() f.close()
def run(self):
    """Thread body: schedule simulated 'likes' for the current present.

    Polls the API for the active present and, driven by self.timer, fires
    likeUser on worker threads at the offsets stored in self.timings.

    NOTE(review): indentation below is reconstructed from a collapsed
    source line; the nesting of several statements should be confirmed
    against the original file.
    """
    print(green("Like - Start"))
    while True:
        # presentID = "123"  # No present_id in user info, just for show
        # API not implemented
        presentID = api.getNewUser()['present_id']
        if self.currentpresentid != presentID:
            # Reset timer state when a new present appears.
            self.reset()
            self.timer.resetTiming()
            self.currentpresentid = presentID
        # First hour
        # NOTE(review): this inner loop never breaks, so control never
        # returns to the outer while — presentID is fetched only once.
        while True:
            if self.timer.tempelapsed >= self.maxtime[self.maxtimeindex]:
                del self.timings[:]
                if self.maxtimeindex == 1:
                    self.timer.tempelapsed = 0
                # Like schedule in seconds; two-element lists look like
                # min/max windows — TODO confirm against Timer.setTiming.
                self.timings.append([600])
                self.timings.append([750, 900])
                self.timings.append([1200])
                self.timings.append([1350, 1500])
                self.timings.append([2100])
                self.timings.append([2400, 2700])
                self.timings.append([3600])
                self.timer.setTiming(self.timings)
                self.maxtimeindex = 0
                # NOTE(review): immediately overwrites the 0 above — the
                # assignment to 0 looks dead; confirm intent.
                self.maxtimeindex = 1
            result = self.timer.indicator()
            if result:
                self.starttime = time.time()
                # Fire the like on a worker thread so this loop keeps ticking.
                threading.Thread(target=self.likeUser, args=(presentID,)).start()
            if len(self.timer.timing[0]) != 0 and self.show:
                self.timer.previousTime = self.timer.tempelapsed
                if self.lengthOfLiker != len(self.timer.timing[0]):
                    self.lengthOfLiker = len(self.timer.timing[0])
                if self.starttime != 0:
                    self.stoptime = int(time.time() - self.starttime)
                print(green("\nLike - Waiting " + str(self.timer.timing[0][0] - (self.timer.previousTime-self.stoptime)) + " seconds until " + str(self.timer.timing[0][0]) + " seconds"))
                # Suppress further status lines until the next like fires.
                self.show = False
            elif len(self.timer.timing[0]) == 0:
                self.lengthOfLiker = 0
            time.sleep(1)
def check_config_file(path):
    """Report config keys at `path` that still live in their pre-migration
    section, per the module-level `migrations` table."""
    config = Config.load(configpath=path)
    print('Checking config file at {0} for unmigrated keys.'.format(path), file=sys.stderr)

    def section(s):
        return cyan('[{0}]'.format(s))

    for (src_section, src_key), (dst_section, dst_key) in migrations.items():
        # Guard clauses replace the original compound condition.
        if not config.has_section(src_section):
            continue
        if not config.has_option(src_section, src_key):
            continue
        message = ('Found {src_key} in section {src_section}. Should be {dst_key} in section '
                   '{dst_section}.'.format(src_key=green(src_key),
                                           src_section=section(src_section),
                                           dst_key=green(dst_key),
                                           dst_section=section(dst_section)))
        print(message, file=sys.stderr)
def check_md5(self):
    """Verify md5 sums of the local release tree against the remote copy.

    Walks local_path recursively; for each file, computes the md5 locally
    and runs `md5sum` on the matching remote path over SSH, printing a red
    line per mismatch and a green summary at the end.

    NOTE(review): indentation is reconstructed from a collapsed source
    line; confirm nesting against the original file.
    """
    global rmd5sum

    def md5_checksum(file_path):
        # Hash in 8 KiB chunks so large files aren't read into memory at once.
        with open(file_path, 'rb') as fh:
            m = hashlib.md5()
            while True:
                data = fh.read(8192)
                if not data:
                    break
                m.update(data)
            return m.hexdigest()
    try:
        sftp = self.connection.open_sftp()
        local_path = os.path.join(self.server_info['local_path'], self.server_info['release_version'])
        ok_number = 0
        file_number = 0
        for (dirname, subdir, subfile) in os.walk(local_path):
            # Mirror the relative subdirectory onto the remote release path.
            remote_path = os.path.join(self.server_info['remote_path'], self.server_info['release_version'], dirname[len(local_path)+1:])
            for fname in subfile:
                file_abs_path = (os.path.join(dirname, fname))
                lmd5sum = md5_checksum(file_abs_path)
                file_number += 1
                remote_file_abs_path = (os.path.join(remote_path, fname))
                try:
                    command = "md5sum %s | awk '{print $1}'" % remote_file_abs_path
                    stdin, stdout, stderr = self.connection.exec_command(command)
                    for line in stdout.readlines():
                        rmd5sum = line.strip('\n')
                    # NOTE(review): if the command prints nothing, rmd5sum
                    # keeps the value from the previous file (it is a global),
                    # so a mismatch could be masked — verify.
                    if lmd5sum != rmd5sum:
                        print red("%s md5sum check failed.") % fname
                    else:
                        ok_number += 1
                except Exception:
                    print red("%s is not exsits.") % fname
        print green("The File number is %d md5sum check %d is OK") % (file_number, ok_number)
        sftp.close()
    except Exception as e:
        print ('*** caught exception: %s: %s' % (e.__class__, e))
        traceback.print_exc()
        # try:
        #     sftp.close()
        # except:
        #     pass
        sys.exit(1)
def likeUser(self, presentID):
    """Pick a random stored account and print that it liked the present.

    The actual API calls (login / like / logout) are still commented out.
    """
    # Choose a random stored account; indices passed to retrieveIndividual
    # start at 1 here.
    chosen_index = randint(1, len(self.alluser))
    liker = self.Login.retrieveIndividual(chosen_index)
    #Login.removeIndividual(liker)
    #api.loginUser(liker['email'], liker['password'])
    # Like user
    print(green("Like - " + liker['firstname'] + " liked random user"))
    self.numofliker += 1
    print(green("Like - Likers: " + str(self.numofliker)))
    #api.likerPresent(presentID)
    #api.logoutUser()
    # Allow run() to print the next waiting-status line.
    self.show = True
def internal_symlink(*args):
    # Create link_src -> link_target on the remote host via SFTP, creating
    # the release directory first if it does not exist.  Closure: uses
    # self, link_src and link_target from the enclosing scope; *args is
    # ignored.  On any failure the whole process exits with status 1.
    # NOTE(review): indentation reconstructed from a collapsed source line.
    try:
        sftp = self.connection.open_sftp()
        if sftp:
            print green("open sftp success")
        try:
            # Probe for the release directory; IOError means it is missing.
            sftp.listdir(self.server_info['release_path'])
        except IOError:
            sftp.mkdir(self.server_info['release_path'])
            print green("%s create success.") % self.server_info['release_path']
        sftp.symlink(link_src, link_target)
        print green("create symlink success.")
        sftp.close()
    except Exception as e:
        print('*** Caught exception: %s: %s' % (e.__class__, e))
        traceback.print_exc()
        # try:
        #     sftp.close()
        # except:
        #     pass
        sys.exit(1)
    print green("%s %s symlink success") % (link_src, link_target)
def listDeploySchema(self): # list deploySchemas print red("\n list deploy schemas:\n") for schema in self.orc.deploySchemaList: print green(schema.schema_name) # list Target Role Mappings print red("\nTargetRoleMapping: \n") dtm = DeployTargetManager() rm = RoleManager() for item in self.orc.deploySchema.targetRoleMapping: targetUID = item.text.split(':')[0].lstrip('{').rstrip('}') roleUID = item.text.split(':')[1].lstrip('{').rstrip('}') if dtm.getTargetNameByUID(targetUID): targetName = dtm.getTargetNameByUID(targetUID) if rm.queryRoleByUID(roleUID): roleName = rm.queryRoleByUID(roleUID) print green(targetName) + ' ===> ' + green(roleName) print "count: " + str(len(self.orc.deploySchema.targetRoleMapping)) print "\n" # list targetinstance target mappings print red("TargetInstancesTargetMapping:\n") for targetInstance in self.orc.deploySchema.targetInstList: targetUID = targetInstance.attrib['TargetUID'].lstrip('{').rstrip('}') targetInstanceUID = targetInstance.attrib['InstanceUID'] if dtm.getTargetNameByUID(targetUID): targetName = dtm.getTargetNameByUID(targetUID) print green(targetInstanceUID) + ' ===> ' + green(targetName) print "count: " + str(len(self.orc.deploySchema.targetInstList)) self.listContainers()
def create_instance(self, server, image, name, key_name=None): print "Creating instance: {}".format(name) print "This may take a while..." flavor = self.client.flavors.get(server.flavor['id']) instance = self.client.servers.create( name, image, flavor, security_groups=(item['name'] for item in server.security_groups), key_name=key_name or server.key_name, ) self.wait(instance) print green("Instance creation successful!") return instance
def check_config_file(path):
    """Scan the ini file at `path` for unmigrated option keys and bad values.

    Reports keys that moved to a new section, keys with no automated
    migration path, migration notes, and values that look like JSON
    lists/objects but fail to parse.
    """
    cp = Config.create_parser()
    with open(path, 'r') as ini:
        cp.readfp(ini)
    config = SingleFileConfig(path, cp)
    print('Checking config file at {0} for unmigrated keys.'.format(path), file=sys.stderr)

    def section(s):
        return cyan('[{0}]'.format(s))

    # Hoisted out of the migrations loop — it is loop-invariant.
    def has_explicit_option(section, key):
        # David tried to avoid poking into cp's guts in https://rbcommons.com/s/twitter/r/1451/ but
        # that approach fails for the important case of boolean options. Since this is a ~short term
        # tool and its highly likely its lifetime will be shorter than the time the private
        # ConfigParser_sections API we use here changes, its worth the risk.
        return cp.has_section(section) and (key in cp._sections[section])

    for (src_section, src_key), dst in migrations.items():
        if has_explicit_option(src_section, src_key):
            if dst is not None:
                dst_section, dst_key = dst
                print('Found {src_key} in section {src_section}. Should be {dst_key} in section '
                      '{dst_section}.'.format(src_key=green(src_key),
                                              src_section=section(src_section),
                                              dst_key=green(dst_key),
                                              dst_section=section(dst_section)),
                      file=sys.stderr)
            elif (src_section, src_key) not in notes:
                # Fix: the adjacent literals used to concatenate to
                # "...migration pathfor this option..." — added the missing space.
                print('Found {src_key} in section {src_section} and there is no automated migration path '
                      'for this option. Please consult the '
                      'codebase.'.format(src_key=red(src_key), src_section=red(src_section)))
            if (src_section, src_key) in notes:
                print(' Note: {0}'.format(yellow(notes[(src_section, src_key)])))
    # Check that all values are parseable.
    for sec in ['DEFAULT'] + cp.sections():
        for key, value in cp.items(sec):
            value = value.strip()
            if value.startswith('['):
                try:
                    custom_types.list_type(value)
                except ParseError:
                    print('Value of {key} in section {section} is not a valid '
                          'JSON list.'.format(key=green(key), section=section(sec)))
            elif value.startswith('{'):
                try:
                    custom_types.dict_type(value)
                except ParseError:
                    print('Value of {key} in section {section} is not a valid '
                          'JSON object.'.format(key=green(key), section=section(sec)))
def snapshot(self, server): now = datetime.now() name = "{}-clone-{}".format(server.name, now).replace(' ', '-') print "Creating snapshot: {}".format(name) print "This may take a while..." try: id = self.client.servers.create_image(server, name) except exceptions.NotFound: msg = red("'{}' not found.\n".format(server.name)) msg += "Use 'stackctl list' to view active instances." raise WrapperFailure(msg) image = self.client.images.get(id) self.wait(image) print green("Snapshot creation successful!") return image
def main():
    """Entry point: run pants, optionally under cProfile when the
    PANTS_PROFILE environment variable names a stats output path."""
    exiter = _Exiter()
    exiter.set_except_hook()

    def do_run():
        _run(exiter)

    # Run with profiling, if requested.
    profile_path = os.environ.get('PANTS_PROFILE')
    try:
        if profile_path:
            import cProfile
            profiler = cProfile.Profile()
            try:
                profiler.runcall(do_run)
            finally:
                # Dump stats even when the run fails, then show how to render them.
                profiler.dump_stats(profile_path)
                print('Dumped profile data to {}'.format(profile_path))
                view_cmd = green('gprof2dot -f pstats {path} | dot -Tpng -o {path}.png && '
                                 'open {path}.png'.format(path=profile_path))
                print('Use, e.g., {} to render and view.'.format(view_cmd))
        else:
            do_run()
    except KeyboardInterrupt:
        exiter.exit_and_fail('Interrupted by user.')