def travers_dir(top):
    """Walk ``top`` and report files whose MD5 digests collide."""
    hashes_seen = {}
    duplicate_files = []
    exclude = ['.git']
    for root, dirs, files in os.walk(top):
        dirs[:] = [d for d in dirs if d not in exclude]
        for file in files:
            # Join against root, not the top-level directory, so files in
            # subdirectories resolve to their real paths (the original built
            # every path from the top directory, which broke recursion).
            fname = os.path.join(root, file)
            hash_md5 = hashlib.md5()
            try:
                with open(fname, "rb") as f:
                    for chunk in iter(lambda: f.read(4096), b""):
                        hash_md5.update(chunk)
                fhash = hash_md5.hexdigest()
                if fhash in hashes_seen:
                    duplicate_files.append(fname)
                    duplicate_files.append(hashes_seen[fhash])
                else:
                    hashes_seen[fhash] = fname
            except OSError as e:
                print("A file was inaccessible: %s" % e)
    pp(hashes_seen)
    pp(duplicate_files)
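# Minimal usage sketch for the duplicate finder above (hypothetical entry
# point; assumes os, hashlib, and pp are imported at module level, as the
# function itself requires):
if __name__ == '__main__':
    import sys
    travers_dir(sys.argv[1] if len(sys.argv) > 1 else '.')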
def test_open_input(setup):
    # LOCAL TEST ONLY
    path = 'tests/data/film佐伯.mp4'
    fmt_ctx = FormatCtx.open(path)
    assert InputFormat == type(fmt_ctx)
    print fmt_ctx
    fmt_ctx.open_decoder()
    #print fmt_ctx.video_codec_ctx, fmt_ctx.video_codec_ctx.coder
    #print fmt_ctx.audio_codec_ctx, fmt_ctx.audio_codec_ctx.coder
    pp(fmt_ctx.to_primitive(True))
    img = None
    which_frame = None
    for frame in fmt_ctx.next_frame():
        t = float(frame.pkt_pts_f)
        if frame.type == 'video':
            which_frame = frame
            if t >= 15.0:
                break
    if which_frame:
        img = which_frame.process()
    fmt_ctx.close_decoder()
    if img:
        img.show()
def test_deter_field_string_dictionaries(self):
    self.MyFields.analyze_fields(None, self.overrides)
    assert self.MyFields.field_case[self.id_col] is None
    assert self.MyFields.field_case[self.very_mixed_col] == 'mixed'
    assert self.MyFields.field_case[self.lower_col] == 'lower'
    assert self.MyFields.field_case[self.upper_col] == 'upper'
    assert self.MyFields.field_case[self.empty_col] is None
    assert self.MyFields.field_case[self.float_col] is None
    assert self.MyFields.field_case[self.string_col] == 'lower'

    assert self.MyFields.field_max_length[self.id_col] is None
    print('\n=============================')
    pp(self.MyFields.field_freqs[self.upper_col])
    pp(self.MyFields.field_max_length[self.upper_col])
    print('=============================')
    assert self.MyFields.field_max_length[self.upper_col] == 3
    assert self.MyFields.field_max_length[self.empty_col] is None
    assert self.MyFields.field_max_length[self.float_col] is None
    assert self.MyFields.field_max_length[self.string_col] == 3

    assert self.MyFields.field_min_length[self.id_col] is None
    assert self.MyFields.field_min_length[self.upper_col] == 3
    assert self.MyFields.field_min_length[self.empty_col] is None
    assert self.MyFields.field_min_length[self.float_col] is None
    assert self.MyFields.field_min_length[self.string_col] == 3

    assert self.MyFields.field_mean_length[self.id_col] is None
    assert self.MyFields.field_mean_length[self.upper_col] == 3
    assert self.MyFields.field_mean_length[self.empty_col] is None
    assert self.MyFields.field_mean_length[self.float_col] is None
    assert self.MyFields.field_mean_length[self.string_col] == 3
def parseXMLFile(file, verbose):
    tree = etree.parse(file)
    root = tree.getroot()
    uuids = root.findall(".//UUID/Guid")  # currently unused
    data = {}
    data['TextureEntry'] = parse_textureEntry(root.findtext(".//TextureEntry"))
    GroupPosition = root.find(".//GroupPosition")
    data['GroupPosition'] = getDict(GroupPosition)
    data['OffsetPosition'] = getDict(root.find(".//OffsetPosition"))  # key was misspelled 'OffestPosition'
    data['RotationOffset'] = getDict(root.find(".//RotationOffset"))
    data['Color'] = getDict(root.find(".//Color"))
    data['ProfileShape'] = root.findtext(".//ProfileShape")
    data['PathBegin'] = root.findtext(".//PathBegin")
    data['PathCurve'] = root.findtext(".//PathCurve")
    data['PathEnd'] = root.findtext(".//PathEnd")
    data['PathRadiusOffset'] = root.findtext(".//PathRadiusOffset")
    data['PathRevolutions'] = root.findtext(".//PathRevolutions")
    data['PathScaleX'] = root.findtext(".//PathScaleX")
    data['PathScaleY'] = root.findtext(".//PathScaleY")  # tag was mistyped 'PathSCaleY', which always returned None
    data['PathShearX'] = root.findtext(".//PathShearX")
    data['PathShearY'] = root.findtext(".//PathShearY")
    data['PathSkew'] = root.findtext(".//PathSkew")
    data['PathTaperX'] = root.findtext(".//PathTaperX")
    data['PathTaperY'] = root.findtext(".//PathTaperY")
    data['PathTwist'] = root.findtext(".//PathTwist")
    data['PathTwistBegin'] = root.findtext(".//PathTwistBegin")
    data['Scale'] = getDict(root.find(".//Scale"))
    data['ParentID'] = root.find(".//ParentID").text
    if verbose:
        pp(data)
    return data
def main(argv):
    if len(sys.argv) != 2:
        sys.exit('Usage: %s <FQDN-of-ExternalHost>' % (sys.argv[0]))
    targethost = argv[1]
    logging.basicConfig(level=logging.INFO)
    #logging.getLogger('suds.client').setLevel(logging.DEBUG)
    global client
    client = Client()
    # Stop here if the record already exists; otherwise fall through and add
    # it. (The original also called exit(0) unconditionally after this check,
    # which made everything below unreachable.)
    print "Checking if exists...\n"
    if already_exists(targethost):
        exit(1)
    # add externalhost
    client.get_dns().add_externalhost_record(targethost, '', view_name="Default View")
    # re-check it
    recheck = client.get_dns().get_externalhost_record(targethost, "nope", view_name="Default View")
    # double-check results
    print recheck.name
    print recheck.type
    print recheck.id
    pp(recheck.properties)
    client.logout()
    exit(0)
def main():
    """ Main function """
    auth = authorization.authorize()
    # Uncomment these lines to show that the auth is actually there and working.
    # response = requests.get(TIMELINE_URL, auth=auth)
    # print json.dumps(response.json(), indent=4)

    # Since we're already logged on at this point I can use the
    # argparser to read the positional arguments and execute
    # what the user wants to do...

    # Command line parser here
    parser = make_parser()
    arguments = parser.parse_args()
    arguments = vars(arguments)
    command = arguments.pop("command")
    print command
    if command == "post":
        payload = arguments
        r = requests.post(CHIRP_URL, params=payload, auth=auth)
        print "URL: ", r.url
        print "STATUS_CODE: ", r.status_code
        print "TEXT: " + r.text
    elif command == "show":
        payload = arguments
        r = requests.get(SHOW_URL, params=payload, auth=auth)
        print "URL: ", r.url
        print "STATUS_CODE: ", r.status_code
        pp(r.json(), width=40, indent=2)
    else:
        print "Unknown command; no tweet was sent"
def main():
    # import the northwind module in a container object
    container = getContainer('northwind')
    # now we have all types loaded in the above variable
    db = container.Products()

    # getting all products
    products = db.getAll()
    print '# All products'
    pp(products)

    # getting product with id 3
    product = db.get(3)
    print '\n# Retrieved product with id 3'
    print product

    # adding a new one
    p = container.Product()
    p.id = 4
    p.name = "Gizmo 4"
    p.price = '14.95'
    db.add(p)
    print '\n# Product after adding new item'
    pp(products)  # reused reference
def setUpListener(self):
    GPIO.setmode(GPIO.BOARD)
    # set chip select
    GPIO.setup(CS, GPIO.OUT)
    # set up buttons, leds and spi
    for k, v in self.cfg.Taster.iteritems():
        # set up buttons and button leds
        v = int(v)
        GPIO.setup(v, GPIO.IN)
        self.led[v] = int(self.cfg.TasterLED[k])
        GPIO.setup(self.led[v], GPIO.OUT)
        GPIO.add_event_detect(v, GPIO.FALLING, callback=self.btn_handler, bouncetime=300)
        # set up spi values
        self.spichan1[v] = self.calcSPIOutput(1, self.cfg.SPIChan1[k])
        self.spichan2[v] = self.calcSPIOutput(0, self.cfg.SPIChan2[k])
        btns = self.cfg.QualleColors[k].split(";")
        pins = []
        for b in btns:
            if b != "":
                pin = int(self.cfg.ColorLED[b])
                pins.append(pin)
        self.colorled[v] = pins
    # color leds
    for k, v in self.cfg.ColorLED.iteritems():
        v = int(v)
        GPIO.setup(v, GPIO.OUT)
        self.colorpins.append(v)
    pp(self.led)
def updatep1(db, rec, field, value):
    r = db.get(rec)
    if r is None:
        r = {'_id': rec, field: value}
    else:
        r[field] = value  # was r['field'] = value, which set a literal 'field' key
    pp(db.save(r))
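# Quick check of the fix above with a hypothetical dict-backed stand-in for
# db (the real db is presumably a CouchDB-style wrapper exposing get/save):
class _FakeDB(dict):
    def get(self, rec):
        return dict.get(self, rec)

    def save(self, r):
        self[r['_id']] = r
        return r

updatep1(_FakeDB(), 'doc1', 'status', 'done')
# prints {'_id': 'doc1', 'status': 'done'} -- the field name, not a literal 'field'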
def test_solve():
    pixel_scale = 1000.0
    world_points = matrix([[0.0, 0.0, 0.0, 1.0],
                           [20.0, 0.0, 0.0, 1.0],
                           [0.0, 15.0, 0.0, 1.0],
                           [0.0, 0.0, 10.0, 1.0],
                           [1.0, 1.0, 1.0, 1.0],
                           [2.0, 1.0, 1.0, 1.0],
                           [1.0, 2.0, 1.0, 1.0],
                           [1.0, 1.0, 2.0, 1.0]]).T
    theta = 1.5
    s = math.sin(theta)
    c = math.cos(theta)
    R = matrix([[c, -s, 0.0],
                [s, c, 0.0],
                [0.0, 0.0, 1.0]])
    P = hstack([R, matrix([[0., 0., 30.]]).T])
    print P
    image_points = P * world_points
    from pprint import pprint as pp
    pp(world_points)
    pp(image_points)
    image_points = matrix([[r[0, 0] / r[0, 2], r[0, 1] / r[0, 2]]
                           for r in image_points.T]).T
    image_points = 1000.0 * image_points
    keys = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']
    world_points = dict(zip(keys, [tuple(p) for p in array(world_points[:3, :].T)]))
    image_points = dict(zip(keys, [tuple(p) for p in array(image_points.T)]))
    print solve(world_points, image_points, pixel_scale=pixel_scale)
def question4(filename):
    human = read_protein(HUMAN_EYELESS_URL)
    fly = read_protein(FRUITFLY_EYELESS_URL)
    scoring = read_scoring_matrix(PAM50_URL)
    distr, raw = generate_null_distribution(human, fly, scoring, 1000)
    from pprint import pprint as pp
    distr = str_keys(distr)
    pp(distr)
    distr = norm(distr)
    pairs = list(distr.iteritems())
    pairs = sorted(pairs, key=itemgetter(0))
    print(pairs)
    index = np.arange(len(pairs))
    plt.bar(index, map(itemgetter(1), pairs))
    plt.xticks(index + 0.4, map(itemgetter(0), pairs), fontsize=8)
    plt.xlabel('Scores')
    plt.ylabel('Fraction of total trials')
    plt.title('Distribution of scores')
    plt.tight_layout()
    plt.savefig(filename)
    s_score = 875
    n = 1000
    mean = sum(raw) / float(n)  # float() guards against Python 2 integer division
    std = np.sqrt(sum((x - mean) ** 2 for x in raw) / n)
    z_score = (s_score - mean) / std
    print('mean = %f' % mean)
    print('std = %f' % std)
    print('z_score = %f' % z_score)
def pretty_print_list(file_name=None, data_format="JSON"):
    # print YAML or JSON representations of list data
    assert file_name is not None, "Provide a file name"
    assert data_format in ("JSON", "YAML"), "Format must be 'JSON' or 'YAML'"
    some_list = None  # stays None if the file can't be read, so pp below won't raise NameError
    formatted_list = []
    try:
        with open(file_name, "r") as f:
            if data_format == "JSON":
                some_list = jload(f)
                formatted_list = jdumps(some_list)
            elif data_format == "YAML":
                some_list = yload(f)
                formatted_list = ydump(some_list,
                                       default_flow_style=False,
                                       explicit_start=True,
                                       width=1,
                                       indent=2)
    except IOError as e:
        print "Could not read file: %s" % e
    except Exception as e:
        print "Unexpected exception: %s" % e
    print "======================"
    print "list from file: %s in %s data_format:" % (file_name, data_format)
    print "======================"
    print formatted_list
    print "======================"
    print "list from file: %s in pretty_print native python" % file_name
    print "======================"
    pp(some_list, width=1)
def getTotalCost(itemSet):
    all_files = glob.glob('./Stores/*.txt')
    dict_list = [{}, {}, {}, {}, {}]  # one price dict per store file; assumes exactly five stores
    new_dict_list = []
    return_dict = {}
    count = 0
    for file in all_files:
        with open(file, 'r') as fptr:
            all_lines = fptr.readlines()
        # './Stores/NAME.txt' -> '/Stores/NAME' -> 'NAME'
        filename = file.split('.')[1]
        filename = filename[8:]
        for line in all_lines[3:]:
            line = line.split(',')
            cpu = line[0].strip()
            price = line[1].strip()
            price = float(price[1:])  # drop the currency symbol
            dict_list[count][cpu] = price
        new_dict_list.append([filename, dict_list[count]])
        count += 1
    for cpu_name, quant in itemSet:
        for dict_item in new_dict_list:
            if cpu_name in dict_item[1]:
                product = quant * dict_item[1][cpu_name]
                if dict_item[0] in return_dict:
                    return_dict[dict_item[0]] += round(product, 2)
                else:
                    return_dict[dict_item[0]] = round(product, 2)
    pp(return_dict)
    return return_dict
def construct_frames(inp):
    frames_bin = []
    with open(inp, 'r') as fh:  # renamed from f, which was shadowed by the loop below
        content = fh.read()
    frames_ascii = [content[i:i + frame_len] for i in range(0, len(content), frame_len)]
    pp(frames_ascii)
    for f in frames_ascii:
        print '_ ', f
    for fr in frames_ascii:
        if len(fr) < frame_len:
            fr = fr + '0' * (frame_len - len(fr))  # pad the final short frame
        fr_bin = ''
        for ch in fr:
            ch_bin = bin(ord(ch))[2:]
            fr_bin += '0' * (ch_len - len(ch_bin)) + ch_bin  # ensure it is 7 bits long
        frames_bin.append(fr_bin)
    frames_fcs_bin = []
    for fr in frames_bin:
        fcs_bin = bin(sum(int(x) for x in list(fr)))[2:]
        fcs_bin = '0' * (fcs_len - len(fcs_bin)) + fcs_bin
        frames_fcs_bin.append(fr + fcs_bin)
    for f in frames_fcs_bin:
        print '_ ', f
    return frames_fcs_bin
def main():
    usage = ("Usage: \n"
             + sys.argv[0] + " s|d max_for_0 min_for_1 srv_ip srv_port (max_for_0 could be > min_for_1)\n"
             + sys.argv[0] + " bench num_times srv_ip srv_port\n")
    if len(sys.argv) < 5:
        print usage
        sys.exit()
    if sys.argv[1] == 'bench':
        globals()['ncat_srv'] = sys.argv[3]
        globals()['ncat_srv_port'] = int(sys.argv[4])
        bench(int(sys.argv[2]))
        return  # bench mode is done; don't fall through to the s|d parsing below
    if len(sys.argv) < 6:  # s|d mode reads sys.argv[5], so it needs one more argument
        print usage
        sys.exit()
    globals()['bw_max_0'] = int(sys.argv[2])
    globals()['bw_min_1'] = int(sys.argv[3])
    globals()['ncat_srv'] = sys.argv[4]
    globals()['ncat_srv_port'] = int(sys.argv[5])
    if sys.argv[1] == 's':
        frames_bin = construct_frames(inp)  # inp is assumed to be defined at module level
        pp(frames_bin)
        sender(frames_bin)
    if sys.argv[1] == 'd':
        receiver()
def draw(self, Draw):
    """
    :param self: Worker.Worker
    :param Draw: int
    """
    if Draw not in (64, 50, 34):
        pp(self.hurricanes)
        return
    self.ploywriter.field('ID')
    self.ploywriter.field('NAME')
    self.ploywriter.field('YYYYMMDD', 'C', '10')
    self.ploywriter.field('HOURS', 'N')
    self.ploywriter.field('NE', 'N')
    self.ploywriter.field('NW', 'N')
    self.ploywriter.field('SE', 'N')
    self.ploywriter.field('SW', 'N')
    for h in self.hurricanes:
        for record in h.Records:
            # Get shape information
            center = (record['lon'], record['lat'])
            q = Quarter([record["kt%d-%d" % (Draw, i)] for i in [1, 2, 3, 4]], center)
            pointList = q.get_pointList()
            self.ploywriter.poly(parts=[pointList])
            self.ploywriter.record(h.Id, h.Name, record['date'], record['hours'],
                                   record['kt%d-1' % Draw], record['kt%d-2' % Draw],
                                   record['kt%d-3' % Draw], record['kt%d-4' % Draw])
def transform_file(ead_file, outdir, index, template):
    # http://stackoverflow.com/questions/2507808/
    if os.stat(ead_file).st_size == 0:  # clearer than os.stat(ead_file)[6]
        return
    message, valid = eadator.validate(ead_file)
    if not valid:
        pp(message)
        return
    xml = etree.parse(ead_file).getroot()
    # http://stackoverflow.com/questions/541390/
    fileName, fileExtension = os.path.splitext(ead_file)
    htmlFile = os.path.basename(fileName)
    htmlSubDir = os.path.basename(os.path.dirname(fileName))
    htmlOutDir = os.path.join(outdir, htmlSubDir)
    outFile = os.path.join(htmlOutDir, ''.join([htmlFile, '.html']))
    index.append(os.path.join('./', htmlSubDir, ''.join([htmlFile, '.html'])))
    mkdir_p(htmlOutDir)
    try:
        if template:
            res = XSLT(xml, template=''.join(["'", template, "'"]))
        else:
            res = XSLT(xml)
        res.write(outFile)
    except etree.XSLTApplyError:
        pp(XSLT.error_log)
def test(graph):
    optimum = compute_tsp_pd(graph, max_weight=10)
    pp(optimum)
    results, tests = run_tests(graph)
    for test in xrange(len(tests)):
        print tests[test]
        best_iter = 1000
        best_val = State.infinite
        for value, step in results[test]:
            if value < best_val:
                best_val = value
                best_iter = step
            elif value == best_val:
                best_iter = min(best_iter, step)
        print best_val, best_iter
        # print results[test]
        # print
        # print result
        # print
        # print str(min(result) - optimum[0])
        print
def read(self, verbose=False):
    """
    :param self: Worker.Worker
    :param verbose: boolean
    """
    # p.has_key(q) rewritten as q in p (same behavior, also Python 3 safe)
    getOrElse = lambda p, q: float(p[q]) * 1825 if q in p else 0.0
    csvdict = csv.DictReader(self.fd)
    # Let's store everything in memory
    alldata = [d for d in csvdict]
    if verbose:
        pp(alldata)
    else:
        pp(len(alldata))
    last_id = None
    hrecords = []
    name = None
    hurricane_id = None
    for entry in alldata:
        if entry['Storm identification number'] != last_id and last_id != None:
            self.hurricanes.append(self.processRecords(hrecords, name, hurricane_id))
            hrecords = []
        hurricane_id = entry['Storm identification number']
        name = entry['storm name']
        hrecords.append((entry['year'] + entry['MMDDHH'].rjust(6, '0')[0:4],
                         float(entry['MMDDHH'][-2:]),
                         -float(entry['longitude (deg W)']),
                         float(entry['latitude (deg N)']),
                         # 34 knots
                         getOrElse(entry, 'NE(34)'), getOrElse(entry, 'NW(34)'),
                         getOrElse(entry, 'SE(34)'), getOrElse(entry, 'SW(34)'),
                         # 50 knots
                         getOrElse(entry, 'NE(50)'), getOrElse(entry, 'NW(50)'),
                         getOrElse(entry, 'SE(50)'), getOrElse(entry, 'SW(50)'),
                         # 64 knots
                         getOrElse(entry, 'NE(64)'), getOrElse(entry, 'NW(64)'),
                         getOrElse(entry, 'SE(64)'), getOrElse(entry, 'SW(64)')))
        last_id = entry['Storm identification number']
    self.draw(Draw)  # Draw is assumed to be defined at module level
def cli(family_file, family_type, to_json, to_madeline, to_ped, to_dict,
        outfile, logfile, loglevel):
    """Cli for testing the ped parser."""
    from pprint import pprint as pp
    my_parser = FamilyParser(family_file, family_type)
    if to_json:
        if outfile:
            outfile.write(my_parser.to_json())
        else:
            print(my_parser.to_json())
    elif to_madeline:
        for line in my_parser.to_madeline():
            if outfile:
                outfile.write(line + '\n')
            else:
                print(line)
    elif to_ped:
        for line in my_parser.to_ped():
            if outfile:
                outfile.write(line + '\n')
            else:
                print(line)
    elif to_dict:
        pp(my_parser.to_dict())
def test_run():
    print "\n>>> print list(Q('url',ex1))"
    print list(Q('url', ex1))
    assert list(Q('url', ex1)) == ['url1', 'url2', 'url3']
    assert Ql('url', ex1) == ['url1', 'url2', 'url3']

    print "\n>>> print list(Q(['name','id'],ex1))"
    print list(Q(['name', 'id'], ex1))
    assert Ql(['name', 'id'], ex1) == ['Gregg', 'hello', 'gbye']

    print "\n>>> print Ql('more url',ex1)"
    print Ql('more url', ex1)

    print "\n>>> list(Q('extensions',ex1))"
    print list(Q('extensions', ex1))

    print "\n>>> print Ql('extensions',ex1)"
    print Ql('extensions', ex1)

    print "\n>>> printout(['name','extensions'],[ex1,], extrasaction='ignore')"
    printout(['name', 'extensions'], [ex1, ], extrasaction='ignore')
    print "\n\n"

    from pprint import pprint as pp
    print "-- note that the extension fields are also flattened! (and N/A) -- "
    pp(denorm(['location', 'fxVersion', 'notthere', 'survey_data extensions'],
              [ex2, ], default="N/A")[:2])
def lines(self, ids=None, done=None):
    def _compute_tree(cr, uid, ids, field_names, context=None):
        account_obj = self.pool.get('account.analytic.account')
        recres = []

        def recursive_computation(account, res=None, level=0):
            res = res or []
            res.append((account, level))
            for son in account.child_complete_ids:
                recursive_computation(son, res, level + 1)
            return res

        for account in account_obj.browse(cr, uid, ids, context=context):
            recres.extend(recursive_computation(account))
        return recres

    ctx = self.context.copy()
    obj_account = self.pool.get('account.analytic.account')
    obj_report = self.pool.get('isf.hgh.coaa.account')
    report_data = obj_report.read(self.cr, self.uid, ctx['active_ids'], ['account'])
    if report_data[0]['account']:
        ids = [report_data[0]['account'][0]]
    else:
        ids = obj_account.search(self.cr, self.uid, [('parent_id', '=', False)])
    res = _compute_tree(self.cr, self.uid, ids,
                        ['type', 'code', 'name', 'debit', 'credit', 'balance',
                         'parent_id', 'level', 'child_complete_ids'], ctx)
    pp(res)
    return res
def test_top_value_info(self):
    pp(self.field_struct)
    assert self.field_struct['field_0']['top_values']['top_values'] == 'not shown - all are unique'
    assert self.field_struct['field_1']['top_values']['2'] == '1'
    assert self.field_struct['field_1']['top_values']['6'] == '2'
    assert self.field_struct['field_1']['top_values']['8'] == '1'
    assert self.field_struct['field_1']['top_values']['19'] == '1'
def setup():
    print("")
    print("creating registration controller")
    global registration
    registration = RegistrationController(app.config)
    registration.set_db(db)
    pp(registration)
def call_n_times(times, func, *args):
    start = time.clock()  # note: time.clock() was removed in Python 3.8; use time.perf_counter() there
    pp("Starting loop")
    for i in range(times):  # was range(1, times), which called func one time too few
        func(*args)
    end = time.clock()
    pp("Elapsed time is {}".format(end - start))
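# Hedged usage sketch: timing a trivial call 1000 times. Assumes only the
# standard library; _double is a hypothetical stand-in for any callable.
def _double(x):
    return x * 2

call_n_times(1000, _double, 21)  # pp-prints "Starting loop" and the elapsed time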
def _get_attribute_type_conversion(self, cr, uid, job, conversion_type,
                                   mappingline, record):
    attribute_obj = self.pool.get('product.attribute')
    transform_dict = attribute_obj._frontend_options()
    pp(record)
    return {'ttype': transform_dict[record[mappingline['mage_fieldname']]]}
def wait_for_object_state(self, field, value, method, key, *args):
    '''
    This method provides an abstract way to poll any arbitrary object,
    such as pool, member, vip, etc.

    :param field: the attribute in the object that you want to monitor
    :param value: the final value that you want to see
    :param method: the show method that returns the object containing state
    :param key: the key to a sub-dict in the show method output that contains the state
    :param args: necessary args that must be passed to the show method
    :return: N/A, raise if 'value' is not seen within 60 seconds
    '''
    time.sleep(self.polling_interval)
    current_state = method(*args)[key]
    current_value = current_state[field]
    attempts = 0
    while value != current_value:
        sys.stdout.flush()
        current_state = method(*args)[key]
        pp(current_state)
        current_value = current_state[field]
        time.sleep(self.polling_interval)
        attempts = attempts + 1
        if attempts >= self.max_attempts:
            raise MaximumNumberOfAttemptsExceeded
def get_titles(self):
    """ Return a list of titles grabbed from a Topix Newswire page. """
    # grab topix content
    filename = wget.filename_from_url(self.topix_url)  # get filename
    print "[DEBUG] Downloading from topix..."
    with open(wget.download(self.topix_url)) as raw:  # download and open
        content = raw.readlines()  # save content as list
    print "[DEBUG] Content saved."
    try:
        remove(filename)  # remove downloaded file, if it exists
    except OSError:
        # this branch runs when the downloaded file cannot be removed;
        # the original message misleadingly blamed the download itself
        print "[DEBUG] Cannot remove downloaded topix file."
        return 0
    # filter results
    titles = []  # container for titles
    for line in content:
        if "<a t=\"artclick\"" in line:  # find and filter out title
            titles.append(self.rmtags(line[:line.find("<img")]).strip())
    pp(titles)  # pretty print titles to console
    # return list of titles
    return titles
def main():
    while True:
        query = raw_input('--> ')
        try:
            sock = socket.socket()
        except Exception as e:
            print e
            print "Interactor: Failure when creating socket"
        try:
            sock.connect((searchEngineIp, searchEnginePort))
        except Exception as e:
            print e
            print "Interactor: Failure when connecting to newsParser"
            exit(-1)
        try:
            sock.send(json.dumps({"query": query, "page": 0, "size": 10}))
        except Exception as e:
            print e
            print "Interactor: Failure when sending mesg"
        try:
            raw = sock.recv(100000)
        except Exception as e:
            print e
            print "Interactor: Failure recving mesg"
        pp(json.loads(raw))
        sock.close()
def __init__(self, *args, **kwargs):
    tk.Tk.__init__(self, *args, **kwargs)
    #tk.Tk.iconbitmap(self, default="clienticon.ico")
    tk.Tk.wm_title(self, "Sea of BTC client")

    container = tk.Frame(self)
    pp(dir(container))
    container.pack(side="top", fill="both", expand=True)
    container.grid_rowconfigure(0, weight=1)
    container.grid_columnconfigure(0, weight=1)

    self.frames = {}
    for F in (StartPage, PageOne, PageTwo, PageThree):
        frame = F(container, self)
        self.frames[F] = frame
        frame.grid(row=0, column=0, sticky="nsew")
    self.show_frame(StartPage)
Author: SoftLayer Technologies, Inc. <*****@*****.**>
"""
import SoftLayer.API
from pprint import pprint as pp

# Your SoftLayer API key and username.
USERNAME = '******'
API_KEY = 'set me'

# The VLAN id you wish to cancel
vlanId = 563298

# Declare the API client
client = SoftLayer.Client(username=USERNAME, api_key=API_KEY)
networkVlanService = client['SoftLayer_Network_Vlan']
billingItemService = client['SoftLayer_Billing_Item']

# Declaring an object mask to get the billing item information
objectMask = 'mask[billingItem]'

try:
    # Getting the Billing Item to cancel the VLAN service.
    vlan = networkVlanService.getObject(mask=objectMask, id=vlanId)
    pp(vlan)
    # Canceling the VLAN service.
    result = billingItemService.cancelService(id=vlan['billingItem']['id'])
    pp(result)
except SoftLayer.SoftLayerAPIError as e:
    print("Unable to cancel the VLAN. faultCode=%s, faultString=%s"
          % (e.faultCode, e.faultString))
    exit(1)
def variant_verification(
    store,
    institute_id,
    case_name,
    variant_id,
    sender,
    variant_url,
    order,
    comment,
    url_builder=None,
    mail=None,
    user_obj=None,
):
    """Send a verification email and register the verification in the database

    Args:
        store(scout.adapter.MongoAdapter)
        institute_id(str): institute _id
        case_name(str): case display name
        variant_id(str): the variant _id (snv or sv)
        sender(str): current_app.config['MAIL_USERNAME']
        variant_url(str): the complete url to the variant (snv or sv), a link
            that works from outside the scout domain.
        order(str): "False" == cancel order, "True" == order verification
        comment(str): sender's entered comment from the form
        url_builder(flask.url_for): for testing purposes, otherwise the test
            verification email fails because it is out of context
        mail(flask_mail.Mail): optional mail handle, for testing
        user_obj(dict): optional user object, for testing
    """
    url_builder = url_builder or url_for
    mail = mail or ex_mail
    user_obj = user_obj or store.user(current_user.email)
    data = variant_controller(
        store,
        institute_id,
        case_name,
        variant_id=variant_id,
        add_case=True,
        add_other=False,
        get_overlapping=False,
    )
    variant_obj = data["variant"]
    case_obj = data["case"]
    institute_obj = data["institute"]
    pp(variant_obj)  # debug output
    recipients = institute_obj["sanger_recipients"]
    if len(recipients) == 0:
        raise MissingVerificationRecipientError()

    view_type = None
    email_subject = None
    category = variant_obj.get("category", "snv")
    display_name = variant_obj.get("display_name")
    chromosome = variant_obj["chromosome"]
    position = variant_obj["position"]
    end_chrom = variant_obj.get("end_chrom", chromosome)
    chr_position = (
        ":".join([chromosome, str(variant_obj["position"])]) if category in ["snv"] else "-"
    )
    breakpoint_1 = (
        ":".join([chromosome, str(variant_obj["position"])])
        if category in ["sv", "cancer_sv"]
        else "-"
    )
    breakpoint_2 = (
        ":".join([end_chrom, str(variant_obj.get("end"))])
        if category in ["sv", "cancer_sv"]
        else "-"
    )
    variant_size = variant_obj.get("length")
    panels = ", ".join(variant_obj.get("panels", []))
    gene_identifiers = [
        str(ident)
        for ident in variant_obj.get("hgnc_symbols", variant_obj.get("hgnc_ids", []))
    ]
    hgnc_symbol = ", ".join(gene_identifiers)
    email_subj_gene_symbol = None
    if len(gene_identifiers) > 3:
        # e.g. "5 genes"; the original concatenation produced "5genes"
        email_subj_gene_symbol = " ".join([str(len(gene_identifiers)), "genes"])
    else:
        email_subj_gene_symbol = hgnc_symbol

    gtcalls = [
        "<li>{}: {}</li>".format(sample_obj["display_name"], sample_obj["genotype_call"])
        for sample_obj in variant_obj["samples"]
    ]
    tx_changes = []
    external_primer_link = ""

    if category == "snv":  # SNV
        view_type = "variant.variant"
        tx_changes = []
        external_primer_link = external_primer_order_link(variant_obj, case_obj["genome_build"])
        for gene_obj in variant_obj.get("genes", []):
            for tx_obj in gene_obj["transcripts"]:
                # select refseq transcripts as "primary"
                if not tx_obj.get("refseq_id"):
                    continue
                for refseq_id in tx_obj.get("refseq_identifiers"):
                    transcript_line = []
                    transcript_line.append(gene_obj.get("hgnc_symbol", gene_obj["hgnc_id"]))
                    transcript_line.append("-".join([refseq_id, tx_obj["transcript_id"]]))
                    if "exon" in tx_obj:
                        transcript_line.append("".join(["exon", tx_obj["exon"]]))
                    elif "intron" in tx_obj:
                        transcript_line.append("".join(["intron", tx_obj["intron"]]))
                    else:
                        transcript_line.append("intergenic")
                    if "coding_sequence_name" in tx_obj:
                        transcript_line.append(urllib.parse.unquote(tx_obj["coding_sequence_name"]))
                    else:
                        transcript_line.append("")
                    if "protein_sequence_name" in tx_obj:
                        transcript_line.append(urllib.parse.unquote(tx_obj["protein_sequence_name"]))
                    else:
                        transcript_line.append("")
                    if "strand" in tx_obj:
                        transcript_line.append(tx_obj["strand"])
                    else:
                        transcript_line.append("")
                    if refseq_id in gene_obj["common"]["primary_transcripts"]:
                        transcript_line.append("<b>primary</b>")
                    else:
                        transcript_line.append("")
                    tx_changes.append("<li>{}</li>".format(":".join(transcript_line)))
    else:  # SV
        view_type = "variant.sv_variant"
        display_name = "_".join([breakpoint_1, variant_obj.get("sub_category").upper()])

    # body of the email
    html = verification_email_body(
        case_name=case_obj["display_name"],
        url=variant_url,  # the complete url to the variant, accessible when clicking on the email link
        display_name=display_name,
        category=category.upper(),
        subcategory=variant_obj.get("sub_category").upper(),
        breakpoint_1=breakpoint_1,
        breakpoint_2=breakpoint_2,
        chr_position=chr_position,
        hgnc_symbol=hgnc_symbol,
        panels=panels,
        gtcalls="".join(gtcalls),
        tx_changes="".join(tx_changes) or "Not available",
        name=user_obj["name"].encode("utf-8"),
        comment=comment,
        external_primer_link=external_primer_link,
    )

    # build a local link to the variant to be included in the events objects
    # (variant and case) created in the event collection.
    local_link = url_builder(
        view_type,
        institute_id=institute_obj["_id"],
        case_name=case_obj["display_name"],
        variant_id=variant_obj["_id"],
    )

    if order == "True":  # variant verification should be ordered
        # pin variant if it's not already pinned
        if case_obj.get("suspects") is None or variant_obj["_id"] not in case_obj["suspects"]:
            store.pin_variant(institute_obj, case_obj, user_obj, local_link, variant_obj)
        email_subject = "SCOUT: validation of {} variant {}, ({})".format(
            category.upper(), display_name, email_subj_gene_symbol
        )
        store.order_verification(
            institute=institute_obj,
            case=case_obj,
            user=user_obj,
            link=local_link,
            variant=variant_obj,
        )
    else:  # variant verification should be cancelled
        email_subject = "SCOUT: validation of {} variant {}, ({}), was CANCELLED!".format(
            category.upper(), display_name, email_subj_gene_symbol
        )
        store.cancel_verification(
            institute=institute_obj,
            case=case_obj,
            user=user_obj,
            link=local_link,
            variant=variant_obj,
        )

    kwargs = dict(
        subject=email_subject,
        html=html,
        sender=sender,
        recipients=recipients,
        # cc the sender of the email for confirmation
        cc=[user_obj["email"]],
    )
    message = Message(**kwargs)
    # send email using flask_mail
    mail.send(message)
"signal": [1, 2, 3] }, "noises": { "noise": [5, 6, 7] }, "rounds": { "round_59": { "fail": 1, "test_time": "2013-11-11 17:54:21", "test_result": "Pass" }, "round_60": { "fail": 2, "test_time": "2013-11-11 17:55:23", "test_result": "fail" } } } print("original:") pp(example) print("dict to xml:") myxml = dumps(example) print(myxml) print(type(myxml)) print("xml to dict:") mydict = loads(myxml) pp(mydict) print mydict[u"错误信息"]
def test_オブジェクト名を使用した複雑なマルチタームルールからfilterを生成できる(self):
    repository = DefinitionRepository()
    repository.add_host_object(hostname='network1', ipaddress='192.168.0.0/24')
    repository.add_host_object(hostname='network2', ipaddress='192.168.1.0/24')
    repository.add_host_object(hostname='host1', ipaddress='10.0.1.50/32')
    repository.add_host_object(hostname='host2', ipaddress='10.0.1.51/32')
    repository.add_port_object(portname='udp53', protocol='udp', port=53)
    repository.add_port_object(portname='tcp53', protocol='tcp', port=53)
    repository.add_port_object(portname='default_udp_highport1', protocol='udp', port='32768-65535')
    repository.add_port_object(portname='default_udp_highport2', protocol='udp', port='50000-65535')
    router = Router1()
    router.assign_interface(interfacename='irb100', filtername='irb100in', address='192.168.0.1/24')
    router.assign_interface(interfacename='irb110', filtername='irb110in', address='192.168.1.1/24')
    router.set_repository(repository)
    router.add_rule(
        name='TERM1',
        srcaddr=['network1', 'network2'],
        srcport=['default_udp_highport1', 'default_udp_highport2'],
        dstaddr=['host1', 'host2'],
        dstport=['udp53'],  # a protocol that differs from srcport's protocol cannot be specified
        action='accept',
        generate_return_rule=True,
        order_priority=49,
    )
    # TODO: check whether multiple protocols can be specified
    actual = router.create_filter_configuration()
    expect = [
        "set firewall filter irb100in term TERM1 source-address 192.168.0.0/24",
        "set firewall filter irb100in term TERM1 destination-address 10.0.1.50/32",
        "set firewall filter irb100in term TERM1 destination-address 10.0.1.51/32",
        "set firewall filter irb100in term TERM1 source-port 32768-65535",
        "set firewall filter irb100in term TERM1 source-port 50000-65535",
        "set firewall filter irb100in term TERM1 destination-port 53",
        "set firewall filter irb100in term TERM1 protocol tcp",
        "set firewall filter irb100in term TERM1 accept",
        "set firewall filter irb100in term TERM1 source-address 192.168.1.0/24",
        "set firewall filter irb100in term TERM1 destination-address 10.0.1.50/32",
        "set firewall filter irb100in term TERM1 destination-address 10.0.1.51/32",
        "set firewall filter irb100in term TERM1 source-port 32768-65535",
        "set firewall filter irb100in term TERM1 source-port 50000-65535",
        "set firewall filter irb100in term TERM1 destination-port 53",
        "set firewall filter irb100in term TERM1 protocol udp",
        "set firewall filter irb100in term TERM1 accept",
        "set firewall filter irb110in term TERM1 source-address 192.168.0.0/32",
        "set firewall filter irb110in term TERM1 destination-address 10.0.1.50/32",
        "set firewall filter irb110in term TERM1 destination-address 10.0.1.51/32",
        "set firewall filter irb110in term TERM1 source-port 32768-65535",
        "set firewall filter irb110in term TERM1 source-port 50000-65535",
        "set firewall filter irb110in term TERM1 destination-port 53",
        "set firewall filter irb110in term TERM1 protocol udp",
        "set firewall filter irb110in term TERM1 accept",
        "set firewall filter irb110in term TERM1 source-address 192.168.1.0/32",
        "set firewall filter irb110in term TERM1 destination-address 10.0.1.50/32",
        "set firewall filter irb110in term TERM1 destination-address 10.0.1.51/32",
        "set firewall filter irb110in term TERM1 source-port 32768-65535",
        "set firewall filter irb110in term TERM1 source-port 50000-65535",
        "set firewall filter irb110in term TERM1 destination-port 53",
        "set firewall filter irb110in term TERM1 protocol udp",
        "set firewall filter irb110in term TERM1 accept",
    ]
    pp(expect)
    pp(actual)
    assert_that(actual).is_equal_to(expect)
def test_get_db():
    o = col.find_one({'_id': 1234})
    html = o['html']
    # print html
    pp(o)
            nv = self.addVertex(t)
        self.vertList[f].addNeighbor(self.vertList[t], cost)

    def getVertices(self):
        return self.vertList.keys()

    def __iter__(self):
        return iter(self.vertList.values())


g = Graph()
for i in range(6):
    g.addVertex(i)
from pprint import pprint as pp
pp(g.vertList)
g.addEdge(0, 1, 5)
g.addEdge(0, 5, 2)
g.addEdge(1, 2, 4)
g.addEdge(2, 3, 9)
g.addEdge(3, 4, 7)
g.addEdge(3, 5, 3)
g.addEdge(4, 0, 1)
g.addEdge(5, 4, 8)
g.addEdge(5, 2, 1)
for v in g:
    for w in v.getConnections():
        print("( %s , %s )" % (v.getId(), w.getId()))
user = os.popen("whoami").read().strip()  # strip the trailing newline, or the ls path breaks
rpistr = "ls /media/" + user + " > usbs.txt"
usb = os.popen("ls /media/" + user).read()
system["USB"] = usb.split("\n")
# logger.warning("New mounted drive in system are: " + str(system["USB"]))
p = subprocess.Popen(rpistr, shell=True, preexec_fn=os.setsid)

def update_lense_server():
    data = system
    response = requests.put(const.LENSE_END_POINT + '/update/', json=data)
    if response.status_code == 200:
        return json.loads(response.content.decode('utf-8'))
    else:
        return response.status_code

house_info()
memory_stats()
cpu_stats()
process_stats()
sys_boot_time()
detect_usb_presence()
# print(update_lense_server())
# response = json.dump(update_lense_server())
print(system)
for key, value in memory.iteritems():
    pp((key, value))
time.sleep(5)
f = [len(str(factorial(x))) for x in range(20)]
print(f, type(f))

# Set Comprehensions
s = {len(str(factorial(x))) for x in range(20)}
print(s)

# Dictionary Comprehensions
urls = {
    'Google': 'https://google.com',
    'Pluralsight': 'https://pluralsight.com'
}
pp(urls)

# When there are duplicate keys, the later key replaces the earlier one
inverted_urls = {url: name for name, url in urls.items()}
pp(inverted_urls)

############################## Filtering Comprehensions #################################
from math import sqrt

def is_prime(x):
    if x < 2:
        return False
    for i in range(2, int(sqrt(x)) + 1):
        if x % i == 0:
            return False
    return True  # the snippet was truncated here; a number that survives the loop is prime
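# A short filtering comprehension using is_prime, in the same spirit as the
# sections above (assumes only the definitions already in this file):
primes = [x for x in range(101) if is_prime(x)]
pp(primes)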
import napalm
from pprint import pprint as pp
from time import sleep

driver = napalm.get_network_driver('ios')
list_of_devices = ['ios-xe-mgmt-latest.cisco.com']

for device in list_of_devices:
    connection = driver(hostname=device, username='******', password='******',
                        optional_args={'port': 8181})
    connection.open()
    destination = '8.8.8.8'
    pp(connection.get_facts())
    print('\n \n')
    pp(connection.is_alive())
    print('\n \n')
    pp(connection.get_interfaces())
    print('\n \n')
    pp(connection.get_lldp_neighbors())
    print('\n \n')
    pp(connection.get_arp_table())
    print('\n \n')
    # pp(connection.get_bgp_neighbors())
    # print('\n \n')
    pp(connection.get_config())
    print('\n \n')
    # pp(connection.get_interface_counters())
    # print('\n \n')
    pp(connection.get_interfaces_ip())
def main():
    result = 0
    P.custId = getUserInput(Prompt="Customer Id: ")
    P.owner = db.queryKeyPairs('Owner')
    P.group = db.queryKeyPairs('Groupname')
    ResSeparator = db.queryKeyPairs('ResourceIdSeparator')
    Separator = db.queryKeyPairs('FQDNseparator')
    #
    # These input options should be pulled from table "regions"
    #
    P.regionList = db.loadRegionTable()
    abbrvList = []
    for x in P.regionList:
        abbrvList.append(x['descr'])
    regionIdx = int(getUserInput(List=abbrvList)) - 1
    # P.region = P.regionList[regionIdx]['AWSRegion']
    P.awsAbbrv = P.regionList[regionIdx]['abbrv']
    P.resourceBase = ''.join([P.group, ResSeparator, P.custId])
    P.fqdnBase = ''.join([Separator, P.custId, Separator, P.awsAbbrv])
    regionInfo = P.regionList[regionIdx]['AWSRegion']
    cidrInfo = P.regionList[regionIdx]['CIDR']
    #
    # This results in two name formats. The first is the human readable
    # right to left format of the AWS resources:
    #
    #   alm-cliqr...
    #   alm-openvpn...
    #
    # It has also produced the base of the fqdn simulation we will be using
    #
    #   .cliqr.ore
    #   .opendns.ore
    #
    print P.resourceBase + ' - ' + P.fqdnBase
    #
    # Take care of a few general items.
    # First is to get the account id for your AWS account. You can find it
    # via the web access under your account information.
    #
    P.myId = boto3.client('sts').get_caller_identity()['Account']
    #
    # Even though the script allows you to specify a region/data center,
    # the script initially only pulls the region from your aws configuration
    # that was set up when you used the awscli to configure your access.
    #
    P.ec2c = boto3.client('ec2', region_name=regionInfo)
    r = P.ec2c.describe_availability_zones()
    azs = r.get('AvailabilityZones')
    az_list = [az.get('ZoneName') for az in azs if az.get('State') == 'available']
    P.aZone = az_list[0]
    #
    # Now get ready to create resources
    #
    P.ec2 = boto3.resource('ec2', region_name=regionInfo)
    #
    # First thing to do is to make sure the customer is not already set up in this region
    #
    if checkTagSet(P.ec2.vpcs, 'Customer', P.custId):
        print "Customer: %s already exists" % (P.custId)
        print "Exiting..."
        P.resText += "Customer: %s already exists" % (P.custId)
        result = -1
    else:
        print("Getting Resources from AWS")
        sys.stdout.flush()
        P.serverList = db.getServerInfo()
        #
        # Now that we have the server info from the database, let the user change the
        # default size for storage. Because I am lazy, and I don't want a confusing mess
        # for the user, only allow one size for secondary storage: only the DB servers
        # should have it, and they all have to be the same anyway.
        #
        # BAD CODING ALERT!!!
        # In P.serverList the servernames are what is used in the DB. If those names
        # change, this code breaks!! VERY BAD DESIGN, NEED TO RETHINK THIS!!!
        #
        serverSize = {}
        serverSize['Jira'] = P.serverList['jira']['mainStorage']
        serverSize['Confluence'] = P.serverList['conf']['mainStorage']
        serverSize['Database'] = P.serverList['pgres1']['extraStorage']
        response = ''
        while response == '':
            print
            print "Preconfigured Storage:"
            for a in serverSize.keys():
                print "%s - %sGiB" % (a, serverSize[a])
            print "Would you like to change any of these values?"
            response = getUserInput(Prompt="Y/y = Yes; ")
        if response in ('Y', 'y'):  # was `in 'Yy'`, which is also True for an empty string
            badValue = True
            while badValue:
                storage = 1
                try:
                    storage = int(getUserInput(Prompt="Storage for Jira: "))
                    badValue = False
                except ValueError:
                    print 'you must enter a number, try again'
            P.serverList['jira']['mainStorage'] = storage
            badValue = True
            while badValue:
                storage = 1
                try:
                    storage = int(getUserInput(Prompt="Storage for Confluence: "))
                    badValue = False
                except ValueError:
                    print 'you must enter a number, try again'
            P.serverList['conf']['mainStorage'] = storage
            badValue = True
            while badValue:
                storage = 1
                try:
                    storage = int(getUserInput(Prompt="Storage for the database: "))
                    badValue = False
                except ValueError:
                    print 'you must enter a number, try again'
            P.serverList['pgres1']['extraStorage'] = storage
            P.serverList['pgres2']['extraStorage'] = storage
            P.serverList['pgpool']['extraStorage'] = storage
        # pp(P.serverList)
        # return result
        #
        # Only allow AMIs that we have created.
        #
        for a in P.ec2.images.filter(DryRun=False, Owners=[P.myId]):
            for b in P.serverList.keys():
                if a.name == P.serverList[b]['AMI']:
                    P.serverList[b]['AMI_Id'] = a.image_id
                    P.serverList[b]['Disks'] = []
                    #
                    # We have it all, but actually we have more than the REST API will
                    # accept in the device mapping field. We will have to remove the
                    # "Encrypted" field from the "Ebs" dictionaries. There is a way to
                    # delete an entry from a dictionary, but I am brain dead right now so...
                    #
                    devCtr = 0
                    for devBlk in a.block_device_mappings:
                        # pp(devBlk)
                        P.serverList[b]['Disks'].append({})
                        P.serverList[b]['Disks'][devCtr]['DeviceName'] = devBlk['DeviceName']
                        P.serverList[b]['Disks'][devCtr]['Ebs'] = {}
                        P.serverList[b]['Disks'][devCtr]['Ebs']['DeleteOnTermination'] = True
                        P.serverList[b]['Disks'][devCtr]['Ebs']['SnapshotId'] = devBlk['Ebs']['SnapshotId']
                        #
                        # If the DB wants a larger primary drive than the AMI, allow it.
                        # Do not allow a smaller drive; there may be a reason it is as big as it is.
                        #
                        if devCtr == 0:
                            if P.serverList[b]['mainStorage'] > devBlk['Ebs']['VolumeSize']:
                                P.serverList[b]['Disks'][devCtr]['Ebs']['VolumeSize'] = P.serverList[b]['mainStorage']
                            else:
                                P.serverList[b]['Disks'][devCtr]['Ebs']['VolumeSize'] = devBlk['Ebs']['VolumeSize']
                        elif devCtr == 1:
                            if P.serverList[b]['extraStorage'] > devBlk['Ebs']['VolumeSize']:
                                P.serverList[b]['Disks'][devCtr]['Ebs']['VolumeSize'] = P.serverList[b]['extraStorage']
                            else:
                                P.serverList[b]['Disks'][devCtr]['Ebs']['VolumeSize'] = devBlk['Ebs']['VolumeSize']
                        P.serverList[b]['Disks'][devCtr]['Ebs']['VolumeType'] = devBlk['Ebs']['VolumeType']
                        devCtr = devCtr + 1
                    #
                    # Now the original image may not have specified extra storage so...
                    # check if the server is requesting it and it was not in the AMI.
                    # If that is true, add it to the list of devices, as /dev/sdb.
                    #
                    if devCtr == 1:
                        if P.serverList[b]['extraStorage'] > 0:
                            P.serverList[b]['Disks'].append({})
                            P.serverList[b]['Disks'][devCtr]['DeviceName'] = '/dev/sdb'
                            P.serverList[b]['Disks'][devCtr]['Ebs'] = {}
                            P.serverList[b]['Disks'][devCtr]['Ebs']['DeleteOnTermination'] = True
                            P.serverList[b]['Disks'][devCtr]['Ebs']['VolumeSize'] = P.serverList[b]['extraStorage']
                            P.serverList[b]['Disks'][devCtr]['Ebs']['VolumeType'] = 'gp2'
        oops = False
        for b in P.serverList.keys():
            if P.serverList[b].get('AMI_Id') is None:  # .get() in case no matching AMI was found
                if oops == False:
                    P.resText += "Cannot create at least one server, invalid AMI specified"
                    # print "Cannot create at least one server, invalid AMI specified"
                    oops = True
                # was p.resText (lowercase p), which raised a NameError
                P.resText += " Region %s - AMI not available: %s" % (regionIdx, P.serverList[b]['AMI'])
                # print " Region %s - AMI not available: %s"%(regionIdx, P.serverList[b]['AMI'])
        # pp(P.serverList)
        # sys.stdout.flush()
        if oops == True:
            result = -1
            return result
        #
        # Get the CIDR for the vpc
        #
        P.DBsubnets = db.getSubnetInfo()
        # print len(P.DBsubnets)
        # pp(P.DBsubnets)
        P.DBsecurity = db.getSecurityInfo(P.custId, P.myId)
        # pp(P.DBsecurity)
        # return result
        #
        # We have the security info, BUT...
        # Because security groups reference each other by security group id,
        # this table is not quite complete. It cannot be complete until we
        # have the security group ids. So we need to create them, BUT...
        #
        # Before we can do that we need the VPC in existence, because
        # security groups are associated with a single vpc...
        #
        # print "Creating VPC for %s"%(P.custId)
        P.resText += "Creating VPC for %s" % (P.custId)
        sys.stdout.flush()
        try:
            P.vpc = P.ec2.create_vpc(CidrBlock=cidrInfo)
        except Exception as exc:
            print "OOOPs!!! there is an error!!!"
            resCapture("Creating VPC Resource: {}".format(P.resourceBase), exc)
            result = -1
            return result
        setTags(P.vpc.id, P.resourceBase)
        print "  Creating Security Groups"
        P.resText += "  Creating Security Groups"
        sys.stdout.flush()
        counter = 0
        for groupname in P.DBsecurity.keys():
            sg = P.vpc.create_security_group(DryRun=False,
                                             GroupName=groupname,
                                             Description="Security group for %s" % (groupname))
            setTags(sg.id, ''.join([P.resourceBase, ResSeparator, groupname]))
            P.DBsecurity[groupname]['resource'] = sg
        #
        # We currently have an ALMOST complete security group profile.
        # What is missing is that the Amazon Resource IDs are not filled into the
        # "GroupId" of the "UserIdGroupPairs". Loop through every entry and fill in
        # the value saved in the "resource" key.
        #
        for groupname in P.DBsecurity.keys():
            counter = 0
            inLength = len(P.DBsecurity[groupname]['Inbound'])
            while counter < inLength:
                sgGroups = P.DBsecurity[groupname]['Inbound'][counter]['UserIdGroupPairs']
                numGroups = len(sgGroups)
                if numGroups > 0:
                    innerCtr = 0
                    while innerCtr < numGroups:
                        refGroup = sgGroups[innerCtr]['GroupId']
                        P.DBsecurity[groupname]['Inbound'][counter]['UserIdGroupPairs'][innerCtr]['GroupId'] = P.DBsecurity[refGroup]['resource'].id
                        innerCtr = innerCtr + 1
                sg = P.DBsecurity[groupname]['resource']
                # print counter
                counter = counter + 1
            # pp(P.DBsecurity[groupname])
            sg.authorize_ingress(DryRun=False, IpPermissions=P.DBsecurity[groupname]['Inbound'])
        #
        # We are not yet dealing with egress rules, though you can put them in the
        # database, so just let everything out until I have time to handle this
        # (this is the default).
        #
        #
        # Create and attach a Gateway
        #
        print "  Creating gateway"
        P.resText += "  Creating gateway"
        P.gateway = P.ec2.create_internet_gateway()
        P.gateway.attach_to_vpc(VpcId=P.vpc.id)
        setTags(P.gateway.id, P.resourceBase)
        #
        # The previous command did not just create a vpc, it also created
        # a route table, which is not what the documentation says it will do.
        # The documentation implies that the route table would still need to
        # be created. We need to get the ID of that route table so we can apply
        # the tags to it, and so that we can associate the subnets to it.
        #
        # Normally something that is iterable has an iterator attached to it.
        # Such is not the case for resources returned by Boto3...
        # Since there is only one route table at this point, this cheat will work.
        #
        for b in P.vpc.route_tables.all():
            P.routeTable = P.ec2.RouteTable(b.id)
        #
        # Here we need to create a route and associate it with the internet gateway
        #
        P.routeTable.create_route(DryRun=False, DestinationCidrBlock='0.0.0.0/0', GatewayId=P.gateway.id)
        setTags(P.routeTable.id, P.resourceBase)
        for snet in P.DBsubnets:
            tSnet = P.ec2.create_subnet(DryRun=False,
                                        VpcId=P.vpc.id,
                                        CidrBlock=P.DBsubnets[snet]['CIDR'],
                                        AvailabilityZone=P.aZone)
            P.DBsubnets[snet]['sNet_Id'] = tSnet.id
            P.routeTable.associate_with_subnet(DryRun=False, SubnetId=tSnet.id)
            setTags(tSnet.id, ''.join([P.resourceBase, ResSeparator, P.DBsubnets[snet]['Name']]))
            P.subnets.append(tSnet)
        # pp(P.DBsubnets)
        pp(P.subnets)
        # return
        print "  Creating VMs"
        P.resText += "  Creating VMs"
        #
        # Need to get security key id...
        # but right now who cares... I am just going to delete these for quite a while
        #
        try:
            for a in P.serverList.keys():
                # print "Starting server: %s image - %s"%(a, P.serverList[a]['AMI_Id'])
                if P.serverList[a]['AMI_Id'] != None:
                    # print "  subnet: %s"%(P.serverList[a]['subnet'])
                    sNetId = P.DBsubnets[P.serverList[a]['subnet']]['sNet_Id']
                    pubAddr = False
                    if P.DBsubnets[P.serverList[a]['subnet']]['pubPriv'] == 1:
                        pubAddr = True
                    print "  Standing up server %s in subnet %s" % (a, sNetId)
                    P.resText += "  Standing up server %s in subnet %s" % (a, sNetId)
                    for b in P.subnets:
                        if b.id == sNetId:
                            sgId = P.DBsecurity[P.serverList[a]['securityGroup']]['resource'].id
                            localIp = P.serverList[a]['localIp']
                            vm = P.ec2.create_instances(DryRun=False,
                                                        ImageId=P.serverList[a]['AMI_Id'],
                                                        MinCount=1,
                                                        MaxCount=1,
                                                        KeyName=P.serverList[a]['securityKey'],
                                                        InstanceType=P.serverList[a]['InstType'],
                                                        DisableApiTermination=True,
                                                        InstanceInitiatedShutdownBehavior='stop',
                                                        NetworkInterfaces=[{
                                                            'DeviceIndex': 0,
                                                            'SubnetId': sNetId,
                                                            'Groups': [sgId],
                                                            'PrivateIpAddress': P.serverList[a]['localIp'],
                                                            'AssociatePublicIpAddress': pubAddr
                                                        }],
                                                        BlockDeviceMappings=P.serverList[a]['Disks'])
                            #
                            # Since we are only standing up one at a time, only
                            # one instance will get its tags set.
                            #
                            # Did I ever mention that while I really like Python's flexibility,
                            # I REALLY HATE having to keep track of indentation... Just sayin'.
                            #
                            for instance in vm:
                                setTags(instance.id, ''.join([P.resourceBase, ResSeparator, a]))
                                instance.wait_until_running(DryRun=False,
                                                            Filters=[{
                                                                'Name': 'instance-id',
                                                                'Values': [instance.id, ]
                                                            }, ])
                                #
                                # Now that the instance is running, get all the information again.
                                # This will populate the external IP address if it was requested.
                                #
                                instance.reload()
                                if a == 'jump':
                                    P.jumpExtIP = instance.public_ip_address
                                setTags(instance.id, ''.join([P.resourceBase, ResSeparator, a]))
            #
            print
            print "Add the following lines to the file /etc/ssh/ssh_config"
            print
            print "Host %s" % (''.join(["*", P.fqdnBase]))
            print "    ForwardAgent yes"
            print "    User ec2-user"
            # ssh_config's %r/%h/%p tokens are meant literally; the original escaped
            # them as \% which would have printed stray backslashes.
            print "    ProxyCommand ssh %r@{} nc %h %p -w 10".format(P.jumpExtIP)
            print
            print
            print
            print "Use the following lines to replace the /etc/hosts file on the jump server"
            print
            print "127.0.0.1   localhost localhost.localdomain localhost4 localhost4.localdomain4"
            print "::1         localhost localhost.localdomain localhost6 localhost6.localdomain6"
            print
            for a in P.serverList.keys():
                if a != 'jump':
                    print "%s %s" % (P.serverList[a]['localIp'], ''.join([a, P.fqdnBase]))
            print
            #
            # Just found out there is a network resource created when an instance
            # is created. Currently it has no tags, so you can not see what is attached
            # to what. Not going to handle this now, but it should be done at some point
            # for completeness reasons.
            #
        except Exception as exc:
            print "OOOPs!!! there is an error!!!"
            resCapture("Creating EC2 instances: {}".format(''.join([P.resourceBase, ResSeparator, a])), exc)
            result = -1
    return result
import tools

print(tools.distance_straight(s1.coords, s2.coords))

import mapm
mo = mapm.Map(locations=[s1, s2])

import gmaps

cities_names = ['Moscow', 'Sarov', 'Krasnogorsk', 'Zelenograd', 'Dubna']
sample_filename = tools.write_sample_cities_data_to_file(cities_names)
locs_list = create_locations_list(sample_filename)
str_lst = [loc.to_str() for loc in locs_list]
for el in str_lst:
    print(el)

origin = Location("Moscow", name='Moscow')
raw_routes = []
for location in [locn for locn in locs_list if locn.name != origin.name]:
    raw_routes.append(gmaps.get_route(origin.coords, location.coords))

# !!! rewrite:
from operator import itemgetter
raw_routes.sort(key=itemgetter(-1))  # sort by duration

from pprint import pprint as pp
pp(raw_routes)

import plot_routes
add_points_coords_list = [(point.coords['lat'], point.coords['lng']) for point in locs_list]
add_points_annotes_list = [point.name for point in locs_list]
plot_routes.plot_route_on_basemap(
    raw_routes[0][0],
    raw_routes[0][1],
    [add_points_coords_list, add_points_annotes_list])  # plots nearest route
        'This permission should only be granted to OSF administrators. Allows a site to '
        'create, read, edit, and delete all information associated with this account.',
        is_public=False),
})


def normalize_scopes(scopes):
    """
    Given a list of public-facing scope names from a CAS token, return the list of internal scopes

    This is useful for converting a single broad scope name (from CAS) into the small
    constituent parts (as used by views)

    :param list scopes: a list of public-facing scopes
    """
    all_scopes = set()
    for sc in scopes:
        try:
            scope_tuple = public_scopes[sc]
            all_scopes |= scope_tuple.parts
        except KeyError:
            pass
    return all_scopes


if __name__ == '__main__':
    # Print some data to console, to help audit what views/core scopes map to a
    # given public/composed scope.
    # Although represented internally as a set, print as a sorted list for readability.
    from pprint import pprint as pp
    pp({k: sorted(v.parts) for k, v in public_scopes.items()})
        else:
            base_carrier = 'usps'
        if track_only:
            print 'Track Only'
            try:
                response = self._get_job_data(cr, uid, job,
                        'sales_order_shipment.addTrack',
                        [incrementid, base_carrier, 'Shipped', package.tracking_number])
            except Exception, e:
                print 'Exception', e
                return False
        else:
            print 'Shipment Then Track'
            item_data = self.get_item_data_from_package(cr, uid, package)
            pp(item_data)
            try:
                response = self._get_job_data(cr, uid, job,
                        'sales_order_shipment.create',
                        [incrementid, item_data, 'Order Has Shipped', True, False])
                self._get_job_data(cr, uid, job,
                        'sales_order_shipment.addTrack',
                        [response, base_carrier, 'Shipped', package.tracking_number])
            except Exception, e:
                print 'Exception', e
                return False
        return response

    def get_item_data_from_package(self, cr, uid, package):
        picking = package.picking
        picking_obj = self.pool.get('stock.picking')
    finally:
        CloseHandle(hProcessSnap)

from contextlib import contextmanager
from actpy.release import nt_service_name

def is_running_as_nt_service():
    @contextmanager
    def close_srv(srv):
        try:
            yield srv
        finally:
            ws.CloseServiceHandle(srv)

    try:
        with close_srv(ws.OpenSCManager(None, None, ws.SC_MANAGER_ALL_ACCESS)) as hscm:
            with close_srv(wsu.SmartOpenService(hscm, nt_service_name, ws.SERVICE_ALL_ACCESS)) as hs:
                info = ws.QueryServiceStatusEx(hs)
                return info['ProcessId'] == getppid()
    except Exception:
        return False

if __name__ == '__main__':
    from pprint import pprint as pp
    pp(listdir('../report', True))
    version_router.register(
        r'Version',
        ReversionsViewSet,
        base_name=vclass.model_class.__name__.lower() + "-versions",
    )
    vrouters.append(version_router)

# serious views
# so this does not work; need to check how to make this REST:
# router.register("execute/",
#                 proposal.views.ExecuteTemplates.as_view(),
#                 "execute")
# pp(router.get_urls())

from django.conf.urls import url, include

urlpatterns = [
    url(r'^instantiate/(?P<pk>\d+)/',
        proposal.views.ExecuteTemplates.as_view(),
        name="instantiate_one"),
    url(r'^instantiate/',
        proposal.views.ExecuteTemplates.as_view(),
        name="instantiate_all"),
    url(r'^createLatex/(?P<pk>\d+)$',
        proposal.views.CreateLatex.as_view(),
        name="create_latex"),
    url(r'^createLatex/$',
        proposal.views.CreateLatex.as_view(),
        elif choice == '3':
            remove_contact()
        elif choice.upper() == 'X':
            # exit()
            # return
            break
        else:
            print('INVALID CHOICE')
    print('SEE YA LATER!!')

# contacts.clear()  # removes all
c2 = contacts.copy()
c2 = contacts[:]  # identical
pp(c2)
c2.pop(0)
print('removed 0 from c2')
pp(contacts)
pp(c2)

print(contacts.count('Paul'))  # how many of a specific value
# WILL BE 0 BECAUSE 'Paul' != 'Paul,555-1313'

print('extending contacts with c2')
contacts.extend(c2)  # DIRECTLY MODIFIES contacts
pp(contacts)
all_contacts = contacts + c2  # SIMILAR but creates a copy
print(all_contacts)
# DISKS
disk = psutil.disk_partitions()
print(disk)
# psutil.disk_usage('/')

# Processes with open .log files
for p in psutil.process_iter(['name', 'open_files']):
    for file in p.info['open_files'] or []:
        if file.path.endswith('.log'):
            print("%-5s %-10s %s" % (p.pid, p.info['name'][:10], file.path))

# Top 3 processes by CPU time
pp([(p.pid, p.info['name'], sum(p.info['cpu_times']))
    for p in sorted(psutil.process_iter(['name', 'cpu_times']),
                    key=lambda p: sum(p.info['cpu_times'][:2]))][-3:])

# Processes consuming more than 500M of memory:
pp([(p.pid, p.info['name'], p.info['memory_info'].rss)
    for p in psutil.process_iter(['name', 'memory_info'])
    if p.info['memory_info'].rss > 500 * 1024 * 1024])

# pid of processes by name (assumes `from subprocess import check_output` above)
name = 'tmux'

def get_pid(name):
    return check_output(["pidof", name])

print('tmux pid')
def config():
    """ Dump config """
    pp(current_app.config)
lines = """93 SignatureLength N Required when trailer contains signature. Note: Not to be included within SecureData field
89 Signature N Note: Not to be included within SecureData field
10 CheckSum Y (Always unencrypted, always last field in message)"""

from pprint import pprint as pp
import string

lines = lines.split("\n")
lines = [x for x in lines if x and x[0] in string.digits]

trailerData = []
for line in lines:
    # print line
    x = line.split()[:3]
    trailerData.append(x)

if __name__ == '__main__':
    pp(trailerData)
def repl(self):
    self.repout()
    getin = None
    try:
        assert sys.version_info >= (3, 0)
        getin = input
    except:
        getin = raw_input
    while True:
        try:
            line = getin()
        except:
            break
        line = line.strip()  # strip() returns a new string; the result was being discarded
        if re.match(r'^show', line):
            self.repout()
        elif re.match(r'^set (.*)', line):
            let, to = re.match(r'^set (.*)', line).group(1).split('=')
            self.stor[let] = to
            self.repout()
        elif re.match(r'^unset (.*)', line):
            let = re.match(r'^unset (.*)', line).group(1)
            self.stor.pop(let)
            self.repout()
        elif re.match(r'^dict', line):
            pp(self.stor)
        elif re.match(r'^grep', line):
            self.grepout()
        elif re.match(r'^popular', line):
            self.apply_popular()
            self.repout()
        elif re.match(r'^reset', line):
            self.stor = {}
            self.repout()
        elif re.match(r'^find', line):
            ww = re.match(r'^find (.*)', line).group(1)
            cs = self.find_candidates(ww)
            if len(cs) == 1:
                self.setword(ww, cs[0])
                self.repout()
            else:
                pp(cs)
        elif re.match(r'^setword', line):
            res = re.match(r'^setword (.*)=(.*)', line)
            self.setword(res.group(1), res.group(2))
            self.repout()
        elif re.match(r'^help', line):
            print(helptext)
            self.repout()
        elif re.match(r'^brute', line):
            words = self.pre_bf()
            self.bruteforce(words)
            self.repout()
            for _ in range(1, 5):
                self.stor = {}
                print(' ')
                self.bruteforce(words)
                self.repout()
            print('Done')
        else:
            print("Buh?")
    return f"{base_url}&keywords={param.replace(' ', '%20')}"


def fetch_html_then_return_soup(url: str,
                                file_path: str = './templates/input.html') -> BeautifulSoup:
    # return annotation corrected: this returns a BeautifulSoup object, not a str
    r = requests.get(url)
    with open(file_path, 'wb') as f:
        logger.debug(f"Writing content from {url} to {file_path}.")
        f.write(r.content)
    logger.debug(f"Returning BeautifulSoup html.parser from {file_path}")
    return BeautifulSoup(open(file_path), 'html.parser')


if __name__ == '__main__':
    # the original called pp(indent=2) here, which raises a TypeError:
    # pprint requires an object to print
    soup = fetch_html_then_return_soup(url_builder())
    articles = soup.find_all('div', class_="flex_growChildren")
    data = []
    for article in articles:
        d = {
            'topic': '',
            'title': '',
            'author': '',
            'link': '',
            'image': '',
            'description': ''
        }
        d['topic'] = article.span.text.replace('\n', '').replace(" ", ' ').strip()
def save(self, tile, request):
    tile_edits = json.loads(request.POST.get('data'))['data']
    if request:
        address = {
            "geometry": {
                "spatialReference": {"wkid": 102100, "latestWkid": 3857}
            },
            "attributes": {
                "EAS_BaseID": None,
                "EAS_SubID": None,
                "CNN": None,
                "Address": None,
                "Address_Number": None,
                "Address_Number_Suffix": None,
                "Street_Name": None,
                "Street_Type": None,
                "Unit_Number": None,
                "Zipcode": None,
                "Block_Lot": None,
                "Longitude": None,
                "Latitude": None,
                "Location": None
            }
        }
        payload = {
            "adds": [],
            "updates": [],
            "deletes": '',
            "attachments": [],
            "rollbackOnFailure": False,
            "useGlobalIds": False,
            "f": "pjson",
            "token": "tTzVkJ7RPpZmqmlxc7xVBaORWK8vIKQenSkbmK13OnDfIHNKaNCIaH3i6Nz1AUbdnqkEsz8HuA-QqYrndP4yyqgov0NUgabK3lOO19erL-YYPtbIhEzahbSeQ0wPkJx1TH7RVL-gJ9m3iBsV9Affr0NczrLunSdj6rsa1Kg4QI8fTWpdgj0VCy7FaANWggjI6b7kDATtb43W9-hHxmndcjEU9S7lBzCfTty1b4GnAF3dmYhoh4ZBLC-XpsLetKEJ"
        }
        field_lookup = {
            "29862afe-4746-11e8-88b1-0242ac120006": "Block_Lot",
            # was "Street Name"; normalized to match the attributes template above
            "1a08f610-4746-11e8-b7cc-0242ac120006": "Street_Name",
            "1a08fbd8-4746-11e8-b7cc-0242ac120006": "Address_Number",
            "1a08f3cc-4746-11e8-b7cc-0242ac120006": "Address_Number_Suffix",
            "1a08f80e-4746-11e8-b7cc-0242ac120006": "CNN",
        }
        geometry_node = '2ad20702-4746-11e8-a9a0-0242ac120006'
        result_node = models.Node.objects.get(pk='1a08f3cc-4746-11e8-b7cc-0242ac120006')
        external_reference = Tile.objects.filter(nodegroup=result_node.nodegroup).filter(resourceinstance=tile.resourceinstance_id)
        tiles = Tile.objects.filter(resourceinstance=tile.resourceinstance_id)
        has_geom = False
        for t in tiles:  # renamed from "tile" so the parameter is not shadowed
            for tile_node, tile_value in t.data.iteritems():
                # if models.Node.objects.get(pk=tile_node).datatype == 'geojson-feature-collection':
                if tile_node == geometry_node:
                    geom = GEOSGeometry(json.dumps(tile_value['features'][0]['geometry']), srid=4326)
                    geom.transform(3857)
                    address['geometry']['x'] = geom.x
                    address['geometry']['y'] = geom.y
                    has_geom = True
                if tile_node in field_lookup:
                    address["attributes"][field_lookup[tile_node]] = str(tile_value)
        for edit_node, edit_value in tile_edits.iteritems():
            if edit_node == geometry_node:
                geom = GEOSGeometry(json.dumps(edit_value['features'][0]['geometry']), srid=4326)
                geom.transform(3857)
                address['geometry']['x'] = geom.x
                address['geometry']['y'] = geom.y
                has_geom = True
            if edit_node in field_lookup:
                address["attributes"][field_lookup[edit_node]] = str(edit_value)
        if has_geom:
            if len(external_reference) != 0:
                address["attributes"]["FID"] = int(external_reference[0].data["1a08f3cc-4746-11e8-b7cc-0242ac120006"])
                payload["updates"].append(address)
            else:
                payload["adds"].append(address)
            data = urllib.urlencode(payload).replace('None', 'null')
            url = self.config['external_address_url'] + '/applyEdits'
            req = urllib2.Request(url, data)
            f = urllib2.urlopen(req)
            response = f.read()
            response = json.loads(response)
            pp(payload)
            pp(response)
            if len(response['addResults']) > 0:
                if response['addResults'][0]['success'] == True:
                    result_tile = models.TileModel()
                    result_tile.resourceinstance = models.ResourceInstance.objects.get(pk=tile.resourceinstance_id)
                    result_tile.data = {"1a08f3cc-4746-11e8-b7cc-0242ac120006": str(response['addResults'][0]['objectId'])}
                    result_tile.nodegroup = result_node.nodegroup
                    result_tile.save()
            f.close()
    return tile
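# A hedged sketch of the same applyEdits POST under Python 3 with requests;
# urllib2/urlencode/iteritems above are Python 2 idioms. Assumes the same
# "url" and "payload" names; container and boolean values are JSON-encoded
# the way the ArcGIS REST API expects form fields.
import json
import requests

form = {k: json.dumps(v) if isinstance(v, (list, dict, bool)) else v
        for k, v in payload.items()}
response = requests.post(url, data=form).json()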
def test_transform_to_lower(self):
    self.config_man.add_env_vars(self.var_list, key_to_lower=True)
    pp(self.config_man.cm_config_env)
    assert self.config_man.cm_config_env['cletus_foo'] == 'bar'
    assert self.config_man.cm_config['cletus_foo'] == 'bar'
import requests
from pprint import pprint as pp

r = requests.get('https://api.github.com/events')
pp(r.content)
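# A small sketch: r.content is raw bytes, so pprint shows one long bytes
# literal; decoding via r.json() gives nested structures that pp can indent.
pp(r.json(), depth=2)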
def test_add_vars_using_arg_list(self):
    self.config_man.add_env_vars(self.var_list)
    pp(self.config_man.cm_config_env)
    assert self.config_man.cm_config_env['CLETUS_FOO'] == 'bar'
import SoftLayer.API
from pprint import pprint as pp

apiUsername = ''
apiKey = ''

client = SoftLayer.Client(
    username=apiUsername,
    api_key=apiKey,
)

# Virtual Guest Id
guest_id = 123456

guest = client['Virtual_Guest'].getObject(id=guest_id, mask="mask.monitoringUserNotification")
notification_id = guest['monitoringUserNotification'][0]['id']

# Create a list of SoftLayer_User_Customer_Notification_Virtual_Guest objects.
# Only the id property is required.
objects = [{'id': notification_id}]

# deleteObjects will return a bool
result = client['User_Customer_Notification_Virtual_Guest'].deleteObjects(objects)
pp(result)
import re
from pprint import pprint as pp


def logtolist(fname):
    logs = list()
    fields = ('ip_address', 'timestamp', 'request')
    mask = re.compile(r'(\d+\.\d+\.\d+\.\d+).+\[(.+)\]\s+\"(.+?)\"')
    with open(fname, 'rt') as logfile:
        for log in logfile:
            res = mask.search(log.strip())
            if res:
                logs.append({k: v for k, v in zip(fields, res.groups())})
    return logs


pp(logtolist('mini-access-log.txt'))
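# A short usage sketch, assuming the same log format: tally requests per IP
# with collections.Counter before pretty-printing the top offenders.
from collections import Counter

hits = Counter(entry['ip_address'] for entry in logtolist('mini-access-log.txt'))
pp(hits.most_common(5))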
def test_add_vars_using_schema(self):
    config_man = mod.ConfigManager(config_schema=self.config_schema)
    config_man.add_env_vars()
    pp(config_man.cm_config_env)
    assert config_man.cm_config_env['CLETUS_FOO'] == 'bar'
from pprint import pprint as pp

# NOTE: the opening of this first function was truncated upstream; the def
# line, dict initialisation, and file name below are hypothetical
# reconstructions based on how "mf" and SIDStudentDict are used.
def getSIDStudentDict():
    SIDStudentDict = {}
    with open('students.txt', 'r') as mf:
        lines = mf.readlines()
        for line in lines[2:]:
            line = line.split("|")
            SIDStudentDict[line[1].strip()] = line[0].strip()
    return SIDStudentDict


def getListOfCircuits():
    with open('projects.txt', 'r') as mf:
        lines = mf.readlines()
        circuits = []
        for line in lines[2:]:
            circuits.append(line.split()[0])
        return circuits


if __name__ == "__main__":
    # answer1 = getComponentCountByProject("082D6241-40EE-432E-A635-65EA8AA374B6")
    # answer2 = getComponentCountByStudent('S, Joe')
    # answer3 = getParticipationByStudent('Adams, Keith')
    # answer4 = getParticipationByProject('90BE0D09-1438-414A-A38B-8309A49C02EF')
    # answer5 = getProjectByComponent({'T71.386', 'C407.660', 'L103.001'})
    # answer6 = getStudentByComponent({'T71.386', 'C407.660', 'L103.001'})
    # answer7 = getComponentByStudent({'Gray, Tammy', 'Allen, Amanda', 'Baker, Craig'})
    # answer8 = getCommonByProject('90BE0D09-1438-414A-A38B-8309A49C02EF', '96CC6F98-B44B-4FEB-A06B-390432C1F6EA')
    # answer9 = getCommonByStudent('Allen, Amanda', 'Adams, Keith')
    # answer10 = getProjectByCircuit()
    # answer11 = getCircuitByStudent()
    answer12 = getCircuitByStudentPartial('Martin')
    pp(answer12)
    )
    if payment.type == 'card':
        payment.last_four = input("Last four digits of card: ")
    elif payment.type == 'gift_card':
        payment.gift_card_type = input("Type of gift card or coupon: ")
    receipt.add_payment(payment)

if strtobool(input("Do you have merchant information to add? (y/n)\n").lower()):
    merchant = Merchant(name=input("Name: "),
                        online=strtobool(input("Online (y/n): ")),
                        phone=input("Phone: "),
                        email=input("Email: "),
                        store_name=input("Branch: "),
                        store_address=input("Address (without postcode): "),
                        store_postcode=input("Postcode: "))
    receipt.set_merchant(merchant)

schema = ReceiptSchema()
pp(schema.dump(receipt))

if strtobool(input("Does this look right? (y/n)\n")):
    receipt_r = client.create_reciept(receipt)  # sic: method name as defined upstream
    if receipt_r:
        print('All ok!')
    else:
        print('Failure')
        print(receipt_r.data)
else:
    exit()