def __init__(self, sc=None):
    """Connect to Slack and start monitoring the configured channel.

    Args:
        sc: optional pre-built SlackClient. When None, a client is created
            from ``config.token``. Bug fix: the original unconditionally
            overwrote ``sc``, so an injected client was never used.
    """
    if sc is None:
        sc = slackclient.SlackClient(config.token)
    print('Kronk is live:')
    pp(sc.auth)
    self.sc = sc
    # Resolve the channel once; `monitor` yields batches of channel events.
    self.channel = sc.channel(config.channel_name)
    self.event_stream = sc.monitor('channels', self.channel['id'])
    # Text used to detect when the bot itself is @-mentioned in a message.
    self.own_mention = '<@{}>'.format(sc.auth['user_id'])
def handle(self):
    """List calendar events updated after the given timestamp and dump them."""
    cal_id = self.argument('calendar_id')
    since = pdl.parse(self.argument('updated_min'))
    self.info(f'Calendar Id: {cal_id}. Events Updated after {since}')
    pp(calendar.client.list_recent_updated(cal_id, since))
def calendar_notification(request: http.Request) -> dict:
    """Log an incoming calendar webhook request and acknowledge it."""
    summary = {
        'method': request.method,
        'url': request.url,
        'headers': dict(request.headers),
        'body': request.body.decode('utf-8'),
    }
    print(summary)
    pp(summary['headers'])
    return http.Response()
def print_log(text, logging=True, pretty=False, type="info"):
    """Conditionally print *text*.

    ``type="info"`` messages are emitted only when *logging* is truthy
    (pretty-printed when *pretty* is set); ``type="error"`` messages are
    always emitted. Any other *type* is silently ignored.
    (``type`` shadows the builtin, but the parameter name is part of the
    public interface and is kept.)
    """
    if type == "error":
        print(text)
        return
    if type != "info" or not logging:
        return
    # info path: choose the printer based on the `pretty` flag.
    (pp if pretty else print)(text)
async def details(ctx, pkg_name, index):
    """Show the 'metadata' values of a package.

    Bug fix: the original docstring described clearing the bring cache dir,
    which is unrelated to this command (copy/paste error).

    Args:
        ctx: CLI context; ``ctx.obj["bring"]`` holds the Bring instance.
        pkg_name: name of the package to look up.
        index: accepted for CLI compatibility; not used in this body.
    """
    bring: Bring = ctx.obj["bring"]
    pkg = await bring.get_pkg(pkg_name)
    vals = await pkg.get_values("metadata")
    import pp  # type: ignore  # local import kept as in the original
    pp(vals)
def trello_add_appointment_req(appointment_req: AppointmentRequest):
    """Create a Trello card in the booking ('預約') list for a request."""
    print('Appointment Request Received:')
    print(appointment_req)
    link = appointment_req.format_add_to_calendar_link()
    desc = f'新增到 Google Calendar: {link}\n\n{appointment_req.format_details()}'
    booking_list = trello.agent.tlist_of_name_opt('預約').value
    new_card = booking_list.add_card(
        name=appointment_req.format_title(),
        position='bottom',
        desc=desc,
    )
    print('Card added:')
    pp(new_card)
    return {}
def add_measuring_card(self, sess, event):
    """Create a Trello measuring ('丈量') card for *event* and record the
    event<->card mapping in the DB session."""
    self.info('Adding measuring card')
    measuring_list = trello.agent.tlist_of_name_opt('丈量').value
    # Card title is everything after the leading '量' marker and separators.
    title = re.match(r'量\W*(\w.*)', event['summary']).group(1)
    card = measuring_list.add_card(
        title,
        position='top',
        desc=f"丈量: {event['htmlLink']}",
        assign=[self.assignee],
    )
    mapping = db.TrelloCalendarMapping(
        card_id=card.id,
        calendar_id=self.calendar_id,
        calendar_event_type=db.EventType.measuring,
        calendar_event_id=event['id'],
    )
    pp(card.__dict__)
    pp(mapping)
    sess.add(mapping)
def handle_measuring_event(self, sess, event):
    """Sync a measuring ('量') calendar event with its Trello card.

    Creates a card when none is mapped; re-points an existing mapping when
    the event moved to a different calendar; otherwise does nothing.
    """
    mapping = (sess.query(db.TrelloCalendarMapping)
               .filter_by(calendar_event_id=event['id'])
               .one_or_none())
    self.info('Existing Event <-> Card Mappings:')
    pp(mapping)
    if not event['summary'].startswith('量'):
        self.info(f'Nothing to do for event {event["summary"]}')
        return
    if mapping is None:
        self.add_measuring_card(sess, event)
    elif mapping.calendar_id != self.calendar_id:
        # Event moved to another calendar: re-point the mapping and
        # re-assign/re-position the existing card.
        self.info('Updating measuring card assignee')
        mapping.calendar_id = self.calendar_id
        card = trello.client.get_card(mapping.card_id)
        card.assign(self.assignee)
        card.change_pos('bottom')
def cmd_parse(self, statement):
    """Split *statement* into ``(target, cmd, values)``.

    A first token ending in ':' names an assignment target (colon stripped)
    and the next token becomes the command; otherwise target is None and
    the first token is the command. On any failure the statement and stack
    are dumped before re-raising.
    """
    try:
        tokens = self.cmd_apply_lookups(statement)
        head = tokens.pop(0)
        if not head.endswith(':'):
            return None, head, tokens
        # Assignment operator: "name:" prefix.
        return head[:-1], tokens.pop(0), tokens
    except Exception as ex:
        print('Error parsing statement:')
        print('{}: {}'.format(ex.__class__.__name__, ex))
        print(' statement:', statement)
        print(' stack:')
        pp(self.stack.frames)
        raise
def handle(self):
    """Fetch recently-updated calendar events and sync measuring ones."""
    self.calendar_id = self.argument('calendar_id')
    self.updated_min = pdl.parse(self.argument('updated_min'))
    # Optional assignee argument -> Trello member (or None).
    self.assignee = (
        Optional.from_value(self.argument('assignee'))
        .map(lambda name: trello.agent.get_member(name))
        .get_or_none())
    self.info(f'Calendar Id: {self.calendar_id}. '
              f'Events Updated after {self.updated_min}')
    result = calendar.client.list_recent_updated(
        self.calendar_id, self.updated_min)
    pp(result)
    sess = db.Session()
    for event in result.get('items', []):
        is_measuring = (event['status'] == 'confirmed'
                        and event['summary'].startswith('量'))
        if is_measuring:
            self.info(f'Handling event {pp.fmt(event)}')
            self.handle_measuring_event(sess, event)
    sess.commit()
    sess.close()
# NOTE(review): the next line is the tail of a function whose `def` is above
# this chunk — kept as-is; indentation reconstructed from context.
    return df


def df_clean(df):
    # Normalize column casing and fold lng/lat columns into one LonLat column.
    # Operates on a copy; the caller's frame is untouched.
    df = df.copy()
    df = df_normalize_col_casing(df)
    if {'lng', 'lat'}.issubset(df):
        df['lonlat'] = df[['lng', 'lat']].apply(
            lambda lng_lat: LonLat(*lng_lat), axis=1)
        df = df.drop(['lng', 'lat'], axis=1)
    return df


def df_normalize_col_casing(df):
    # Rename all columns to snake_case (returns a renamed copy).
    return df.rename(columns=snakecase)


def df_with_dist(df, lonlat: LonLat):
    # Add a `dist_km` column: haversine distance from each row's `lonlat`
    # to the query point. Operates on a copy.
    df = df.copy()
    df['query_lonlat'] = [lonlat] * len(df)
    df['dist_km'] = df.apply(
        lambda x: haversine(x['lonlat'], x['query_lonlat']), axis=1)
    return df


if __name__ == '__main__':
    # Smoke test: boot the app, then dump a few sample queries.
    from api.app import new_app; new_app()
    pp(_raw_barcharts(['L5532282']))
    pp(barcharts(['L5532282']))
    pp(nearby_hotspots(test_lonlats['home']))
    pp(nearby_barcharts(test_lonlats['home']))
def handle(self):
    """Dump the raw attributes of every list on the Trello board."""
    # NOTE(review): this result is discarded — possibly a cache warm-up;
    # kept to preserve behavior, confirm whether it can be removed.
    trello.board.all_lists()
    for board_list in trello.board.list_lists():
        pp(board_list.__dict__)
def handle(self):
    """Dump the raw attributes of every card on the Trello board."""
    cards = trello.board.all_cards()
    for card in cards:
        pp(card.__dict__)
import requests
import pp
import datetime

# Fetch current weather for a city and print it only on "prime" days.
city = input("Enter the city:")
# NOTE(review): the API key is hard-coded — move it to an env var/config.
# Bug fix: OpenWeatherMap expects `units=metric`; the original `units=matrics`
# is invalid, so the API silently fell back to Kelvin.
url = ('https://api.openweathermap.org/data/2.5/weather'
       '?q={}&appid=7f942b5c9115843184a274294cb88783&units=metric').format(city)
res = requests.get(url)
data = res.json()

x = datetime.datetime.now()
d = int(x.strftime("%d"))  # day of month, 1..31
print(d)
timezone = data['timezone']
print('Timezone:{}'.format(timezone))

# Trial division; the for/else fires only when no divisor was found.
if d > 1:
    for i in range(2, d):
        if (d % i) == 0:
            print(d, "Date is not a prime so no date")
            break
    else:
        print(d, "Date is a prime number")
        pp(data)  # weather dump happens only on prime days (original behavior)
else:
    print(d, "Date is not a prime so no date")
def test_pp(self):
    """pp() on a list should pretty-print it to stdout."""
    words = ["hello", "world"]
    pp(words)
    self.assertStdout("['hello', 'world']")
########################################################### input: SS=[ [5, 6, 7, 8, 30, 31, 1, 3], [11, 12, 13, 2, 1, 32], [14, 15, 16, 33, 17, 18, 4, 3, 19], [20, 2, 21, 22, 23], [24, 34, 1, 26, 27] ] outout: [[ 5, 6, 7, 8, 30, 31, 1, 3, 0], [11, 12, 13, 2, 1, 32, 0, 0, 0], [14, 15, 16, 33, 17, 18, 4, 3, 19], [20, 2, 21, 22, 23, 0, 0, 0, 0], [24, 34, 1, 26, 27, 0, 0, 0, 0] ] pp(SS) def PaddingList2D(SS): MaxCols=max([len(r) for r in SS]) for k,S in enumerate(SS): S += [0] * (MaxCols - len(S)) return SS SS=PaddingList2D(SS) pp(SS) --------------------------------------------------- a = [[1, 2, 3], [4, 5], [6, 7, 8, 9]] import numpy as np b = np.zeros([len(a),len(max(a,key = lambda x: len(x)))]) for i,j in enumerate(a):
#!/usr/bin/env python # encoding: utf-8 import pp def foo_method(x): print locals() foo_method(1) # {'x': 1} print(issubclass(foo_method.__class__, object)) # True, function is object, everything is object pp(globals()) """ { '__builtins__': <module '__builtin__' (built-in)>, '__doc__': '\nDecorator demos\n', '__file__': './decorator_demo.py', '__name__': '__main__', '__package__': None, 'foo_method': <function foo_method at 0x7f158ed03a28>, 'pp': <module 'pp' (pp_magic_module with pprint_mod='pprintpp')>, } """ print("function basic") print("Keyword arguments, compare with positional arguments") def f1(self, arg=0.88): print(locals()) print(arg) f1(0.99)
r = requests.get(path, params=params, headers=headers) # raise exception if response code is not HTTP SUCCESS (200) r.raise_for_status() if r.headers['content-type'] == 'application/json': return r.json() # parse json responses automatically return r r = get(baseUrl) print("Name of response dictionary key: ", r.keys()) print("Number of simulations: \t \t ", len(r['simulations']), "\n") print("\n Fields of first sim.:") pp(r['simulations'][0]) names = [sim['name'] for sim in r['simulations']] print("\n \n Names of all simulations:") pp(names) Sim = "TNG100-1-Dark" i = names.index(Sim) # names.index('Illustris-3') print(f"\n \n Index of simulation ({Sim}): {i}") sim = get(r['simulations'][i]['url']) print("\n \n Keys:") pp(sim.keys()) print("\n \n Number of dark matter particles: ", sim['num_dm']) print("\n Snapshots URL: \t \t ", sim['snapshots']) snaps = get(sim['snapshots']) print("\n Number of snapshots: \t \t ", len(snaps))
# NOTE(review): Python 2 fragment; the loop/if that the leading `continue`
# and `else:` belong to starts above this chunk — indentation reconstructed.
        continue
    else:
        # Collapse each kma entry from a list to its first element.
        for k1 in kma.keys():
            print kma[k1][0]
            kma[k1] = kma[k1][0]

keys = r.keys()
pp.pprint(r)

# Reload the RIAD mapping/address tables from the pickle.
with open("riad.pkl", "rb") as f:
    riad = cPickle.load(f)

print riad.keys()
r = riad['mapping']
a = riad['address']
b = riad['bogmapping']
pp(r['1000'])
print r['1000']['RIAD_NAME'][0][1]
pp(a)

r = riad['mapping']
# Rename mapping keys old -> new per renameDict.
for k, v in renameDict.iteritems():
    r[v] = r.pop(k)
pp.pprint(r['MFMC008'])
pp.pprint(r['IVF2081'])
pp.pprint(r['IVF9011'])
pp.pprint(r['AEPEY1'])

# Fresh reload, discarding the renames above.
with open("riad.pkl", "rb") as f:
    riad = cPickle.load(f)
r = riad['mapping']
a = riad['address']
def handle_events(self):
    """Pull the next batch of events from the stream and dispatch each."""
    print('Checking for new events...')
    batch = next(self.event_stream)
    for event in batch:
        pp(event)
        self.handle(event)
def DiscoverAllArtists():
    """Rebuild the 'All Your Artists' playlist: clear it, scan the user's
    playlists, and add each artist's top one or two tracks (deduped).

    NOTE(review): the bare `except:` clauses below swallow all errors
    (including KeyboardInterrupt) and the final one treats any failure as
    "done" — fragile, but kept; verify before changing.
    """
    toprint = "All Your Artists"
    uploadimage(toprint, discoalluri)
    offsetvar = 0    # pagination offset for the collection pass
    offsetvar2 = 0   # pagination offset for the add pass
    deleteoffset = 0
    all_tracks = []  # [name, artist] pairs already present/added
    pp("Deleting Old Tracks...")
    # Pass 1: empty the target playlist (up to 300 tracks, 100 per page).
    while deleteoffset < 300:
        for track in sp.playlist_tracks(discoalluri, limit=100)["items"]:
            pp("Deleting: " + track["track"]["name"])
            sp.playlist_remove_all_occurrences_of_items(
                discoalluri, [track["track"]["uri"]])
        deleteoffset += 100
        print(deleteoffset)
    # Pass 2: collect every [name, artist] pair from up to 250 playlists.
    while offsetvar < 250:
        for playlist in sp.current_user_playlists(limit=50,
                                                  offset=offsetvar)["items"]:
            pp("Searching..." + playlist["name"])
            for song in sp.playlist_tracks(playlist["uri"])["items"]:
                all_tracks.append([
                    song["track"]["name"],
                    song["track"]["artists"][0]["name"]
                ])
                #pp("Added: " + song["track"]["uri"])
        offsetvar += 50
    print("End Playlist Search. Adding Songs...")
    # Pass 3: for each song's artist, add their top (and second) track
    # unless an identical [name, artist] pair is already known.
    while offsetvar2 < 250:
        for playlist in sp.current_user_playlists(limit=50,
                                                  offset=offsetvar2)["items"]:
            try:
                pp("Searching..." + playlist["name"])
                for song in sp.playlist_tracks(playlist["uri"])["items"]:
                    newsong = sp.artist_top_tracks(
                        song["track"]["artists"][0]["uri"])["tracks"][0]
                    try:
                        # Second-best track may not exist; best-effort only.
                        newsong2 = sp.artist_top_tracks(
                            song["track"]["artists"][0]["uri"])["tracks"][1]
                    except:
                        pass
                    if [newsong["name"],
                            newsong["artists"][0]["name"]] not in all_tracks:
                        sp.user_playlist_add_tracks(usernamevar, discoalluri,
                                                    [newsong["uri"]])
                        all_tracks.append(
                            [newsong["name"], newsong["artists"][0]["name"]])
                        pp("Added Track: " + newsong["name"])
                    try:
                        # NOTE(review): if the lookup above failed, newsong2
                        # is stale from a previous iteration — confirm intent.
                        if [newsong2["name"], newsong2["artists"][0]["name"]
                                ] not in all_tracks:
                            sp.user_playlist_add_tracks(
                                usernamevar, discoalluri, [newsong2["uri"]])
                            all_tracks.append([
                                newsong2["name"],
                                newsong2["artists"][0]["name"]
                            ])
                            pp("Added Track: " + newsong2["name"])
                    except:
                        pass
            except:
                # Any failure while processing a playlist ends this page.
                print("All Songs Added")
                break
        offsetvar2 += 50
#return "Blog <{0}>, author: {1}".format(self.title, self.author) def __del__(self): """ Destructor """ class_name = self.__class__.__name__ print class_name, "destroyed" class WayneBlog(Blog): def __init__(self): Blog.__init__(self) #Blog.static_method(1, "2", 3) #wayne_blog = WayneBlog.build_from_props("Wayne", "My journey on python") wayne_blog = WayneBlog() wayne_blog.author = "Wayne" wayne_blog.title = "My journey on python" #wayne_blog.comments = ['foo comment'] # AttributeError: can't set attribute wayne_blog.comments.append('a') pp(vars(wayne_blog)) print wayne_blog print wayne_blog.title pp(issubclass(WayneBlog, Blog)) pp(isinstance(wayne_blog, Blog)) pp(isinstance(wayne_blog, WayneBlog)) #mc = Blog()
# Demo (Python 2) of the `pp` magic module's call/format API.
import pp

pp(["hello", "world"])    # the module itself is callable: pretty-print
pp.pprint("stuff")        # explicit pprint entry point
print pp.pformat("asdf")  # pformat returns the formatted string
print pp.fmt("stuff")     # fmt: alias returning the formatted string
print dir(pp)
print repr(pp)
def preprocessing():
    """Generate train/test data from wide-and-deep-keras.pp().

    Bug fix: the original `def preprocessing()` was missing its colon.

    Returns (in order):
        train_data_category, train_data_conti,
        test_data_category, test_data_conti,
        train_label, test_label, all_data
    """
    (train_data_category, train_data_conti, test_data_category,
     test_data_conti, train_label, test_label, all_data) = pp()
    return (train_data_category, train_data_conti, test_data_category,
            test_data_conti, train_label, test_label, all_data)


class DEEP_AND_FM():
    '''deep and fm class'''

    def __init__(self, layers=None, learning_rate=0.001, batch_size=256,
                 epochs=10, use_fm=True, use_deep=True, embedding_size=8,
                 optimizer='adam', feature_size=None, field_size=None,
                 loss_type='logloss'):
        """Store hyperparameters.

        Bug fixes vs. the original signature: `self.loss_type = 'logloss'`
        appeared inside the parameter list (syntax error), non-default
        params followed defaults, and `layers` used a shared mutable
        default. `feature_size`/`field_size` default to None so the
        keyword-call interface is preserved.

        Args:
            layers: hidden layer sizes; defaults to [32, 32, 32].
            feature_size: sample_size (as per the original comment).
            field_size: features dim.
        """
        # Fresh list per instance — avoids the shared-mutable-default bug.
        self.layers = [32, 32, 32] if layers is None else layers
        self.feature_size = feature_size  # sample_size
        self.field_size = field_size      # features dim
        self.learning_rate = learning_rate
        self.batch_size = batch_size
        self.epochs = epochs
        self.use_fm = use_fm
        self.use_deep = use_deep
        self.embedding_size = embedding_size
        self.optimizer = optimizer
        self.loss_type = loss_type