async def handle(ge, msg, base_settings):
    """Process one incoming Discord message: log it, sync per-server settings,
    and dispatch commands / reactions.

    Fix: the original did ``settings = base_settings``, which aliased the
    shared BASE_SETTINGS dict — template and per-server settings written into
    it leaked across messages and servers.  A shallow copy isolates each call.
    """
    author = msg.author
    msg_text = str(msg.content)
    server_id = str(msg.guild.id)
    channel_id = str(msg.channel.id)
    server_name = str(msg.guild.name)
    channel_name = str(msg.channel.name)
    msg_time = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime())
    log_text = "[" + msg_time + "] " + server_id + ", " + channel_id + " (" + \
        server_name + ", " + channel_name + "), " + str(author) + ": " + msg_text
    # Copy so per-server overrides never mutate the caller's base dict.
    settings = dict(base_settings)
    # Layer 1: defaults from the server template.
    template_json = get_json(settings["template_path"])
    for template_setting in template_json["server_template"]["settings"]:
        settings[template_setting] = template_json["server_template"][
            "settings"][template_setting]
    # Layer 2: this server's saved settings, if it is already registered.
    in_list = True
    servers = get_json(settings["server_path"])
    try:
        server = servers["servers"][server_id]
        for setting in server["settings"]:
            settings[setting] = server["settings"][setting]
    except KeyError:
        in_list = False
    if not in_list:
        ge.log(
            "[Server json not found, creating copy of server json template...]"
        )
        server_list = add_server(servers, template_json["server_template"],
                                 server_id, server_name)
        set_json(settings["server_path"], server_list)
    # executed if the message is a command
    if msg_text.startswith(settings["prefix"]):
        ge.log(log_text)
        cmd_context = Context()
        cmd_context.ge = ge
        cmd_context.msg = msg
        cmd_context.settings = settings
        await run_cmd(cmd_context)
    else:
        # Log everything when enabled, otherwise only the bot and the
        # hard-coded owner id.
        if settings["log"]:
            ge.log(log_text)
        elif str(author.id) in [str(ge.user.id), "163997823245746177"]:
            ge.log(log_text)
        if settings["reaction_rarity"] >= 0:
            await reaction(ge, msg, settings["path"] + "/data/reaction_words.txt",
                           settings["reaction_rarity"])
def get_videoid_by_channel_id(self, channel_id):
    """Look up the currently-live video of a YouTube channel.

    Returns a dict with Title/Ref/Date/Target/Thumbnails for the live item.
    Raises RuntimeError when the API reports no live video.
    """
    channel_info = get_json(
        rf'https://www.googleapis.com/youtube/v3/search?part=snippet&'
        rf'channelId={channel_id}&eventType=live&maxResults=1&type=video&'
        rf'key={self.api_key}')
    # 'items' may be absent (API error → KeyError) or an empty list when the
    # channel is simply not live (→ IndexError); the original only caught
    # KeyError and crashed on the empty-list case.
    try:
        item = channel_info['items'][0]
    except (KeyError, IndexError):
        self.logger.exception('Get vid error')
        raise RuntimeError('no live video found for channel %s' % channel_id)
    snippet = item['snippet']
    # "/" is removed from the title — presumably to keep it safe for file
    # paths; TODO confirm against callers.
    title = snippet['title'].replace("/", " ")
    vid = item['id']['videoId']
    date = snippet['publishedAt'][0:10]  # keep YYYY-MM-DD only
    return {
        'Title': title,
        'Ref': vid,
        'Date': date,
        'Target': f"https://www.youtube.com/watch?v={vid}",
        'Thumbnails': snippet['thumbnails']['high']['url']
    }
def getSimilarsFromUrl(self):
    """Fetch similar artists from last.fm and store their ndb keys on this
    Artist entity.

    Side effects: sets ``self.similars`` and persists the entity via
    ``self.put()``.  Returns [] early on fetch/parse failure; otherwise
    returns None (no explicit return at the end).
    """
    logging.error("getSimilars")
    # The entity's datastore id is the MusicBrainz id.
    mbid = self.key.id()
    similars = None
    # Cache lookup is disabled; ``similars`` is therefore always None and the
    # branch below always runs.
    #similar=memcache.get("similars of %s"%mbid)
    if similars is None:
        similars = []
        url = tools.get_url('lastfm', 'similar', mbid)
        j = tools.get_json(url)
        if j is None:
            return []
        try:
            a = j['similarartists']['artist']
        except:  # NOTE(review): bare except; KeyError is the expected case
            return []
        for i in a:
            # Only artists with a known MusicBrainz id can become keys.
            if i["mbid"] != "":
                try:
                    # CorrectArtist maps duplicate/renamed mbids to the
                    # canonical one — presumably a manual correction table;
                    # TODO confirm.
                    cmbid = CorrectArtist.by_id(i["mbid"])
                    if cmbid is not None:
                        skey = ndb.Key("Artist", cmbid.mbid)
                    else:
                        skey = ndb.Key("Artist", i['mbid'])
                    similars.append(skey)
                except:  # NOTE(review): bare except silently drops failures
                    pass
    self.similars = similars
    self.put()
def execute_request(request):
    """Query Ryanair availability for *request* and build Flight objects for
    every flight that still has fares left."""
    params = {
        'ADT': 1,
        'CHD': 0,
        'DateOut': request.date.isoformat(),
        'Destination': request.dest,
        'FlexDaysOut': 6,
        'INF': 0,
        'Origin': request.orig,
        'RoundTrip': 'false',
        'TEEN': 0,
        'ToUs': 'AGREED',
    }
    res = get_json('https://desktopapps.ryanair.com/de-de/availability',
                   params=params)
    flights = []
    for trip in res['trips']:
        for date in trip['dates']:
            for flight in date['flights']:
                if not flight['faresLeft']:
                    continue
                flights.append(Flight(
                    orig=trip['origin'],
                    dest=trip['destination'],
                    date_out=parse_full_date(flight['time'][0]),
                    date_in=parse_full_date(flight['time'][1]),
                    price=get_cheapest_fare_from_flight(flight),
                    flight_number=flight['flightNumber'],
                ))
    return flights
def get_similar(mbid=""):
    """Return [[name, mbid], ...] of artists similar to *mbid* from echonest,
    memcached per artist.

    Fixes: iterate the already-extracted list instead of re-indexing the raw
    JSON, and narrow the bare ``except`` to KeyError (the only expected
    failure when the response lacks the artists list).
    """
    data = memcache.get("similar_%s" % mbid)
    if data is not None:
        return data
    logging.error("GETTING SIMILAR FROM ECHONEST")
    url = tools.get_url('echonest', 'similar', mbid)
    j = tools.get_json(url)
    if j is None:
        return []
    similar = []
    try:
        artists = j['response']['artists']
    except KeyError:
        return []
    for i in artists:
        name = i['name']
        # Only artists with a foreign id carry a MusicBrainz id; the
        # "musicbrainz:artist:" prefix is 19 characters long.
        if 'foreign_ids' in i:
            s_mbid = i['foreign_ids'][0]['foreign_id'][19:]
            similar.append([name, s_mbid])
    logging.error("END OF ECHONEST CODE")
    memcache.set("similar_%s" % mbid, similar)
    return similar
def get_one_way_flights_by_time_periode(request):
    """Search Ryanair one-way fares departing inside the requested window and
    below the requested price cap."""
    params = {
        'departureAirportIataCode': request.orig,
        'language': 'de',
        'limit': '16',
        'market': 'de-de',
        'outboundDepartureDateFrom': request.date_from.strftime('%Y-%m-%d'),
        'outboundDepartureDateTo': request.date_to.strftime('%Y-%m-%d'),
        'offset': '0',
        'priceValueTo': request.max_price_value
    }
    res = get_json('https://services-api.ryanair.com/farfnd/3/oneWayFares',
                   params=params)
    flights = []
    for fare in res['fares']:
        outbound = fare['outbound']
        flights.append(Flight(
            orig=outbound['departureAirport']['iataCode'],
            dest=outbound['arrivalAirport']['iataCode'],
            date_out=parse_full_date_without_milisec(outbound['departureDate']),
            date_in=parse_full_date_without_milisec(outbound['arrivalDate']),
            price=outbound['price']['value'],
            flight_number=''))
    return flights
def live_info(self):
    """Query twitcasting's stream server and report live status + video id."""
    movie = get_json(
        f"https://twitcasting.tv/streamserver.php?target={self.target_id}&mode=client"
    )['movie']
    return {"Is_live": movie['live'], "Vid": str(movie['id'])}
def get_video(mid: int) -> dict:
    """Return title and link of the bilibili user's most recent upload."""
    info = get_json(
        f'https://space.bilibili.com/ajax/member/getSubmitVideos?mid={mid}&pagesize=1&tid=0&page=1&keyword=&order=pubdate'
    )
    # pagesize=1 + order=pubdate → the single newest video.
    latest = info['data']['vlist'][0]
    return {
        'Title': latest['title'],
        'Ref': f"https://www.bilibili.com/video/av{latest['aid']}",
    }
def income_overall_analysis(year=2018):
    """Build a per-person income table for selected offices in *year* and
    scatter-plot incomes per office.

    Side effects: prints the office list and shows a matplotlib window.
    """
    json_data = tools.get_json()
    json_data = tools.filter_data(json_data,
                                  lambda e: e['main']['year'] == year)
    # offices of interest
    # offices = [5, 4, 191, 4199, 482, 5963, 1397, 5953, 598, 979, 14, 607, 450]
    offices = [
        1, 3, 4, 5, 7, 14, 15, 17, 113, 146, 449, 450, 453, 456, 461, 467,
        594, 595, 596
    ]
    json_data = tools.filter_data(
        json_data, lambda e: int(e['main']['office']['id']) in offices)
    # Structured array: one row per declaration entry.
    income = np.array([],
                      dtype=[('person_id', 'i8'), ('office_id', 'i8'),
                             ('income_self', 'float64'),
                             ('income_rel', 'float64'),
                             ('income_total', 'float64')])
    for entry in json_data:
        person_id = int(entry['main']['person']['id'])
        office_id = int(entry['main']['office']['id'])
        # Split declared incomes into the person's own vs. their relatives'.
        income_self = sum(
            [e['size'] for e in entry['incomes'] if e['relative'] == None])
        income_rel = sum(
            [e['size'] for e in entry['incomes'] if e['relative'] != None])
        income = np.append(
            income,
            np.array([(person_id, office_id, income_self, income_rel,
                       income_self + income_rel)],
                     dtype=income.dtype))
    # List offices and abbreviations
    for o in offices:
        print(o, office_id2name[str(o)],
              abbreviate_name(office_id2name[str(o)]))
    # Plot 1: income in each office of interest
    N = len(offices)
    dx = []
    dy = []
    for x in range(N):
        income_slice = np.array(
            [item for item in income if int(item['office_id']) == offices[x]],
            dtype=income.dtype)
        for y in income_slice:
            # Only entries whose relatives' income is below 10M RUB are
            # plotted, but the y-value is the person's OWN income.
            # NOTE(review): ylabel says "income (rel)" while income_self is
            # plotted — one of the two looks wrong; confirm intent.
            if y['income_rel'] / 1000000.0 < 10:
                dx.append(x)
                dy.append(y['income_self'] / 1000000.0)
    plot1 = plt.scatter(x=dx, y=dy, s=5)
    plt.ylabel('income (rel), millions of RUB')
    plt.xticks(range(N),
               [abbreviate_name(office_id2name[str(o)]) for o in offices])
    plt.show()
def get_live_info(self):
    """Return the live_id of the user's current mirrativ broadcast, or None.

    Fix: ``live_info['onlive']`` itself can raise KeyError (profile without
    the field) but sat OUTSIDE the original try block, so the error it was
    meant to handle escaped.  It is now covered, and the not-live / error
    paths both return None explicitly.
    """
    live_info = get_json(
        f'https://www.mirrativ.com/api/user/profile?user_id={self.target_id}')
    try:
        nowlive = live_info['onlive']
        if nowlive:
            return nowlive['live_id']
        return None
    except KeyError:
        self.logger.exception('Get live info error')
        return None
def get_edges_json(log_filename, glyphid=None):
    """Read *log_filename* from the working dir and parse it via get_json.

    Returns ``{'edges': []}`` when the file cannot be read.

    Fix: the original opened the file without a context manager, leaking the
    handle whenever ``read()`` raised; ``with`` guarantees the close.  The
    parse call stays outside the try so parse errors propagate exactly as
    before.
    """
    result = {'edges': []}
    try:
        with open(op.join(working_dir(), log_filename)) as fp:
            content = fp.read()
    except (IOError, OSError):
        return result
    return get_json(content, glyphid)
def createEchoTag(self):
    """Populate ``self.tracks`` with track dicts for every song echonest
    returns for this genre (``self.param``).

    Each track dict may carry: ytid/img (when the Track entity is already
    cached), name, and a nested artist dict (mbid, name, logo, info, tags,
    similars) sourced from the Artist entity when one exists.
    """
    url = tools.get_url("echonest", "genre", self.param).replace(" ", "%20")
    j = tools.get_json(url)
    for d in j["response"]["songs"]:
        try:
            # foreign_id looks like "musicbrainz:artist:<mbid>"; songs
            # without one are skipped.
            mbid = d["artist_foreign_ids"][0]["foreign_id"].split(":")[2]
        except:  # NOTE(review): bare except; KeyError/IndexError expected
            continue
        track = {}
        track["artist"] = {}
        # Track entities are keyed "<title> - <artist>".
        tracKey = ndb.Key("Track", d["title"] + " - " + d["artist_name"])
        trac = tracKey.get()
        if trac is not None:
            track["ytid"] = trac.ytid
            track["img"] = "http://img.youtube.com/vi/" + trac.ytid + "/0.jpg"
        # NOTE(review): when trac is None the track dict has no "ytid"/"img"
        # keys — confirm downstream consumers tolerate that.
        # Resolve the canonical mbid via the correction table first.
        cmbid = CorrectArtist.by_id(mbid)
        if cmbid is not None:
            track["artist"]["mbid"] = cmbid.mbid
            artist = ndb.Key("Artist", cmbid.mbid).get()
        else:
            track["artist"]["mbid"] = mbid
            artist = ndb.Key("Artist", mbid).get()
        if artist is not None:
            # Prefer stored entity fields, falling back to echonest's name.
            if artist.name is not None:
                track["artist"]["name"] = artist.name
            else:
                track["artist"]["name"] = d["artist_name"]
            if artist.logo is not None:
                track["artist"]["logo"] = images.get_serving_url(artist.logo)
            if artist.info is not None:
                track["artist"]["info"] = artist.info
            if artist.tags is not None:
                track["artist"]["tags"] = artist.tags
            if artist.similars is not None:
                similars = []
                for s in artist.similars:
                    similar = s.get()
                    similars.append({"name": similar.name,
                                     "logo": similar.image,
                                     "mbid": similar.key.id()})
                track["artist"]["similars"] = similars
        else:
            # Unknown artist: fill with echonest name and empty placeholders.
            track["artist"]["name"] = d["artist_name"]
            track["artist"]["similars"] = []
            track["artist"]["logo"] = ""
            track["artist"]["info"] = ""
            track["artist"]["tags"] = []
        track["name"] = d["title"]
        self.tracks.append(track)
def get_hsl(self, is_live):
    """Collect HLS stream metadata for a mirrativ live id into a dict."""
    info = get_json(
        f'https://www.mirrativ.com/api/live/live?live_id={is_live}')
    return {
        'Title': info['shares']['twitter']['card']['title'],
        'Ref': info['streaming_url_hls'],
        'Target': info['share_url'],
        # Local date the info was fetched, not the broadcast start date.
        'Date': time.strftime("%Y-%m-%d", time.localtime()),
        'User': self.target_id,
    }
def get(self):
    """Serve last.fm's top tags as JSON, using memcache when warm."""
    tags = memcache.get("lastfm topTags")
    if tags is None:
        j = tools.get_json(tools.get_url("lastfm", "topTags", " "))
        # "seen live" is a noise tag on last.fm; drop it.
        tags = [{"name": t["name"]}
                for t in j["tags"]["tag"] if t["name"] != "seen live"]
        memcache.set("lastfm topTags", tags)
    self.response.out.write(json.dumps(tags))
def get(self):
    """Serve last.fm's top artists (name + mbid) as JSON, memcached."""
    artists = memcache.get("lastfm topArtists")
    if artists is None:
        j = tools.get_json(tools.get_url("lastfm", "topArtists", " "))
        artists = [{"name": a["name"], "mbid": a["mbid"]}
                   for a in j["artists"]["artist"]]
        memcache.set("lastfm topArtists", artists)
    self.response.out.write(json.dumps(artists))
def get_schedule(origin='FKB', destination='ZAD', start_date='2019-04-22',
                 months='16'):
    """Fetch the Ryanair schedule calendar for a route.

    Generalization: the previously hard-coded route and date window are now
    parameters; calling with no arguments behaves exactly as before.

    Side effect: prints the outbound dates.
    """
    query = {
        'Destination': destination,
        'IsTwoWay': 'false',
        'Months': months,
        'Origin': origin,
        'StartDate': start_date
    }
    res = get_json('https://desktopapps.ryanair.com/Calendar', params=query)
    print(res['outboundDates'])
    return res
def income_clustering(year):
    """Cluster (income_self, income_rel) pairs for office 14 in *year* with
    HDBSCAN, print cluster membership, and scatter-plot the clusters.

    Side effects: prints the data frame and clusters; shows a plot window.
    """
    json_data = tools.get_json()
    json_data = tools.filter_data(json_data,
                                  lambda e: e['main']['year'] == year)
    # NOTE(review): office id is compared without int() here, unlike the
    # sibling analysis functions — confirm the JSON stores it as int.
    json_data = tools.filter_data(json_data,
                                  lambda e: e['main']['office']['id'] == 14)
    data = []
    persons = []
    for entry in json_data:
        person_id = entry['main']['person']['id']
        # Own income vs. relatives' income for the 2-D feature vector.
        income_self = sum(
            [e['size'] for e in entry['incomes'] if e['relative'] == None])
        income_rel = sum(
            [e['size'] for e in entry['incomes'] if e['relative'] != None])
        data.append([income_self, income_rel])
        persons.append(person_id)
    dframe = pd.DataFrame(data, index=persons)
    dframe.columns = ['income_self', 'income_rel']
    print(dframe)
    classifier = hdbscan.HDBSCAN(min_cluster_size=5).fit(dframe)
    classified = []
    # Labels >= 0 are real clusters; -1 is HDBSCAN noise and is skipped.
    for label in set(filter(lambda x: x >= 0, classifier.labels_)):
        print('Cluster label: ', label)
        ids = [i for i, x in enumerate(classifier.labels_) if x == label]
        for i in ids:
            print(persons[i], person_id2name[str(persons[i])], data[i])
            classified.append((label, persons[i], data[i][0], data[i][1]))
        print('\n')
    colour_palette = sns.color_palette('deep', 20)
    cluster_colours = [colour_palette[x[0]] for x in classified]
    # NOTE(review): probabilities_ is indexed per input row, but `classified`
    # excludes noise points and is reordered by label — this zip pairs colours
    # with the wrong probabilities; confirm.
    cluster_member_colours = [
        sns.desaturate(x, p)
        for x, p in zip(cluster_colours, classifier.probabilities_)
    ]
    dx = [x[2] for x in classified]
    dy = [x[3] for x in classified]
    plt.scatter(x=dx, y=dy, s=10, linewidth=0, c=cluster_member_colours,
                alpha=1)
    plt.show()
def getlive_title(self, vid):
    """Fetch title/date metadata for a YouTube video id.

    Returns a dict with Title/Ref/Target/Date.  Raises RuntimeError when the
    lookup does not resolve to exactly one video (the original raised a bare
    RuntimeError with no message; a description is added for debuggability —
    callers catching RuntimeError are unaffected).
    """
    live_info = get_json(
        rf'https://www.googleapis.com/youtube/v3/videos?id={vid}&key={self.api_key}&'
        r'part=liveStreamingDetails,snippet')
    # Exactly one result is expected for a valid video id.
    if live_info['pageInfo']['totalResults'] != 1:
        self.logger.error('Getting title Failed')
        raise RuntimeError(f'video lookup failed for id {vid}')
    # JSON arrays become lists; [0] is the single matched video.
    item = live_info['items'][0]
    title = item['snippet']['title']
    date = item['snippet']['publishedAt'][0:10]  # keep YYYY-MM-DD only
    target = f"https://www.youtube.com/watch?v={vid}"
    return {'Title': title, 'Ref': vid, 'Target': target, 'Date': date}
def get(self):
    """Render the last.fm genres page, memcaching (name, url-suffix) pairs."""
    genres = memcache.get("lastfm genres")
    if genres is None:
        j = tools.get_json(tools.get_url("lastfm", "toptags", " "))
        # Keep each tag's name plus its URL with the first 23 chars
        # (the common site prefix) stripped.
        genres = [(i["name"], i["url"][23:]) for i in j["tags"]["tag"]]
        memcache.set("lastfm genres", genres)
        logging.error(genres)
    self.render("last.html", genres=genres)
def getLogo(self):
    """Return this artist's logo blob key, fetching and uploading it from
    fanart.tv on first use.

    Tries the HD logo first, then the regular one; returns None when the
    fanart lookup fails or neither logo exists.  On success the blob key is
    persisted on the entity.
    """
    if self.logo is None:
        mbid = self.key.id()
        logging.error("uploading photo")
        url = tools.get_url('fanart', 'artist', mbid)
        logging.error(url)
        j = tools.get_json(url)
        #logging.error(j)
        if j is None:
            return None
        # Prefer the HD logo; swap the path segment to get a smaller preview.
        try:
            logo = j['hdmusiclogo'][0]['url'].replace('fanart/', 'preview/')
            #logging.error(logo)
        except:  # NOTE(review): bare except; KeyError/IndexError expected
            try:
                logo = j['musiclogo'][0]['url'].replace('fanart/', 'preview/')
                #logging.error(logo)
            except:
                return None
        # Download the image bytes, then re-upload them into blobstore via a
        # multipart POST to our own upload handler.
        data = urlfetch.fetch(logo).content
        logging.error("creating base64")
        content_type, body = BlobstoreUpload.encode_multipart_formdata(
            [], [('file', mbid, data)])
        logging.error("createing uploaddir")
        response = urlfetch.fetch(
            url=blobstore.create_upload_url('/uploadblob'),
            payload=body,
            method=urlfetch.POST,
            headers={'Content-Type': content_type},
            deadline=30
        )
        logging.error("response.content")
        logging.error(response.content)
        # The upload handler responds with the blob key string.
        blob_key = blobstore.BlobKey(response.content)
        logging.error("blob_key")
        self.logo = blob_key
        logging.error("getLogo")
        logging.error(self)
        self.put()
    return self.logo
def getData(self):
    """Fetch this artist's last.fm profile and persist it on the entity.

    Populates info, name, image (largest size) and tag names, then put()s.
    """
    logging.error("getData")
    mbid = self.key.id()
    j = tools.get_json(tools.get_url("lastfm", "artistInfo", mbid))
    artist = j["artist"]
    self.info = strip_tags(artist["bio"]["content"])
    self.name = artist["name"]
    # Index 4 is the largest image size last.fm returns.
    self.image = artist["image"][4]["#text"]
    self.tags = [t["name"] for t in artist["tags"]["tag"]]
    self.put()
def post(self):
    """Handle an ISO file upload: save it, parse it, and record an Upload row.

    Fix: the output file was opened without a context manager and never
    closed on a write failure; ``with`` guarantees the handle is released.
    All user-facing messages are unchanged.
    """
    data = {}
    if 'isofile' in self.request.files:
        isofile = self.request.files['isofile'][0]
        original_fname = isofile['filename']
        if tools.is_valid_file(isofile):
            try:
                save_as = "uploads/" + original_fname
                with open(save_as, 'wb') as output_file:
                    output_file.write(isofile['body'])
                data['status'] = 'alert-success'
                data['message'] = "file %s was uploaded" % original_fname
                source = tools.get_json(save_as)
                upload = models.Upload(original_fname,
                                       datetime.datetime.now().isoformat(),
                                       source)
                self.session.add(upload)
                self.session.commit()
            except IOError:
                data['status'] = 'alert-danger'
                data['message'] = "IOError trying to record: %s" % original_fname
            except IntegrityError:
                # Unique-constraint violation: the file was uploaded before.
                self.session.rollback()
                data['status'] = 'alert-danger'
                data['message'] = "Please check if the file already exists, you must remove it before upload again the same file: %s" % original_fname
        else:
            data['status'] = 'alert-danger'
            data['message'] = "file %s with content-type or extension not allowed. Please, upload files with valid extension (.iso, .part)." % original_fname
    else:
        data['status'] = 'alert-danger'
        data['message'] = "You must select a file"
    data['uploaded_files'] = self.session.query(models.Upload).all()
    self.render("index.html", data=data)
async def run(cmd_context):
    """Currency-conversion command: ``<amount> <from> <to>``.

    Converts via fixer.io EUR-based rates (amount/rate[from]*rate[to]).

    Fix: three bare ``except:`` clauses are narrowed to the exceptions each
    step can actually raise, so programming errors are no longer swallowed.
    """
    # This uses "fixer" key in keys.json to access exchange rate API.
    # Contact Giovata if you want to know how to get one.
    base = "http://data.fixer.io/api/latest?access_key=" + get_json(
        cmd_context.settings["keys_path"])["fixer"]
    msg = cmd_context.msg
    args = cmd_context.args
    if len(args) != 3:
        await msg.channel.send("Invalid syntax.\n" +
                               get_properties()["syntax"].replace(
                                   "{PREFIX}",
                                   cmd_context.settings["prefix"]))
    else:
        try:
            amount = float(args[0])
        except ValueError:
            await msg.channel.send("Invalid amount.")
            return
        cur_from = args[1].upper()
        cur_to = args[2].upper()
        amount_string = args[0]
        url = f"{base}&from={cur_from}&to={cur_to}&amount={amount_string}"
        try:
            response = requests.get(url)
        except requests.RequestException:
            await msg.channel.send("An error occurred.")
            return
        data = response.json()
        rates = data["rates"]
        # Rates are EUR-based: normalize through EUR then into the target.
        try:
            in_euro = amount / rates[cur_from]
            converted = in_euro * rates[cur_to]
        except (KeyError, ZeroDivisionError):
            await msg.channel.send("Invalid currency code.")
            return
        conv_rounded = round(decimal.Decimal(converted), 2)
        await msg.channel.send(
            f"{amount_string} {cur_from} = {conv_rounded} {cur_to}")
def get_video_num(mid: int) -> int:
    """Return how many videos the bilibili user *mid* has uploaded."""
    nav = get_json(
        f'https://api.bilibili.com/x/space/navnum?mid={mid}&jsonp=jsonp')
    return nav['data']['video']
def getArtistTracks(genre):
    """Fetch an artist's top tracks from last.fm (logs the URL queried)."""
    url = tools.get_url("lastfm", "artisttoptracks", genre)
    logging.error(url)
    return tools.get_json(url)
def getEchoTagTracks(genre):
    """Return echonest's song list for *genre* (logs the URL queried).

    Fix: removed the unused ``playlist`` local the original built and
    immediately abandoned.
    """
    url = tools.get_url("echonest", "genre", genre).replace(" ", "%20")
    logging.error(url)
    j = tools.get_json(url)
    return j["response"]["songs"]
def get(self):
    """Build and render a YouTube embed playlist URL, driven by the 'tipo'
    request argument: an album/artist track query, or several last.fm modes.
    """
    tipo = self.request.get("tipo")
    if tipo == "album" or tipo == "artist":
        mbid = self.request.get("mbid")
        logging.error(self.request.arguments())
        # Sort order per type: albums by track number asc, artists by
        # hotness desc.
        d = {'album': ['number', 'asc'], 'artist': ['hottness', 'desc']}
        query = "select video from Tracks where %s_mbid='%s' order by %s %s" % (
            tipo, mbid, d[tipo][0], d[tipo][1])
        data = list(ndb.gql(query))
        # First video becomes the embed target, the rest the playlist.
        # NOTE(review): pop(0) raises IndexError on an empty result — confirm
        # callers guarantee at least one track.
        f = data.pop(0)
        first = f.video
        playlist = ""
        videos = []
        for d in data:  # NOTE(review): shadows the sort-order dict `d`
            if d.video not in videos:
                playlist = playlist + d.video + ","
                videos.append(d.video)
    elif tipo == "lastfm":
        modo = self.request.get("modo")
        videos = []
        playlist = ""
        # last.fm responses nest the track list under a mode-dependent key.
        tracks = "tracks"
        if modo == "hypped":
            url = tools.get_url("lastfm", "hyppedtracks", " ")
        elif modo == "top":
            url = tools.get_url("lastfm", "toptracks", " ")
        elif modo == "loved":
            url = tools.get_url("lastfm", "lovedtracks", " ")
        elif modo == "tag":
            # Disabled memcache fast path kept as-is below.
            """first=memcache.get("lastfm first %s")
            if first is not None:
                playlist=memcache.get("lastfm playlist %s")
                if playlist is not None:
                    url="http://www.youtube.com/embed/"+first+"?playlist="+playlist
                    self.render_playlist(url=url)"""
            genre = self.request.get("genre")
            url = tools.get_url("lastfm", "toptagtracks", genre)
            tracks = "toptracks"
        j = tools.get_json(url)
        for i in j[tracks]['track']:
            track_mbid = i['mbid']
            if track_mbid != "":
                song = i['name']
                artist = i['artist']['name']
                artist_mbid = i['artist']['mbid']
                # Reuse a stored video id when we have one; otherwise search
                # YouTube and queue a background task to ingest the artist.
                t = ndb.gql(
                    "select video from Tracks where track_mbid='%s'" % track_mbid)
                if t.get() is None:
                    video = track.get_video(artist, song)
                    if artist_mbid != "":
                        taskqueue.add(url='/artist',
                                      params={'mbid': artist_mbid})
                else:
                    video = t.get().video
                videos.append(video)
        if len(videos) >= 1:
            first = videos.pop(0)
            for i in videos:
                playlist = playlist + i + ","
    # NOTE(review): `first`/`playlist` are unbound when tipo matches neither
    # branch (or no videos were found) — this would raise NameError.
    url = "http://www.youtube.com/embed/" + first + "?playlist=" + playlist
    self.render_playlist(url=url)
def get_airports_raw_data():
    """Download the aggregated Ryanair airports dataset (de-de market)."""
    print('Getting airports information')
    endpoint = 'https://api.ryanair.com/aggregate/4/common?embedded=airports&market=de-de'
    return get_json(endpoint)
def mistrust_index(year=2018):
    """Score each person's declaration for *year* on five red-flag heuristics
    and return {person_id: score}.

    Heuristics (1 point each): huge savings vs. income, relatives' income
    dwarfing own income, zero total income, large real estate on low income,
    and ownership of a luxury car.  ``scored`` tallies how often each
    heuristic fired across all entries.
    """
    json_data = tools.get_json()
    json_data = tools.filter_data(json_data,
                                  lambda e: e['main']['year'] == year)
    rating = defaultdict(int)
    scored = defaultdict(int)
    for entry in json_data:
        person_id = entry['main']['person']['id']
        score = 0
        # 1: total income is low, but savings are huge
        income = sum([e['size'] for e in entry['incomes']])
        savings = 0
        for s in entry['savings']:
            # Savings are free text like "1 234,56 руб." — strip the currency
            # suffix and normalize separators before parsing.
            size = float(s.split('руб.')[0].replace(' ', '').replace(',', '.'))
            savings += size
        if (income > 0 and savings / income >= 5.0) or (income == 0
                                                        and savings > 0):
            score += 1
            scored[1] += 1
        # 2: personal income is low, but the relatives' income is huge
        income_self = sum(
            [e['size'] for e in entry['incomes'] if e['relative'] == None])
        income_rel = sum(
            [e['size'] for e in entry['incomes'] if e['relative'] != None])
        if (income_self > 0 and income_rel / income_self >= 5.0) or (
                income_self == 0 and income_rel > 0):
            score += 1
            scored[2] += 1
        # 3: zero total income(can be due to incorrectly submitted
        # declaration, but still not good)
        if income == 0:
            score += 1
            scored[3] += 1
        # 4: low income, but owns a lot in real estate
        estates_area = 0
        for estate in entry['real_estates']:
            # shall we exclude relatives?
            if not estate['square']:
                continue
            total = float(estate['square'])
            # Scale by ownership share when declared.
            if estate['share']:
                total *= float(estate['share'])
            estates_area += total
        if income / 1000000.0 < 1.0 and estates_area > 500.0:
            score += 1
            scored[4] += 1
        # 5: lux cars
        # NOTE(review): 'BWM' below looks like a typo for 'BMW' — that entry
        # can never match.
        lux_cars = [{
            'parent_name': 'BMW',
            'name': '3 series'
        }, {
            'parent_name': 'BMW',
            'name': '5 series'
        }, {
            'parent_name': 'BWM',
            'name': '7 series'
        }, {
            'parent_name': 'Acura',
            'name': 'Acura'
        }, {
            'parent_name': 'Audi',
            'name': 'A4'
        }, {
            'parent_name': 'Audi',
            'name': 'A6'
        }, {
            'parent_name': 'Audi',
            'name': 'A7'
        }, {
            'parent_name': 'Audi',
            'name': 'A8'
        }, {
            'parent_name': 'Alfa Romeo',
            'name': 'Giulietta'
        }, {
            'parent_name': 'Bentley'
        }, {
            'parent_name': 'Cadillac'
        }, {
            'parent_name': 'Ferrari'
        }, {
            'parent_name': 'Hummer'
        }, {
            'parent_name': 'Infinity'
        }, {
            'parent_name': 'Jaguar'
        }, {
            'parent_name': 'Lamborghini'
        }, {
            'parent_name': 'Land Rover'
        }, {
            'parent_name': 'Lexus'
        }, {
            'parent_name': 'Maserati'
        }, {
            'parent_name': 'Mercedes-Benz',
            'name': 'C-класс'
        }, {
            'parent_name': 'Mercedes-Benz',
            'name': 'E-класс'
        }, {
            'parent_name': 'Mercedes-Benz',
            'name': 'GL-класс'
        }, {
            'parent_name': 'Mercedes-Benz',
            'name': 'S-класс'
        }, {
            'parent_name': 'Porsche'
        }, {
            'parent_name': 'Rolls-Royce'
        }, {
            'parent_name': 'Saab',
            'name': '9-3'
        }, {
            'parent_name': 'Saab',
            'name': '9-5'
        }, {
            'parent_name': 'Volkswagen',
            'name': 'Phaeton'
        }, {
            'parent_name': 'Volvo',
            'name': 'S60'
        }, {
            'parent_name': 'Volvo',
            'name': 'S80'
        }]
        # has_lux stays 0/1 regardless of how many luxury cars are owned.
        has_lux = 0
        for vehicle in entry['vehicles']:
            if not vehicle['brand']:
                continue
            for item in lux_cars:
                if 'name' in item:
                    # Model-specific entries must match brand name, model
                    # name AND the vehicle's brand id via `carbrand`
                    # (module-level table — presumably brand-id metadata;
                    # TODO confirm).
                    for brand in carbrand:
                        if brand['parent_name'] == item[
                                'parent_name'] and brand['name'] == item[
                                    'name'] and brand['id'] == vehicle[
                                        'brand']['id']:
                            has_lux = 1
                else:
                    # Whole-marque entries match on brand name + id only.
                    for brand in carbrand:
                        if brand['parent_name'] == item[
                                'parent_name'] and brand['id'] == vehicle[
                                    'brand']['id']:
                            has_lux = 1
        score += has_lux
        scored[5] += has_lux
        rating[person_id] += score
    return rating
async def run(cmd_context):
    """Custom-command management: list/info/add/remove/clone/options
    sub-commands dispatched on argument count and args[0].

    NOTE(review): this block appears unfinished (it ends by sending "TODO.")
    and carries several live bugs flagged inline.
    """
    msg = cmd_context.msg
    args = cmd_context.args
    custom_commands = cmd_context.settings["custom_commands"]
    if len(args) == 1:
        if args[0] == "list":
            await msg.channel.send(list_commands(custom_commands))
        else:
            await error(msg, "Invalid syntax.")
    elif len(args) == 2:
        if args[0] == "info":
            await msg.channel.send(
                command_info(custom_commands, args[1].lower()))
        elif args[0] == "add":
            # Refuse to add when the alias already exists.
            if existing_alias(custom_commands, args[1]):
                await error(msg, "Invalid command.")
            else:
                server_json = get_json(cmd_context.settings["server_path"])
                server_json = add_command(
                    server_json, cmd_context.settings["id"],
                    default_command(cmd_context.settings["template_path"],
                                    args[1].lower()))
                set_json(cmd_context.settings["server_path"], server_json)
                await msg.channel.send(
                    "Added new command. Remember: `{}cmd options {} output <output text>`"
                    .format(cmd_context.settings["prefix"], args[1].lower()))
        elif args[0] == "remove":
            if existing_alias(custom_commands, args[1]):
                server_json = get_json(cmd_context.settings["server_path"])
                # Find this server's record, remove the command, persist.
                for server in server_json:
                    if server["id"] == cmd_context.settings["id"]:
                        server_json = remove_command(
                            server["settings"]["custom_commands"],
                            default_command(
                                cmd_context.settings["template_path"],
                                args[1].lower()))
                        set_json(cmd_context.settings["server_path"],
                                 server_json)
                        await msg.channel.send("Removed command.")
                        return
            else:
                await error(msg, "Invalid command.")
        else:
            await error(msg, "Invalid syntax.")
    elif len(args) == 3:
        if args[0] == "clone":
            # Source alias must exist; destination alias must not.
            if existing_alias(custom_commands, args[1]) == False:
                await error(msg, "Invalid command.")
            elif existing_alias(custom_commands, args[2]):
                await error(msg, "Invalid command.")
            else:
                server_json = get_json(cmd_context.settings["server_path"])
                server_id = cmd_context.settings["id"]
                for custom_command in server_json["servers"][server_id][
                        "settings"]["custom_commands"]:
                    if args[1] in custom_command["aliases"]:
                        previous_command = custom_command
                new_command = args[2]
                set_json(
                    cmd_context.settings["server_path"],
                    clone_command(server_json, server_id, previous_command,
                                  new_command))
                await msg.channel.send("Cloned command.")
                return
        elif args[0] == "options":
            command = args[1]
            option = args[2]
            if existing_alias(custom_commands, command):
                valid_options = [
                    "output", "description", "minpermlevel", "listed"
                ]
                valid_operations = [
                    "addalias", "removealias", "addrole", "removerole"
                ]
                if option in valid_options or option in valid_operations:
                    server_json = get_json(cmd_context.settings["server_path"])
                    server_id = cmd_context.settings["id"]
                    # NOTE(review): the checks below test `command` (the
                    # alias) against the option lists — `option` looks like
                    # the intended variable.  Also every args[3] access will
                    # raise IndexError because this whole branch runs under
                    # len(args) == 3.
                    if command in valid_options:
                        server_json = set_property(server_json, server_id,
                                                   command, option, args[3])
                    elif command in valid_operations:
                        if command == "addalias":
                            server_json = add_alias(server_json, server_id,
                                                    command, args[3])
                        elif command == "removealias":
                            server_json = remove_alias(server_json, server_id,
                                                       command, args[3])
                        # NOTE(review): duplicated "removealias" below —
                        # "removerole" was almost certainly intended, so
                        # remove_role is unreachable.
                        if command == "addrole":
                            server_json = add_role(server_json, server_id,
                                                   command, args[3])
                        elif command == "removealias":
                            server_json = remove_role(server_json, server_id,
                                                      command, args[3])
                        else:
                            await error(msg, "Invalid option.")
                            return
                    else:
                        await error(msg, "Invalid command.")
                        return
                    # NOTE(review): clone_command here references
                    # previous_command/new_command, which are only bound in
                    # the "clone" branch — NameError if reached.
                    set_json(
                        cmd_context.settings["server_path"],
                        clone_command(server_json, server_id,
                                      previous_command, new_command))
                else:
                    await error(msg, "Invalid option.")
            else:
                await error(msg, "Invalid command.")
        else:
            await error(msg, "Invalid syntax.")
    else:
        await error(msg, "Invalid syntax.")
    await msg.channel.send("TODO.")
def default_command(template_path, cmd_name):
    """Build a fresh custom-command dict from the template, registering
    *cmd_name* as its alias."""
    command = get_json(template_path)["custom_command_template"]
    command["aliases"].append(cmd_name)
    return command
async def run(cmd_context):
    """Settings command: ``set <key> <value>`` coerces and stores a per-server
    setting; ``list`` prints all editable settings.

    A setting is editable only when the template's server_maximum_settings
    entry for it is non-None; that entry also caps string length / numeric
    value (0 = no cap).
    """
    msg = cmd_context.msg
    args = cmd_context.args
    if len(args) == 3:
        if args[0] == "set":
            key = args[1].lower()
            template_json = get_json(cmd_context.settings["template_path"])
            # The key must exist in the server template.
            valid_setting = False
            for setting in template_json["server_template"]["settings"]:
                if key == setting:
                    valid_setting = True
            if not valid_setting:
                await msg.channel.send("Invalid setting.")
                return
            # None marks a setting as not user-editable.
            maximum = template_json["server_maximum_settings"][key]
            if maximum == None:
                await msg.channel.send("Invalid setting.")
            else:
                raw_value = args[2]
                server_json = get_json(cmd_context.settings["server_path"])
                server_id = str(cmd_context.msg.guild.id)
                server_settings = get_settings(server_json, server_id)
                # Coerce the raw string to the type of the CURRENT value.
                # Branch order matters: bool is checked last and type() (not
                # isinstance) keeps bool distinct from int.
                t = type(server_settings[key])
                if t == type(""):
                    # maximum caps string length (0 = unlimited).
                    if len(raw_value) > maximum and maximum != 0:
                        await msg.channel.send("Invalid value.")
                        return
                    server_json = set_setting(server_json, server_id, key,
                                              raw_value)
                    set_json(cmd_context.settings["server_path"], server_json)
                    await msg.channel.send("Setting has been changed.")
                elif t == type(1):
                    try:
                        value = int(raw_value)
                        if value > maximum and maximum != 0:
                            await msg.channel.send("Invalid value.")
                            return
                        server_json = set_setting(server_json, server_id,
                                                  key, value)
                        set_json(cmd_context.settings["server_path"],
                                 server_json)
                        await msg.channel.send("Setting has been changed.")
                    except ValueError:
                        await msg.channel.send("Integer expected.")
                elif t == type(1.0):
                    try:
                        value = float(raw_value)
                        if value > maximum and maximum != 0:
                            await msg.channel.send("Invalid value.")
                            return
                        server_json = set_setting(server_json, server_id,
                                                  key, value)
                        set_json(cmd_context.settings["server_path"],
                                 server_json)
                        await msg.channel.send("Setting has been changed.")
                    except ValueError:
                        await msg.channel.send("Floating point expected.")
                elif t == type(True):
                    # Accept only the literal strings "true"/"false".
                    if str(raw_value).lower() == "true":
                        value = True
                    elif str(raw_value).lower() == "false":
                        value = False
                    else:
                        await msg.channel.send("Boolean expected.")
                        return
                    server_json = set_setting(server_json, server_id,
                                              key, value)
                    set_json(cmd_context.settings["server_path"], server_json)
                    await msg.channel.send("Setting has been changed.")
                else:
                    # Stored value has an unsupported type.
                    await msg.channel.send("Invalid setting.")
    elif len(args) == 1:
        if args[0] == "list":
            template_json = get_json(cmd_context.settings["template_path"])
            server_json = get_json(cmd_context.settings["server_path"])
            server_id = str(cmd_context.msg.guild.id)
            server_settings = get_settings(server_json, server_id)
            output = "__**Settings:**__\n"
            # Show only user-editable settings (non-None maximum).
            for setting in server_settings:
                if template_json["server_maximum_settings"][setting] != None:
                    output = output + "\n" + setting + ": " + str(
                        server_settings[setting])
            await msg.channel.send(output)
    else:
        await msg.channel.send("Invalid syntax.\n" +
                               get_properties()["syntax"].replace(
                                   "{PREFIX}",
                                   cmd_context.settings["prefix"]))
def get_echonest_playlist(tipo, mbid):
    """Return echonest's song list for an artist playlist.

    Only ``tipo == "artist"`` is supported; any other tipo yields None
    (matching the original implicit return).
    """
    if tipo != "artist":
        return None
    response = tools.get_json(tools.get_url("echonest", "artist", mbid))
    return response["response"]["songs"]
def getTagTracks(genre):
    """Fetch the top tracks for a last.fm tag (logs the URL queried)."""
    url = tools.get_url("lastfm", "toptagtracks", genre)
    logging.error(url)
    return tools.get_json(url)
def get_airports():
    """Fetch Ryanair's station (airport) list."""
    print('Finding all airports')
    return get_json('https://desktopapps.ryanair.com/de-de/res/stations')
def client_thread(pipe, queue):
    """Entry point for the Discord client thread: define the Gold client,
    register event handlers, and run the event loop until interrupted.

    ``pipe`` and ``queue`` are accepted but unused in this body — presumably
    for inter-thread communication; TODO confirm.
    """
    class Gold(discord.Client):
        # outputs a message to a log (by default, printing to the console)
        def log(self, text):
            print(text)

        # sets a marker whether modules have been imported
        def set_imported(self, value):
            self.has_imported_modules = value

        # gets whether modules have been imported
        def get_imported(self):
            return self.has_imported_modules

        # imports modules in the 'modules' directory and adds them to
        # self.modules
        def import_modules(self, path):
            self.modules = []
            for module in os.listdir(path):
                module_path = path + "/" + module
                # Only non-empty directories count as modules.
                if os.path.isdir(module_path):
                    if len(os.listdir(module_path)) >= 1:
                        self.modules.append(Module(module_path, module))

        # imports commands for all modules in self.modules which are marked
        # with an enabled attributed
        def import_commands(self):
            for module in self.modules:
                if module.enabled:
                    module.commands = []
                    module.import_commands()

        # returns the overall amount of command files for all enabled modules
        def count_commands(self):
            count = 0
            for module in self.modules:
                if module.enabled:
                    count += module.count_commands()
            return count

        # experimental
        # adds a new VoiceClient object to a list when it is created
        #def init_vc(self, value):
        #    self.vcs = value

        # adds a new VoiceClient object to a list when it is created
        #def add_vc(self, vc):
        #    self.vcs.append(vc)

        # removes a VoiceClient (when disconnecting)
        #def rem_vc(self, index):
        #    self.vcs.remove(index)

    # constant values
    VERSION = "1.0"
    PATH = os.path.dirname(os.path.abspath(__file__))
    KEYS_PATH = PATH + "/keys.json"
    MODULES_PATH = PATH + "/modules"
    SERVER_PATH = PATH + "/data/servers.json"
    TEMPLATE_PATH = PATH + "/data/template.json"
    DEFAULT_PERM_LEVEL = 0
    # Baseline settings handed to the message handler on every message.
    BASE_SETTINGS = {
        "version": VERSION,
        "path": PATH,
        "server_path": SERVER_PATH,
        "template_path": TEMPLATE_PATH,
        "modules_path": MODULES_PATH,
        "default_perm_level": DEFAULT_PERM_LEVEL,
        "perm_level": DEFAULT_PERM_LEVEL,
        "keys_path": KEYS_PATH
    }
    ge = Gold()

    @ge.event
    async def on_ready():
        # Load modules once the gateway connection is up.
        ge.set_imported(False)
        ge.import_modules(MODULES_PATH)
        CMD_COUNT = ge.count_commands()
        init_text = f"\nSuccessfully connected. Running:\nGold Experience v{VERSION}\nas user {ge.user.name} ({ge.user.id})\nin directory {PATH}\nwith {CMD_COUNT} commands present.\n-----\n"
        ge.log(init_text)

    @ge.event
    async def on_message(msg):
        await msghandler.handle(ge, msg, BASE_SETTINGS)

    TOKEN = get_json(KEYS_PATH)["bot_token"]
    try:
        ge.loop.run_until_complete(ge.start(TOKEN))
    except KeyboardInterrupt:
        # Ctrl-C: log out cleanly before tearing the loop down.
        ge.loop.run_until_complete(ge.logout())
    finally:
        ge.loop.close()
def getTopTracks():
    """Fetch last.fm's global top tracks (logs the URL queried)."""
    url = tools.get_url("lastfm", "toptracks", " ")
    logging.error(url)
    return tools.get_json(url)
def _resolve_track_video(artist_name, track_name):
    """Return the video id for (artist, track): cached Tracks entity when one
    exists under the artist's ancestor key, otherwise a fresh YouTube lookup."""
    ancestor_key = Class.Artists().query(Class.Artists.artist_name == artist_name).get(keys_only=True)
    if ancestor_key is not None:
        cached = Class.Tracks().query(Class.Tracks.track_name == track_name, ancestor=ancestor_key).get()
    else:
        cached = None
    if cached is None:
        return track.get_video(artist_name, track_name)
    return cached.track_video


def get_playlist_from_url(playlist_name):
    """Build (and cache in memcache) a playlist for *playlist_name*.

    Two sources are supported:
      * names containing "frontpage" — Last.fm top tracks, capped at 11 entries;
      * names containing "radio" — an Echonest artist playlist ("artist <mbid>")
        or genre playlist ("<genre>radio").

    Returns:
        The playlist dict {"data": [...]} for either source, or None when the
        name matches neither pattern.
    """
    playlist = {"data": []}
    if "frontpage" in playlist_name:
        # NOTE(review): these logging.error calls look like debug tracing,
        # not error conditions — consider downgrading to logging.debug.
        logging.error("front")
        url = tools.get_url("lastfm", "toptracks", " ")
        logging.error(url)
        j = tools.get_json(url)
        for i in j["tracks"]["track"]:
            # Stop once the 11-entry frontpage cap is reached instead of
            # scanning the remainder of the chart item by item.
            if len(playlist["data"]) >= 11:
                break
            artist_name = i["artist"]["name"]
            track_name = i["name"]
            playlist["data"].append({
                "video_artist": artist_name,
                "video_track": track_name,
                "playlist_videos": _resolve_track_video(artist_name, track_name),
                "mbid": i["mbid"],
            })
        p = Class.Playlists(
            playlist_name=playlist_name,
            playlist_json=j,
            playlist_videos=playlist,
            key=ndb.Key(Class.Playlists, playlist_name),
        )
        # p.put()  # datastore write deliberately disabled for frontpage (as in original)
        memcache.set(playlist_name, playlist)
        return playlist
    logging.error("radio")
    if "radio" in playlist_name:
        if "artist" in playlist_name:
            # "artist <mbid>" — seed an Echonest playlist from the artist mbid.
            mbid = playlist_name.split()[1]
            url = tools.get_url("echonest", "playlist", mbid)
        else:
            # "<genre>radio" — seed an Echonest playlist from the genre name.
            genre = playlist_name[0:playlist_name.find("radio")]
            url = tools.get_url("echonest", "genre", genre).replace(" ", "%20")
        logging.error(url)
        j = tools.get_json(url)
        for i in j["response"]["songs"]:
            artist_name = i["artist_name"]
            track_name = i["title"]
            playlist["data"].append({
                "video_artist": artist_name,
                "video_track": track_name,
                "playlist_videos": _resolve_track_video(artist_name, track_name),
            })
        p = Class.Playlists(
            playlist_name=playlist_name,
            playlist_json=j,
            playlist_videos=playlist,
            key=ndb.Key(Class.Playlists, playlist_name),
        )
        p.put()
        memcache.set(playlist_name, playlist)
        logging.error(playlist)
        return playlist
def get_lastfmGenre_playlist(genre): url = tools.get_url("lastfm", "toptagtracks", genre) j = tools.get_json(url) return j
def income_self_vs_rel(office_id=14):
    """Plot self income vs relatives' income (2006-2018, in millions) for each
    person attached to *office_id* whose two series are strongly
    anti-correlated (corr < -0.7).

    Relies on module-level lookups ``office_id2name``, ``office2persons`` and
    ``person_id2name``, and on ``tools.get_json()`` for the raw declarations.
    (The original's unused ``global income_data`` declaration was dropped.)
    """
    json_data = tools.get_json()
    rows = []
    for entry in json_data:
        year = entry['main']['year']
        person_id = entry['main']['person']['id']
        # FIX: bind to a fresh name — the original assigned to `office_id`,
        # clobbering the function parameter, so the later filter and prints
        # used whatever office the *last* entry belonged to, not the argument.
        entry_office_id = entry['main']['office']['id']
        income_self = sum(e['size'] for e in entry['incomes'] if e['relative'] is None)
        income_rel = sum(e['size'] for e in entry['incomes'] if e['relative'] is not None)
        rows.append([
            year, person_id, entry_office_id, income_self, income_rel,
            income_self + income_rel
        ])

    dframe = pd.DataFrame(rows)
    dframe.columns = [
        'year', 'person_id', 'office_id', 'income_self', 'income_rel',
        'income_total'
    ]
    print('office_id:', office_id)
    print('office_name:', office_id2name[str(office_id)])
    dframe = dframe.loc[dframe.loc[:, 'office_id'] == int(office_id), :]

    years = range(2006, 2018 + 1)
    persons = set()
    income_data_self = defaultdict()
    income_data_rel = defaultdict()
    income_data_total = defaultdict()
    for p in office2persons[str(office_id)]:
        entries = dframe.loc[dframe.loc[:, 'person_id'] == int(p), :]
        values_self = []
        values_rel = []
        values_total = []
        for year in years:
            this_year = entries.loc[entries.loc[:, 'year'] == year, :]
            if this_year.shape[0] > 0:
                row = this_year.index[0]
                values_self.append(this_year.at[row, 'income_self'])
                values_rel.append(this_year.at[row, 'income_rel'])
                values_total.append(this_year.at[row, 'income_total'])
            else:
                # Missing year: keep the series aligned with None placeholders.
                values_self.append(None)
                values_rel.append(None)
                values_total.append(None)
        income_data_self[p] = pd.Series(values_self, index=years)
        income_data_rel[p] = pd.Series(values_rel, index=years)
        income_data_total[p] = pd.Series(values_total, index=years)
        # FIX: the original called prs.add(p) on an undefined name (NameError);
        # `persons` stayed empty, so the plotting loop below never executed.
        persons.add(p)
    print('Total selected:', len(persons))

    for p in persons:
        relevant_years = []
        relevant_values_self = []
        relevant_values_rel = []
        for year in years:
            if not (pd.isna(income_data_self[p][year])
                    or pd.isna(income_data_rel[p][year])):
                relevant_years.append(year)
                relevant_values_self.append(income_data_self[p][year] / 1000000.0)
                relevant_values_rel.append(income_data_rel[p][year] / 1000000.0)
        # Need enough overlapping years and a non-trivial relatives series
        # for the correlation to be meaningful.
        if len(relevant_years) <= 5 or sum(relevant_values_rel) == 0.0:
            continue
        corr = np.corrcoef(relevant_values_self, relevant_values_rel)[0, 1]
        if corr < -0.7:
            print(p, corr, relevant_values_self, relevant_values_rel)
            fig, ax = plt.subplots()
            ax.plot(relevant_years, relevant_values_self,
                    label=person_id2name[str(p)])
            ax.plot(relevant_years, relevant_values_rel, label='relatives')
            plt.legend(loc='upper left')
            plt.show()