def get_weather(city):
    """Fetch the Yahoo Weather forecast for *city* via RapidAPI.

    Returns the raw response of the YahooWeatherAPI.getWeatherForecast
    call, filtered to the condition and forecast items.
    """
    # NOTE(review): credentials are hardcoded — consider moving to config.
    connection = RapidConnect(
        "default-application_5a68be4ce4b09c6b06da6c08",
        "551285ee-2f7d-45e2-8471-bc4aaae024ed")
    payload = {
        'location': city,
        'filter': ['item.condition,item.forecast'],
    }
    return connection.call('YahooWeatherAPI', 'getWeatherForecast', payload)
def post(self):
    """Handle an image-description request.

    Reads an image URL from the request body, asks Microsoft Computer
    Vision's `describeImage` endpoint for a caption, and renders the
    result page with the tags, best caption, and confidence in percent.
    """
    image_url = self.get_body_argument('url')
    # NOTE(review): application and subscription keys are hardcoded.
    connection = RapidConnect(
        "default-application_59dda5c4e4b04627fc65932d",
        "50647dfc-4872-43d2-a233-87ea46cc35e7")
    raw = connection.call(
        'MicrosoftComputerVision', 'describeImage', {
            'subscriptionKey': 'e67939b51a3f4cd3b11da510fbed6d0b',
            'image': image_url,
            'region': 'westcentralus'
        })
    # The response is a JSON document; its 'description' block carries
    # the tag list and the ranked captions.
    description = json.loads(raw).get('description')
    best_caption = description.get('captions')[0]
    self.render_template(
        "microsoft-recognition.html", {
            'tags': description.get('tags'),
            'caption': best_caption.get('text'),
            'confidence': best_caption.get('confidence') * 100,
        })
def maps():
    """Flask view: choropleth maps of flight cost/duration/"easiness"
    from a chosen departure airport.

    GET with ?did=<airport id> renders maps from previously collected
    Direction rows. A valid POST triggers a fresh collection run via the
    flight API, rate-limited to once per day through the Log table.
    """
    form = AirportsForm(request.form)
    departure = None
    rate = 1
    mapcost = ''
    mapduration = ''
    mapeasiness = ''
    msg = ''
    did = ''
    with orm.db_session:
        # The newest 'AIR' log entry marks the last collection run.
        logs = Log.select(lambda p: p.country == 'AIR').order_by(
            orm.desc(Log.id))[:]
        previous = orm.select(c.departure for c in Direction).prefetch(Airport)[:]
        if logs[0].dt + timedelta(days=1) > datetime.now():
            # Less than a day since the last run: tell the user when the
            # next search becomes available instead of offering the form.
            delta = (logs[0].dt + timedelta(days=1) - datetime.now()).seconds
            hours = (delta // 3600)
            minutes = (delta - hours * 3600) // 60
            msg = 'New search will be available in %s hour(s) %s minutes.' % (
                hours, minutes)
        else:
            airports = Airport.select().order_by(Airport.country)[:]
            form.airports.choices = [
                (x.id, '%s - %s, %s' % (x.country, x.code, x.city))
                for x in airports
            ]
    if request.method == 'GET':
        did = request.values.get('did', '')
        currency = 'USD'
        if did != '':
            cr = CurrencyRates()
            with orm.db_session:
                departure = Airport.get(id=int(did))
                #directions=Direction.select(lambda p: p.departure==departure).prefetch(Airport)[:]
                # Cheapest direction per destination country for this
                # departure. NOTE(review): SQL built via %s interpolation;
                # departure.id is an int so injection risk is low, but a
                # bound parameter would be safer.
                directions = Direction.select_by_sql(
                    'select d0.* from Directions d0 join Airports a0 on a0.id=d0.destination where d0.cost=(select min(d.cost) from Directions d join Airports a on a.id=d.destination where a.country=a0.country and d.departure=d0.departure) and departure=%s'
                    % departure.id)
                try:
                    rate = cr.get_rate(directions[0].currency, 'USD')
                except RatesNotAvailableError:
                    if directions[0].currency == 'KZT':
                        rate = 0.0032  # hardcoded KZT->USD fallback rate
                    else:
                        # Keep the original currency for display.
                        # NOTE(review): indentation reconstructed from a
                        # collapsed source — confirm both lines belong here.
                        rate = 1
                        currency = directions[0].currency
                locations = [d.destination.iso3 for d in directions]
                texts = [
                    '%s - %s, %s' % (d.destination.country, d.destination.code,
                                     d.destination.city) for d in directions
                ]
                vals = [int(d.cost * rate) for d in directions]
                # cost * hours-per-mile: lower means "easier" to fly.
                costperm = [
                    int(d.cost * rate * d.duration / float(d.distance))
                    for d in directions
                ]
                times = [d.duration // 60 for d in directions]
                mapcost = makemap(
                    'Flight cost from %s to some countries.' % departure.code,
                    'Cost, %s' % currency, locations, vals, texts)
                mapduration = makemap(
                    'Total flight duration, best from 3 most cheap flight.',
                    'Duration, hours', locations, times, texts)
                mapeasiness = makemap('Easiness (cost * duration / distance).',
                                      'Easiness, $*h/mi', locations, costperm,
                                      texts)
    elif form.validate_on_submit() and msg == '':
        with orm.db_session:
            # Search date one month out, as the flight API query date.
            dtstr = (datetime.now() + timedelta(days=30)).strftime('%Y-%m-%d')
            departure = Airport.get(id=int(request.form.get('airports')))
            # Rotate through iteration slots 1..3 so repeated runs reuse
            # (and overwrite) the oldest slot.
            iteration = orm.max(p.iteration for p in Direction
                                if p.departure == departure)
            if iteration == None:
                iteration = 0
            if iteration < 3:
                iteration += 1
            else:
                iteration = 1
            # Clear out the slot being refreshed before re-collecting.
            dirs = Direction.select(lambda p: p.departure == departure and p.
                                    iteration == iteration).delete(bulk=True)
            portstodo = Airport.select(lambda p: p.iteration == iteration and p
                                       .code != departure.code)[:]
            rapid = RapidConnect("flight-167611",
                                 "12489be9-a1b2-442f-9f98-77609a7d6a9d")
            for x in portstodo:
                processdestination(rapid, departure, x, dtstr, iteration)
            # Record the run so the daily rate limit above kicks in.
            log = Log(keyword='none', dt=datetime.now(), country='AIR')
        return redirect('%s?did=%s' % (url_for('maps'), departure.id))
    return render_template('parsers/maps.jade', form=form, msg=msg,
                           departure=departure, previous=previous,
                           mapcost=mapcost, mapduration=mapduration,
                           mapeasiness=mapeasiness)
from rapidconnect import RapidConnect
import sys

# City name is supplied as the first command-line argument.
city = sys.argv[1]

# NOTE(review): hardcoded RapidAPI credentials — move to environment/config.
rapid = RapidConnect("default-application_59bbda2be4b0b0cacf7c7995",
                     "db4efe57-20a5-4f2d-966c-b292a14a8695")

# Current forecast for the requested city from Yahoo Weather.
result = rapid.call('YahooWeatherAPI', 'getWeatherForecast',
                    {'location': city})

desc = result["query"]["results"]["channel"]["item"]["condition"]["text"]
temp = result["query"]["results"]["channel"]["item"]["condition"]["temp"]

# Fix: the user-facing message misspelled "Fahrenheit" as "Farenheit".
print("It's " + desc + " in " + city.capitalize() +
      ", with a temperature of " + temp + " Fahrenheit.")
sys.stdout.flush()
from rapidconnect import RapidConnect
# RapidAPI connection for the emotion-recognition calls below.
rapid = RapidConnect('Emotions', '24aa499b-e5a4-491e-90ce-9bb0f8d75c86');
import base64
import json
import emotion
from collections import Counter

# result = rapid.call('MicrosoftEmotionAPI', 'getEmotionRecognition', {
#     'subscriptionKey': '90febbecca1c462f871ea1d8e349d76a',
#     'image': 'http://i.imgur.com/shDtPNc.jpg'
# })


def faceArea(dct):
    """Area of a face's bounding box (used to pick the dominant face)."""
    area = dct['faceRectangle']['width']*dct['faceRectangle']['height']
    return area


# maxFace = max(result, key=faceArea)


def getEmotions(face):
    # NOTE(review): `rankedEmotions` is never defined anywhere in view —
    # calling this raises NameError. Looks like an unfinished stub.
    return rankedEmotions


def getMaxEmotion(face):
    """Return the highest-scoring (emotion, score) pair for *face*."""
    emotionsList = dictToList(face)
    maxEmo = max(emotionsList, key=lambda x: x[1])
    return maxEmo


def dictToList(dct):
    # Convert the 'scores' mapping into a list of (key, score) tuples.
    # NOTE(review): this definition is truncated in this chunk.
    keys = dct['scores'].keys()
    scores = dct['scores']
    tupList = []
    for key in keys:
# -*- coding: utf-8 -*- """ Created on Tue Aug 14 07:10:48 2018 @author: FluxMonitor Description: Uses Dark Sky API to request historical weather data """ import pandas as pd import numpy as np from rapidconnect import RapidConnect import datetime as dt import os username = os.getlogin() rapid = RapidConnect("default-application_5b72b75fe4b02799e7f62892", "4df23935-0f28-4bd5-9eb1-0e8d7b021f78") today = pd.to_datetime(dt.datetime.now().strftime('%y-%m-%d 00:00:00'),yearfirst = True) today = today date_range = pd.date_range('2018-04-17 00:00:00',today) date_range_hourly = pd.date_range('2018-04-17 00:00:00','2018-04-20 00:00:00',freq = '1h') precip = pd.DataFrame(index = date_range,columns = ['date','precip_rate_mm_hour','precip_type']) daily_precip = pd.DataFrame(index = date_range,columns = ['precip_avg_rate_mm_hour','precip_type']) hourly_precip = pd.DataFrame(index = date_range_hourly, columns = ['precip_rate_mm_hour','precip_type']) for i in date_range: print(i) for d in date_range: result = rapid.call('Darksky', 'getTimeMachineRequest', { 'apiKey': 'c230f720cf53566b8f33657451e694d3', 'time': d, 'coordinates': '42.5315, -72.1899',
# -*- coding: utf-8 -*- from rapidconnect import RapidConnect rapid = RapidConnect("testa", "a3787239-bb1e-4fa4-85c3-a423fa6af51f") result = rapid.call( "GoogleTranslate", "translate", { 'string': 'שלום', 'sourceLanguage': 'he', 'targetLanguage': 'en', 'apiKey': 'XXXX' }) print result def on_join(): print("joined!") def on_message(message): print(message) def on_close(): print("Closed!") def on_error(message): print("error:")
def dawgstuff():
    """Flask handler: look a restaurant up on allmenus.com, scrape its
    menu, score the first few items against per-meal nutrient targets via
    the Nutritionix API, and return the items sorted best-first.

    NOTE(review): `stripped_menu` and `ratings` are never initialized in
    this view — presumably module-level globals; verify.
    """
    r2 = str(request.get_data())
    print(r2)
    # Search allmenus for the restaurant name sent in the request body
    # (first character of the raw body is skipped).
    pagina = "https://www.allmenus.com/custom-results/-/ca/san-jose/" + r2[1:]
    print(pagina)
    pg = urllib2.urlopen(pagina)
    sopa = BeautifulSoup(pg, "html.parser")
    thelinks = sopa.findAll("a", {"data-masterlist-id": re.compile(r".*")})
    print(thelinks)
    da_str = str(thelinks[0])
    #print(re.search(r'\"\.+\"',da_str).group(0))
    # Extract the "/ca/.../menu/" path from the first search hit.
    start = da_str.index("/ca/")
    end = da_str.index("/menu/") + 6
    da_str = da_str[start:end]
    print(da_str)
    quote_page = "https://www.allmenus.com" + da_str
    #https://www.allmenus.com/ca/san-jose/216536-mcdonalds/menu/
    print(quote_page)
    page = urllib2.urlopen(quote_page)
    soup = BeautifulSoup(page, 'html.parser')
    titles = soup.find_all("span", attrs={'class': 'item-title'})
    print(titles)
    ingredients = soup.find_all("p", attrs={'class': 'description'})
    print(ingredients)
    counter = 0

    # Rough daily nutrient totals (fat g, carbs g, protein g, sodium g).
    class total_values:
        fat = 65
        carbs = 300
        protein = 50
        sodium = 2.4

        def function(self):
            pass

    # Per-meal targets: one third of the daily totals.
    class ideal_values:
        fat = total_values.fat / 3
        carbs = total_values.carbs / 3
        protein = total_values.protein / 3

    # Pair each menu title with its description text.
    for i in range(0, len(titles)):
        print(i)
        print(titles[i].get_text())
        print(ingredients[i].get_text())
        stripped_menu.append(titles[i].get_text() + ": " +
                             ingredients[i].get_text())
    #print(stripped_menu)
    parsed_titles = []
    for title in titles:
        parsed_titles.append(title)
    print(parsed_titles)
    parsed_titles = remove_duplicates(parsed_titles)
    print(parsed_titles)
    rapid = RapidConnect("d_562854dae4b049ee93f17049",
                         "1d71d14a-e7c0-478c-9635-f76b62490adb")
    # Score at most the first 5 items (counter guard below).
    for i in range(0, len(parsed_titles)):
        food = parsed_titles[i]
        print(food)
        counter += 1
        if counter > 5:
            break
        result = rapid.call(
            'Nutritionix', 'getFoodsNutrients', {
                'applicationSecret': '8594e772f2b653ab072e04d6a5feb6e0',
                'foodDescription': food,
                'applicationId': '444f4a33'
            })
        print(result)
        if "We couldn't match any of your foods" in str(result):
            pass  # no nutrition match: skip scoring this item
        else:
            info = result[0]["foods"][0]
            print(info["food_name"])
            #print("Sodium: " + str(info["nf_sodium"]))
            print("fat: " + str(info["nf_total_fat"]))
            print("carbohydrates: " + str(info["nf_total_carbohydrate"]))
            print("protein: " + str(info["nf_protein"]))
            #sodium = info["nf_sodium"]
            fat = info["nf_total_fat"]
            carbs = info["nf_total_carbohydrate"]
            protein = info["nf_protein"]
            # Distance of each macro's share from an even 1/3 split;
            # a lower rating means closer to the ideal balance.
            rating = (abs(0.33 - fat / ideal_values.fat) +
                      abs(0.33 - carbs / ideal_values.carbs) +
                      abs(0.33 - protein / ideal_values.protein))
            # NOTE(review): indexes stripped_menu with the parsed_titles
            # index — only safe if the two lists stay aligned; verify.
            ratings.append([stripped_menu[i], rating])
    print(ratings)
    sorted_ratings = sorted(ratings, key=lambda x: x[1])
    final_stuff = sorted_ratings
    return str(final_stuff)
# Model and tagger used by the conversion pipeline.
clarifai_model = clarifai.models.get('general-v1.3')
tagger = PerceptronTagger()

# Document frequency of each lowercased word over the Brown corpus:
# the number of sentences in which the word appears at least once.
total_word_counts = {}
for sentence in brown.sents():
    for word in {token.lower() for token in sentence}:
        total_word_counts[word] = total_word_counts.get(word, 0) + 1
brown_sent_count = len(brown.sents())

rapid = RapidConnect('cmpre', 'a0c4b418-1531-4c31-abbe-24df50f8a74b')


@app.route('/convert_html', methods=['POST'])
def convert_html():
    """
    Given an HTML page as a string, apply all of our comprehension-improving
    transformations, and return the new HTML.
    """
    payload = request.get_json(force=True)
    count, html, summary = convert(payload['page'], payload['host'],
                                   payload['hostrel'])
    print(summary)
    return json.dumps({'count': count, 'html': html, 'summary': summary})
from rapidconnect import RapidConnect
from os import path
from pprint import pprint
import json

# Resolve the images directory relative to this file.
app_dir = path.dirname(__file__)
image_dir = path.join(app_dir, 'images')

API_KEY = "########"  # redacted rapidAPI key — supply a real one to run

# rapidAPI Initialize
rapid = RapidConnect('WasteZero', API_KEY)

# Opening text files and spliting them into words:
# each category file is read, lowercased, and stripped of whitespace.
with open('compost.txt', 'r+') as f:
    compost = f.readlines()
    compost = [x.lower() for x in compost]
    compost = [x.strip() for x in compost]
with open('recyclables.txt', 'r+') as f:
    recycable = f.readlines()
    recycable = [x.lower() for x in recycable]
    recycable = [x.strip() for x in recycable]
with open('landfill.txt', 'r+') as f:
    landfill = f.readlines()
    landfill = [x.lower() for x in landfill]
    landfill = [x.strip() for x in landfill]


# NOTE(review): sortImage is truncated in this chunk (docstring cut off).
def sortImage(url):
    """
import json, os
from rapidconnect import RapidConnect

# RapidAPI connection used for all Last.fm calls below.
rapid = RapidConnect("default-application_5a191a72e4b0d45349f766fe",
                     "bc65de32-d2c1-4679-94d9-2683183f4521")

if __name__ == '__main__':
    # Output file for the collected song list.
    downloaded = open('../latest_songs_2/downloaded_songs.json', 'w+')
    d_json = {}
    # Chart-topping artists from Last.fm.
    artists_json = rapid.call('LastFM', 'getTopArtistsChart',
                              {'apiKey': '9c57fb1f3de3132bda349ef57801988d'})
    #artists_json = json.loads(artists);
    full_list = artists_json['artists']['artist']
    # NOTE(review): the loop body appears truncated in this chunk
    # (`temp` is assigned but never used in the visible code).
    for item in full_list:
        temp = []
        # encode() on the name suggests Python 2 str handling — confirm.
        artist_name = item['name'].encode('ascii', 'ignore')
        # Five tracks from page 5 of the artist's top tracks.
        top_50_songs = rapid.call(
            'LastFM', 'getTopArtistTracks', {
                'apiKey': '9c57fb1f3de3132bda349ef57801988d',
                'artist': artist_name,
                'limit': '5',
                'page': '5'
            })
        all_tracks = top_50_songs['toptracks']['track']
from urllib.request import urlopen
from pydub import AudioSegment
from pydub.playback import play

# mp3file = urlopen("https://cs9-19v4.userapi.com/p1/d722df69efb593.mp3")
# with open('./test.mp3', 'wb') as output:
#     output.write(mp3file.read())
#
# song = AudioSegment.from_mp3("./test.mp3")
# play(song)
from rapidconnect import RapidConnect

# Search Spotify's public API for a track and dump the raw response.
rapid = RapidConnect('robotshanti_5a4f3a79e4b09c6b28427e54',
                     'f9379269-6068-433b-a882-e22a4655ebd8')
search_params = {
    'accessToken': 'robotshanti_5a4f3a79e4b09c6b28427e54',
    'query': 'I am not Afraid',
}
result = rapid.call('SpotifyPublicAPI', 'searchTracks', search_params)
print(result)
from rapidconnect import RapidConnect
from flask import request

# RapidAPI connection for the foodpool application.
rapid = RapidConnect('foodpool', '521832dd-f936-404c-a121-9ee6781cabea')
# NOTE(review): several imports below are duplicated (os, Flask, request);
# kept as-is since this may be a partial view of the file.
from flask import Flask
from flask import g
import cf_deployment_tracker
import sqlite3
import os
import uuid, os
from flask import Flask, jsonify, request
#Square Modules
import squareconnect
from squareconnect.rest import ApiException
from squareconnect.apis.transaction_api import TransactionApi

# NOTE(review): Square sandbox credentials are hardcoded — move to config.
access_token = 'sandbox-sq0atb-vwQUCVDYaJrgubwxf5VnkQ'
location_id = 'CBASEMe2x6MwoifEPrKa8Toz5gk'

# Emit Bluemix deployment event
cf_deployment_tracker.track()

app = Flask(__name__)

# On Bluemix, get the port number from the environment variable VCAP_APP_PORT
# When running this app on the local machine, default the port to 8080
port = int(os.getenv('VCAP_APP_PORT', 8080))

DATABASE = './database.db'
from rapidconnect import RapidConnect

# Shared RapidAPI connection for the Google Translate wrappers below.
# NOTE: the Google API key is hardcoded in each wrapper.
rapid = RapidConnect('openwerx-greypill',
                     'b6b91fb4-9c29-43ee-8ffe-342f8bca0dcf')


def gtDetectLang(text):
    """Detect the language of *text* via GoogleTranslate.detectLanguage."""
    payload = {
        'apiKey': 'AIzaSyBv2QEVIH-ZNORgAJkTTaDmx_f_vMM5RYE',
        'string': text,
    }
    return rapid.call('GoogleTranslate', 'detectLanguage', payload)


def gtAutoTranslate(text):
    """Translate *text* to English, letting the API guess the source."""
    payload = {
        'apiKey': 'AIzaSyBv2QEVIH-ZNORgAJkTTaDmx_f_vMM5RYE',
        'string': text,
        'targetLanguage': 'en',
    }
    return rapid.call('GoogleTranslate', 'translateAutomatic', payload)


def gtTranslate(text, lang):
    """Translate *text* from source language *lang* to English."""
    payload = {
        'apiKey': 'AIzaSyBv2QEVIH-ZNORgAJkTTaDmx_f_vMM5RYE',
        'string': text,
        'targetLanguage': 'en',
        'sourceLanguage': lang,
    }
    return rapid.call('GoogleTranslate', 'translate', payload)
from rapidconnect import RapidConnect

# RapidAPI connection for the whatscookin project.
# NOTE(review): credentials are hardcoded — move to config/env.
rapid = RapidConnect('whatscookin_5bdcbabde4b09efa5fbcded9',
                     '04f4dd28-d4ff-4c6d-9f6b-6d8bd607659b')
from rapidconnect import RapidConnect

# Python 2 script (print statement): query Yelp for Vietnamese food
# near a fixed street address and dump the raw response.
rapid = RapidConnect("cs421", "c10b4173-cbf3-47c1-a35c-385dc88905c9")
# NOTE(review): the access token is hardcoded, and most optional
# parameters are sent as empty strings rather than omitted — confirm
# the API treats empty strings as "unset".
result = rapid.call(
    'YelpAPI', 'getBusinesses', {
        'accessToken':
        '_4Zt6rM00ZWHNhuIjmN7vGittFp5PoII9pZjidLmuCc2EAy2jTqPYCV2gnBN1c_SuxFMLkg4hnxL0FVz5Rz8G7jmfopiae2hrw-4VqiA6LX_lK3jOU5LkkFBWUL6WHYx',
        'term': '',
        'location': '842 w taylor street',
        'latitude': '',
        'longitude': '',
        'radius': '200',
        'categories': 'food,vietnamese',
        'locale': '',
        'limit': '',
        'offset': '',
        'sortBy': '',
        'price': '',
        'openNow': '',
        'openAt': '',
        'attributes': ''
    })
print result
from rapidconnect import RapidConnect

# Run Microsoft Emotion recognition on a sample image and show the result.
rapid = RapidConnect('calhacks3', 'f58ef2cf-6cfa-4cd0-a653-beb778f317e1')

request_args = {
    'subscriptionKey': 'eb8317cc011540c9b52cef81219dee60',
    'image': 'http://i.imgur.com/gUHZXOh.jpg',
}
result = rapid.call('MicrosoftEmotionAPI', 'getEmotionRecognition',
                    request_args)
print(type(result))
print(result)