def main():
    # new, out, start_url, end_url and myKey are module-level values defined elsewhere
    os.chdir(new)
    listing = os.listdir(new)
    for filename in listing:
        os.chdir(new)
        if "~lock" not in filename:
            f = open(filename, 'r')
            outfile = "out" + filename
            os.chdir(out)
            fw = open(outfile, 'a')
            writer = csv.writer(fw)
            reader = csv.reader(f)
            for row in reader:
                text = fetchTextFromTweet(row)
                url = start_url + text + end_url
                # parse the JSON body of the sentiment API response
                r = requests.get(url, auth=(myKey, myKey)).json()
                mood = fetchMoodFromRequest(r, row)
                row.append(mood)
                writer.writerow(row)
            f.close()
            fw.close()
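# Hedged sketch: main() above relies on fetchTextFromTweet() and
# fetchMoodFromRequest(), which are defined elsewhere. These hypothetical
# stand-ins only illustrate the assumed shapes (tweet text in a fixed CSV
# column, a mood label in the sentiment API's JSON body); they are not the
# original implementations.
def fetchTextFromTweet(row):
    # Assumption: the tweet text is the first column of the CSV row.
    return row[0]


def fetchMoodFromRequest(response_json, row):
    # Assumption: the sentiment endpoint returns something like {"mood": "positive"}.
    return response_json.get('mood', 'unknown')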
def fetch_availability(property_id):
    url = 'https://connect.bookt.com/ws/?method=get&entity=property&' \
          'apikey={}&ids={}&rates=1&loadconfig=1&avail=1'.format(
              kigo_api_key, property_id)
    data = requests.get(url, headers={'Content-Type': 'application/json'})
    # parse the JSON body and pull out the availability blocks
    raw_availability = data.json()['result'][0]['ContextData']['Availability']
    all_dates = []
    availability = []
    for available in raw_availability:
        start_date = arrow.get(str(available['CheckIn'][:10]), 'YYYY-MM-DD')
        end_date = arrow.get(str(available['CheckOut'][:10]), 'YYYY-MM-DD')
        date_range = get_list_of_dates(start_date, end_date)
        for date in date_range:
            all_dates.append(date)
    # Remove duplicate dates
    for date in all_dates:
        if date not in availability:
            availability.append(date)
    return availability
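# Hedged sketch: get_list_of_dates() is called by fetch_availability() above
# and fetch_prices() below but defined elsewhere. A minimal stand-in, assuming
# an inclusive range of 'YYYY-MM-DD' strings built with arrow:
def get_list_of_dates(start_date, end_date):
    # arrow.Arrow.range yields one Arrow object per day between the two dates
    return [
        day.format('YYYY-MM-DD')
        for day in arrow.Arrow.range('day', start_date, end_date)
    ]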
def get_list_of_opportunities_by_note(note):
    opportunities_by_note = []
    # URL-encode the search text for the Close.io query parameter
    note = urllib.parse.quote_plus(note)
    opp_url = 'https://app.close.io/api/v1/opportunity/?query={}'.format(note)
    has_more = True
    offset = 0
    while has_more:
        response = requests.get(
            opp_url,
            auth=(CLOSEIO_API_KEY, ''),
            headers={'Content-Type': 'application/json'},
            params={'_skip': offset, '_limit': 100}
        )
        body = response.json()
        opps = body['data']
        for opp in opps:
            opportunities_by_note.append(opp)
        offset += len(opps)
        has_more = body['has_more']
    return opportunities_by_note
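# Hedged usage sketch for get_list_of_opportunities_by_note(); the note text is
# an illustrative placeholder, not a value from the original code.
opportunities = get_list_of_opportunities_by_note('annual renewal')
print('Fetched {} opportunities'.format(len(opportunities)))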
def fetch_prices(property_id):
    url = 'https://connect.bookt.com/ws/?method=get&entity=property&' \
          'apikey={}&ids={}&rates=1&loadconfig=1&avail=1'.format(
              kigo_api_key, property_id)
    data = requests.get(url, headers={'Content-Type': 'application/json'})
    raw_rates = data.json()['result'][0]['ContextData']['Rates']
    # Parse and output a clean rates table
    prices = []
    for rate in raw_rates:
        start_date = arrow.get(str(rate['StartDate'][:10]), 'YYYY-MM-DD')
        end_date = arrow.get(str(rate['EndDate'][:10]), 'YYYY-MM-DD')
        minimum_night_stay = int(rate['LengthOfStay'])
        price = int(rate['Value'])
        nightly_price = round(float(price) / float(minimum_night_stay))
        date_range = get_list_of_dates(start_date, end_date)
        for date in date_range:
            prices.append({
                "date": date,
                "price": nightly_price,
                "minimum_night_stay": minimum_night_stay
            })
    return prices
def fetch_property_data(property_id):
    url = 'https://connect.bookt.com/ws/?method=get&entity=property&' \
          'apikey={}&ids={}'.format(kigo_api_key, property_id)
    data = requests.get(url, headers={'Content-Type': 'application/json'})
    property_data = data.json()['result']
    return property_data
def fetch_listing_data(listing_id):
    url = sh_listing_url + listing_id
    data = requests.get(url, headers={'Content-Type': 'application/json'})
    listing_data = data.json()['objects']
    return listing_data
def get(self, uri, data=None):
    api_result = requests.get(
        self.__API_BASE_URL + uri,
        params=data,
        headers={'User-Agent': self.USER_AGENT, 'Accept': self.MIME_JSON}
    )
    response = {
        "status": api_result.status_code,
        "response": api_result.json()
    }
    return response
def fetch_nightly_appraisal(listing_id, date):
    url = sh_appraisal_url + 'listing_id={}&date={}'.format(listing_id, date)
    data = requests.get(url, headers={'Content-Type': 'application/json'})
    appraisal = data.json()['objects'][0]
    return appraisal
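# Hedged usage sketch for the two listing helpers above; the listing id and
# date are placeholders, and the returned fields are not confirmed API shapes.
listing = fetch_listing_data('<listing_id>')
appraisal = fetch_nightly_appraisal('<listing_id>', '2016-07-04')
print(appraisal)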
def fetch_daily_fantasy_scoring(date):
    url = BASE_URL + 'DailyFantasyPoints/{}'.format(date)
    response = requests.get(url, headers={
        'Ocp-Apim-Subscription-Key': SUBSCRIPTION_KEY,
        'Content-Type': 'application/json'
    })
    fantasy_scores = response.json()
    return fantasy_scores
def get_set_of_lists_on_trello_board(board_id):
    url = 'https://api.trello.com/1/boards/{}/lists?key={}&token={}'.format(
        board_id, TRELLO_APP_KEY, TRELLO_APP_TOKEN
    )
    set_of_lists_on_board = requests.get(
        url,
        headers={'Content-Type': 'application/json'},
    )
    return set_of_lists_on_board.json()
def get_list_of_opportunities(start_date):
    opp_url = 'https://app.close.io/api/v1/opportunity/?' \
              'date_updated__gte={}'.format(start_date)
    opps = requests.get(
        opp_url,
        auth=(CLOSEIO_API_KEY, ''),
        headers={'Content-Type': 'application/json'}
    )
    return opps.json()['data']
def get_trello_card_data(card_id):
    get_card_url = 'https://api.trello.com/1/cards/{}?key={}&token={}'.format(
        card_id, TRELLO_APP_KEY, TRELLO_APP_TOKEN
    )
    get_card_data = requests.get(
        get_card_url,
        headers={'Content-Type': 'application/json'}
    )
    return get_card_data.json()
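# Hedged usage sketch for the two Trello helpers above, assuming the standard
# Trello JSON fields; the board and card ids are placeholders.
lists_on_board = get_set_of_lists_on_trello_board('<board_id>')
print([trello_list['name'] for trello_list in lists_on_board])
card = get_trello_card_data('<card_id>')
print(card['name'])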
def get_opportunity_status_id_from_status_label(status_label):
    status_id = ''
    url = 'https://app.close.io/api/v1/status/opportunity/'
    response = requests.get(
        url,
        auth=(CLOSEIO_API_KEY, ''),
        headers={'Content-Type': 'application/json'}
    )
    statuses = response.json()['data']
    for status in statuses:
        if status_label == status['label']:
            status_id = status['id']
    return status_id
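# Hedged usage sketch combining the Close.io helpers above: look up the id for
# a status label, then keep only the opportunities updated since a given date
# that carry that status. The label, date, and the 'status_id' field are
# assumptions, not values confirmed by the original code.
won_status_id = get_opportunity_status_id_from_status_label('Won')
recent_opps = get_list_of_opportunities('2016-01-01')
won_opps = [opp for opp in recent_opps if opp.get('status_id') == won_status_id]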
#querylist = ["texas%20aggies", "texas%20longhorns", "duke%20blue%20devil", "dallas%20cowboys", "dalas%20mavericksi"]
categorylist = ["&NewsCategory=%27rt_Entertainment",
                "&NewsCategory=%27rt_Business",
                "&NewsCategory=%27rt_Politics"]
querylist = ['bing', 'amazon', 'twitter', 'yahoo', 'google',
             'beyonce', 'bieber', 'television', 'movies', 'music',
             'obama', 'america', 'congress', 'senate', 'lawmakers',
             'apple', 'facebook', 'westeros', 'gonzaga', 'banana']
# gcount (running output-file counter) and myKey are defined earlier in the script
for category in categorylist:
    for query in querylist:
        temp = query + "%27" + category
        url1 = "https://api.datamarket.azure.com/Data.ashx/Bing/Search/News?Query=%27" + temp + "%27&$format=json"
        url2 = "https://api.datamarket.azure.com/Data.ashx/Bing/Search/News?Query=%27" + temp + "%27&$format=json&$skip=15"
        url3 = "https://api.datamarket.azure.com/Data.ashx/Bing/Search/News?Query=%27" + temp + "%27&$format=json&$skip=30"
        localcount = 0
        urllist = []
        # parse the JSON body of the Bing News search response
        r = requests.get(url1, auth=(myKey, myKey)).json()
        for i in r['d']['results']:
            url = str(i['Url'].encode('ascii', 'ignore'))
            title = str(i['Title'].encode('ascii', 'ignore'))
            if url not in urllist:
                urllist.append(url)
                desc = str(i['Description'].encode('ascii', 'ignore'))
                filename = "%d.txt" % gcount
                f = open(filename, 'a')
                f.write(title)
                f.write("\n")
                f.write(desc)
                f.close()
                gcount = gcount + 1
                localcount = localcount + 1
# simple test status of an elasticsearch cluster and return 1 if not green
RETURN_CODE = 3
try:
    import requests
except ImportError:
    print("UNKNOWN: Please install python-requests")
    exit(RETURN_CODE)
URL = 'localhost'
PORT = 9200
try:
    # Example payload:
    # {"cluster_name":"cloudops_logs","status":"yellow","timed_out":false,"number_of_nodes":3,"number_of_data_nodes":2,"active_primary_shards":125,"active_shards":163,"relocating_shards":0,"initializing_shards":2,"unassigned_shards":85,"number_of_pending_tasks":0}
    result = requests.get("http://{url}:{port}/_cluster/health"
                          .format(url=URL, port=PORT))
    health = result.json()
    if 'green' not in health['status']:
        if 'yellow' in health['status'] and health['number_of_nodes'] == 1:
            print("OK: cluster {} is {} but it is normal with {} node".format(
                health['cluster_name'], health['status'],
                health['number_of_nodes']))
            RETURN_CODE = 0
        else:
            print("WARNING: the cluster '{cluster_name}' is in {status} status"
                  .format(cluster_name=health['cluster_name'],
                          status=health['status']))
            RETURN_CODE = 1
    else:
        print("OK: cluster {} is {}".format(health['cluster_name'],
                                            health['status']))
        RETURN_CODE = 0
except Exception as e:
    # Assumed completion of the truncated handler: report the failure and fall
    # through with the UNKNOWN return code set at the top of the script.
    print("UNKNOWN: could not query the cluster ({})".format(e))
exit(RETURN_CODE)
# print the forecast
import requests

api_key = '7126267a4adc6f73'
website_url = ('http://api.wunderground.com/api/' + api_key +
               '/forecast/q/UT/Logan.json')
r = requests.get(website_url)
forecast = r.json()
print(forecast['forecast']['txt_forecast']['forecastday'][0]['fcttext'])
def schools_service(access_token):
    headers = {'Authorization': access_token}
    # SCHOOLS_URL and my_config are defined elsewhere; 'config' is a legacy
    # requests 0.x keyword argument
    request = requests.get(SCHOOLS_URL, headers=headers, config=my_config)
    return request.content
def rain_service(access_token):
    headers = {'Authorization': access_token}
    request = requests.get(RAIN_URL, headers=headers, config=my_config)
    return request.content
tMinute = 0
tForecast = datetime.datetime(year, month, forecast_day, tHour, tMinute, second)
format = "%H:%M"
forecast_time = datetime.datetime(year, month, forecast_day, hour, minute, second)
weather_time = forecast_time.strftime(format)
huomenna_time = tForecast.strftime(format)
while True:
    current_time = datetime.datetime.now()
    check_time = current_time.strftime(format)
    if weather_time == check_time:
        request = requests.get(api_website)
        forecast = request.json()
        coatKo = forecast['forecast']['simpleforecast']['forecastday'][day]['high']['fahrenheit']
        coatKo = int(coatKo)
        todayForecast = forecast['forecast']['txt_forecast']['forecastday'][day]['fcttext']
        theDay = forecast['forecast']['txt_forecast']['forecastday'][day]['title']
        # Prints what day it is checking for and the forecast string
        print(theDay)
        print(todayForecast)
        # todo email/text the forecast
try:
    while True:
def flood_service(access_token):
    headers = {'Authorization': access_token}
    request = requests.get(FLOOD_URL, headers=headers, config=my_config)
    return request.content