def download_data(endpoint, params, session):
    """Fetch *endpoint* from the Netfonds host through *session*.

    The fully-expanded request URL is printed before the request is made.
    Returns the raw response object from ``session.get``.
    """
    url = "http://hopey.netfonds.no" + endpoint
    full_url = '?'.join([url, urlencode(params)])
    print("Request %r" % full_url)
    return session.get(url, params=params)
def _get_response(self, url, params=None, headers=None):
    """
    send raw HTTP request to get requests.Response from the specified url

    Parameters
    ----------
    url : str
        target URL
    params : dict or None
        parameters passed to the URL
    headers : dict or None
        extra HTTP headers for the request

    Raises
    ------
    RemoteDataError
        when every attempt fails to return HTTP 200.
    """
    # initial attempt + retry
    pause = self.pause
    for i in range(self.retry_count + 1):
        response = self.session.get(url, params=params, headers=headers)
        if response.status_code == requests.codes.ok:
            return response
        time.sleep(pause)

        # Increase time between subsequent requests, per subclass.
        pause *= self.pause_multiplier
        # Get a new breadcrumb if necessary, in case ours is invalidated
        # BUG FIX: was ``isinstance(params, list)`` — the dict of query
        # params never matched, and an actual list would raise TypeError
        # on the string-key assignment below.
        if isinstance(params, dict) and 'crumb' in params:
            params['crumb'] = self._get_crumb(self.retry_count)
    if params is not None and len(params) > 0:
        url = url + "?" + urlencode(params)
    raise RemoteDataError('Unable to read URL: {0}'.format(url))
def _send_request(session, params): base_url = "http://webrates.truefx.com/rates" endpoint = "/connect.html" url = base_url + endpoint s_url = url + '?' + urlencode(params) response = session.get(url, params=params) return (response)
def _send_request(session, params): base_url = "http://webrates.truefx.com/rates" endpoint = "/connect.html" url = base_url + endpoint s_url = url+'?'+urlencode(params) logging.debug("Request to '%s' with '%s' using '%s'" % (url, params, s_url)) response = session.get(url, params=params) return(response)
def _url(url, params): """ Returns long url with parameters http://mydomain.com?param1=...¶m2=... """ if params is not None and len(params) > 0: return url + "?" + urlencode(params) else: return url
def getStockUrlGoogle(sym, start, end):
    """Build the Google Finance historical-CSV URL for *sym*.

    *start* and *end* are datetime-like objects; they are rendered in the
    ``Mon DD, YYYY`` form the endpoint expects.
    """
    base = 'http://www.google.com/finance/historical?'
    query = urlencode({
        "q": sym,
        "startdate": start.strftime('%b %d, %Y'),
        "enddate": end.strftime('%b %d, %Y'),
        "output": "csv",
    })
    return base + query
def get(self, url, **kwargs):
    """GET *url* through the cached session, logging the request first.

    Any keyword arguments are forwarded unchanged to the parent
    ``requests_cache.CachedSession.get``.
    """
    # BUG FIX: the original used a bare ``except:`` around a dict lookup,
    # which silently swallowed *any* exception (even KeyboardInterrupt);
    # ``dict.get`` expresses the intent directly.
    params = kwargs.get('params', {})
    if params == {}:
        logging.debug("Request to '%s'" % url)
    else:
        logging.debug("Request to '%s' with '%s' using '%s'"
                      % (url, params, url + '?' + urlencode(params)))
    response = super(requests_cache.CachedSession, self).get(url, **kwargs)
    return response
def _build_url(typ, operation, bbox=None, recurse=None, tags='', meta=False):
    """Build an Overpass API query URL.

    Parameters
    ----------
    typ : str
        Overpass element type (e.g. ``node``, ``way``).
    operation : str
        ``'and'`` (all tag filters on one statement) or ``'or'`` (a union
        with one statement per tag).
    bbox : polygon-like or None
        object with ``exterior.coords``; rendered as an Overpass ``poly``
        filter. None omits the spatial filter.
    recurse : str or None
        one of ``'up'``, ``'uprel'``, ``'down'``, ``'downrel'``.
    tags : str or sequence of str
        tag filter expressions (a single string is wrapped in a list).
    meta : bool
        when True, request ``out meta`` instead of plain ``out``.

    Raises
    ------
    ValueError
        on an unrecognized *recurse* or *operation* value.
    """
    recurse_map = {
        'up': '<',
        'uprel': '<<',
        'down': '>',
        'downrel': '>>',
    }
    if recurse is None:
        recursestr = ''
    else:
        try:
            recursestr = recurse_map[recurse]
        except KeyError:
            raise ValueError("Unrecognized recurse value '{}'. "
                             "Must be one of: {}.".format(
                                 recurse, ', '.join(recurse_map.keys())))

    # Allow tags to be a single string
    if isinstance(tags, string_types) and tags:
        tags = [tags]
    queries = ''.join('[{}]'.format(t) for t in tags)

    # Overpass QL takes the bounding box as
    # (min latitude, min longitude, max latitude, max longitude)
    if bbox is None:
        bboxstr = ''
    else:
        # bboxstr = "({})".format(
        #     ','.join(str(b) for b in (bbox[1], bbox[0], bbox[3], bbox[2])))
        bboxstr = '(poly:"{}")'.format(
            ' '.join('{c[1]} {c[0]}'.format(c=c) for c in bbox.exterior.coords))

    if meta:
        metastr = 'meta'
    else:
        metastr = ''

    if operation == 'and':
        query = '({typ}{bbox}{queries};{recurse};);out {meta};'.format(
            typ=typ, bbox=bboxstr, queries=queries, recurse=recursestr,
            meta=metastr)
    elif operation == 'or':
        query = '('
        for temp_query in tags:
            temp_query = '[' + temp_query + ']'
            query += '{typ}{bbox}{queries};{recurse};'.format(
                typ=typ, bbox=bboxstr, queries=temp_query, recurse=recursestr)
        query += ');out ' + metastr + ';'
    else:
        # BUG FIX: previously any other operation fell through with ``query``
        # unbound, crashing with NameError; fail loudly and descriptively
        # instead, matching the recurse validation above.
        raise ValueError("Unrecognized operation '{}'. "
                         "Must be one of: and, or.".format(operation))

    url = ''.join([overpass_url_list[0], '?', urlencode({'data': query})])
    # print(query)
    return url
def _get_hist_google(sym, start, end, retry_count, pause):
    """
    Get historical data for the given name from google.
    Date format is datetime

    Returns a DataFrame.
    """
    start, end = _sanitize_dates(start, end)

    # e.g. www.google.com/finance/historical?q=GOOG&startdate=Jun+9%2C+2011
    #      &enddate=Jun+8%2C+2013&output=csv
    query = urlencode({
        "q": sym,
        "startdate": start.strftime('%b %d, %Y'),
        "enddate": end.strftime('%b %d, %Y'),
        "output": "csv",
    })
    return _retry_read_url("%s%s" % (_HISTORICAL_GOOGLE_URL, query),
                           retry_count, pause, 'Google')
def _get_hist_google(sym, start, end, retry_count, pause):
    """
    Get historical data for the given name from google.
    Date format is datetime

    Returns a DataFrame.
    """
    start, end = _sanitize_dates(start, end)

    # Example expanded URL:
    # www.google.com/finance/historical?q=GOOG&startdate=Jun+9%2C+2011&enddate=Jun+8%2C+2013&output=csv
    date_fmt = '%b %d, %Y'
    params = {"q": sym,
              "startdate": start.strftime(date_fmt),
              "enddate": end.strftime(date_fmt),
              "output": "csv"}
    url = "%s%s" % (_HISTORICAL_GOOGLE_URL, urlencode(params))
    return _retry_read_url(url, retry_count, pause, 'Google')
def google(code="KOSDAQ%3A016170", ei="w3lRVoiLM9Cc0QSa1J6gCA",
           start=datetime.datetime(2015, 1, 1),
           end=datetime.datetime(2015, 1, 20), urlview=1):
    """Scrape Google Finance historical prices for *code* into a DataFrame.

    Parameters
    ----------
    code : str
        URL-encoded exchange:symbol pair (e.g. ``KOSDAQ%3A016170``).
    ei : str
        opaque ``ei`` token the endpoint expects.
    start, end : datetime.datetime
        date range of the history to fetch.
    urlview : int
        truthy -> print the request URL before fetching.

    Returns a DataFrame indexed (and sorted) by the parsed ``Date`` column.
    """
    # URL
    _GOOGLE_URL = "https://www.google.com/finance/historical?q=" + code + "&"
    url = "%s%s" % (_GOOGLE_URL,
                    urlencode({"startdate": start.strftime('%b %d, %Y'),
                               "enddate": end.strftime('%b %d, %Y'),
                               "ei": ei}))
    if urlview:
        # BUG FIX: was the Python-2-only ``print url`` statement, a syntax
        # error under Python 3 (the rest of this file uses print()).
        print(url)

    # Data Read
    # NOTE(review): urllib2 is Python 2 only — consider urllib.request
    # when this module is fully ported.
    response = urllib2.urlopen(url)
    html = response.read()
    soup = BeautifulSoup(html, 'lxml')
    a = soup.find(id='prices')

    # Data Parsing
    label = []
    for i in a.findAll("th"):
        label.append(i.text.replace('\n', ''))
    value = []
    for i in a.findAll("td", {"class": re.compile("^(rgt|lm)$")}):
        value.append(i.text.replace('\n', ''))

    # Splitting the flat cell list into rows of len(label) columns;
    # numeric-looking cells are de-comma'd and converted to float.
    valueData = []
    length = len(label)
    temp = []
    for idx, v in enumerate(value):
        if v[0].isdigit():
            v = v.replace(',', '')
            v = float(v)
        if idx % length == 0 and idx > 0:
            valueData.append(temp)
            temp = []
        temp.append(v)
    valueData.append(temp)

    # Pandas
    df = pd.DataFrame(valueData, columns=label)
    df['Date'] = pd.to_datetime(df['Date'])
    # BUG FIX: DataFrame.sort() was deprecated and removed from pandas;
    # sort_index() is the equivalent sort-by-index call.
    df = df.set_index('Date').sort_index()
    return df
def _build_url(typ, bbox=None, recurse=None, tags='', meta=False):
    """Assemble an Overpass API interpreter URL for one query statement.

    *typ* is the Overpass element type; *bbox* (optional) supplies a polygon
    via ``exterior.coords``; *recurse* selects an Overpass recursion operator;
    *tags* is one tag filter or a sequence of them; *meta* requests
    ``out meta`` output.
    """
    recurse_map = {
        'up': '<',
        'uprel': '<<',
        'down': '>',
        'downrel': '>>',
    }
    if recurse is None:
        recursestr = ''
    elif recurse in recurse_map:
        recursestr = recurse_map[recurse]
    else:
        raise ValueError("Unrecognized recurse value '{}'. "
                         "Must be one of: {}."
                         .format(recurse, ', '.join(recurse_map.keys())))

    # A bare string is treated as a single tag filter.
    if isinstance(tags, string_types) and tags:
        tags = [tags]
    queries = ''.join('[{}]'.format(t) for t in tags)

    # Spatial filter: an Overpass ``poly`` built from the polygon's exterior
    # ring, emitted as "lat lon" pairs.
    if bbox is None:
        bboxstr = ''
    else:
        coord_pairs = ' '.join('{c[1]} {c[0]}'.format(c=c)
                               for c in bbox.exterior.coords)
        bboxstr = '(poly:"{}")'.format(coord_pairs)

    metastr = 'meta' if meta else ''

    # NOTE(review): unlike the sibling _build_url, no ';' follows {recurse}
    # here — verify Overpass accepts '...>);out' when recursion is requested.
    query = '({typ}{bbox}{queries};{recurse});out {meta};'.format(
        typ=typ, bbox=bboxstr, queries=queries, recurse=recursestr,
        meta=metastr)
    return 'http://www.overpass-api.de/api/interpreter?' + urlencode(
        {'data': query})
def _get_response(self, url, params=None):
    """
    send raw HTTP request to get requests.Response from the specified url

    Parameters
    ----------
    url : str
        target URL
    params : dict or None
        parameters passed to the URL
    """
    attempts = self.retry_count + 1  # initial attempt + retries
    for _ in range(attempts):
        response = self.session.get(url, params=params)
        if response.status_code == requests.codes.ok:
            return response
        time.sleep(self.pause)
    # Expand the query string into the error message for easier debugging.
    if params is not None and len(params) > 0:
        url = "{0}?{1}".format(url, urlencode(params))
    raise RemoteDataError('Unable to read URL: {0}'.format(url))
def _get_response(self, url, params=None):
    """
    send raw HTTP request to get requests.Response from the specified url

    Parameters
    ----------
    url : str
        target URL
    params : dict or None
        parameters passed to the URL
    """
    # One initial attempt plus self.retry_count retries, each separated
    # by a fixed self.pause sleep.
    for attempt in range(self.retry_count + 1):
        resp = self.session.get(url, params=params)
        if resp.status_code == requests.codes.ok:
            return resp
        time.sleep(self.pause)
    # All attempts failed: report the fully-expanded URL.
    if params is not None and len(params) > 0:
        url = url + "?" + urlencode(params)
    raise RemoteDataError('Unable to read URL: {0}'.format(url))
def _get_response(self, url, params=None, headers=None):
    """
    send raw HTTP request to get requests.Response from the specified url

    Parameters
    ----------
    url : str
        target URL
    params : dict or None
        parameters passed to the URL
    headers : dict or None
        extra HTTP headers for the request

    Raises
    ------
    RemoteDataError
        when every attempt fails; includes the last response body when
        one was decodable.
    """
    # initial attempt + retry
    pause = self.pause
    last_response_text = ""
    for _ in range(self.retry_count + 1):
        response = self.session.get(url, params=params, headers=headers,
                                    timeout=self.timeout)
        if response.status_code == requests.codes.ok:
            return response

        if response.encoding:
            last_response_text = response.text.encode(response.encoding)
        time.sleep(pause)

        # Increase time between subsequent requests, per subclass.
        pause *= self.pause_multiplier
        # Get a new breadcrumb if necessary, in case ours is invalidated
        # BUG FIX: was ``isinstance(params, list)`` — the dict of query
        # params never matched, and an actual list would raise TypeError
        # on the string-key assignment below.
        if isinstance(params, dict) and "crumb" in params:
            params["crumb"] = self._get_crumb(self.retry_count)

        # If our output error function returns True, exit the loop.
        if self._output_error(response):
            break

    if params is not None and len(params) > 0:
        url = url + "?" + urlencode(params)
    msg = "Unable to read URL: {0}".format(url)
    if last_response_text:
        msg += "\nResponse Text:\n{0}".format(last_response_text)

    raise RemoteDataError(msg)
def _get_response(self, url, params=None, headers=None):
    """Fetch *url* with retries; return the response on HTTP 200.

    Parameters
    ----------
    url : str
        target URL
    params : dict or None
        parameters passed to the URL
    headers : dict or None
        extra HTTP headers for the request

    Raises
    ------
    RemoteDataError
        when every attempt fails; includes the last response body when
        one was decodable.
    """
    pause = self.pause
    last_response_text = ""
    for _ in range(self.retry_count + 1):
        response = self.session.get(url, params=params, headers=headers)
        if response.status_code == requests.codes.ok:
            return response
        if response.encoding:
            last_response_text = response.text.encode(response.encoding)
        time.sleep(pause)
        # Back off between attempts, per subclass configuration.
        pause *= self.pause_multiplier
        # Stop retrying early when the subclass flags a fatal response.
        if self._output_error(response):
            break
    if params is not None and len(params) > 0:
        url = url + "?" + urlencode(params)
    msg = "Unable to read URL: {0}".format(url)
    if last_response_text:
        msg += "\nResponse Text:\n{0}".format(last_response_text)
    # BUG FIX: the failure was only print()ed, implicitly returning None and
    # deferring the crash to the caller; raise RemoteDataError like the
    # sibling _get_response implementations in this file.
    raise RemoteDataError(msg)
def get(self, url, **kwargs):
    """GET *url* through the cached session after logging the request.

    All keyword arguments are forwarded unchanged to
    ``requests_cache.CachedSession.get``.
    """
    # BUG FIX: replaced the bare ``except:`` (which swallowed every
    # exception type, not just the missing key) with dict.get.
    params = kwargs.get('params', {})
    if params == {}:
        logging.debug("Request to '%s'" % url)
    else:
        logging.debug("Request to '%s' with '%s' using '%s'"
                      % (url, params, url + '?' + urlencode(params)))
    response = super(requests_cache.CachedSession, self).get(url, **kwargs)
    return response
def url(self):
    """API URL

    Builds the IEX endpoint for ``self.service`` with the query string
    produced by ``self._get_params(self.symbols)``.
    """
    query = urlencode(self._get_params(self.symbols))
    endpoint = "https://api.iextrading.com/1.0/{}".format(self.service)
    return endpoint + "?" + query
def main(input_filename, outdir, disp, expire_after, samples, api_key):
    """Plot ground-elevation profiles for task files matched by *input_filename*.

    For every matched task file (Excel), the task's turnpoint coordinates are
    sent to the Google Maps Elevation API (HTTP responses cached via
    requests_cache), and a Distance-vs-Elevation figure annotated with the
    turnpoints is written to ``<outdir>/elevation_<name>.png``.

    Parameters
    ----------
    input_filename : str
        glob pattern (may contain ``{...}`` fields filled from paths_default).
    outdir : str
        output directory; '' means ``<script dir>/out``.
    disp : bool-like
        truthy -> print intermediate DataFrames and show the plot window.
    expire_after : str
        cache policy: '0' disables caching, '-1' caches forever, anything
        else is parsed as a number of seconds.
    samples : int
        number of elevation samples requested along the task path.
    api_key : str
        Google Maps Elevation API key.
    """
    basepath = os.path.dirname(__file__)
    #basepath = os.path.dirname(os.path.abspath(__file__))
    if outdir=='':
        outdir = os.path.join(basepath, 'out')
    filename_cache = os.path.join(outdir, "requests_cache")
    # Map the expire_after CLI string onto requests_cache's expiration value:
    # None -> no cache, 0 -> never expire, timedelta -> expire after that long.
    if expire_after == '0':
        expire_after = None
        print("expire_after==0 no cache")
    else:
        if expire_after == '-1':
            expire_after = 0
            print("Installing cache '%s.sqlite' without expiration" % filename_cache)
        else:
            expire_after = pd.to_timedelta(expire_after, unit='s')
            print("Installing cache '%s.sqlite' with expire_after=%s (d days hh:mm:ss)" % (filename_cache, expire_after))
    requests_cache.install_cache(filename_cache, backend='sqlite', expire_after=expire_after) # expiration seconds
    input_filename = input_filename.format(**paths_default)
    outdir = outdir.format(**paths_default)
    for filename in glob.glob(input_filename):
        print("Read '%s'" % filename)
        filename_base, filename_ext = os.path.splitext(os.path.basename(filename))
        if filename_ext in ['.xls', '.xlsx']:
            df_task = pd.read_excel(filename)
        elif filename_ext in ['.fpl']:
            raise(NotImplementedError("ToDo: File format '%s' not YET supported" % filename_ext)) # see condor2task code
        else:
            raise(NotImplementedError("File format '%s' not supported" % filename_ext))
        df_task = add_distance_bearing(df_task)
        dist_tot = df_task['DistanceToGo'].sum()
        if disp:
            print(df_task)
            #print(df_task.dtypes)
        # Path parameter for the Elevation API: "lat,lon|lat,lon|..."
        # (assumes df_task has Lat/Lon columns — provided by add_distance_bearing
        # or the input file; TODO confirm).
        s_coords = "|".join(
            df_task.apply(lambda tp: "%.10f%s%.10f" % (tp.Lat, ",", tp.Lon), axis=1))
        params = {
            'path': s_coords,
            'samples': samples,
            'key': api_key
        }
        #url = "https://maps.googleapis.com/maps/api/elevation/json?path=36.578581,-118.291994|36.23998,-116.83171&samples=3"
        url = "https://maps.googleapis.com/maps/api/elevation/json"
        print("Request to '%s' with\n%s\nusing url=\n%s" % (url, json.dumps(params, indent=True), url + "?"
              + urlencode(params)))
        response = requests.get(url, params=params)
        dat = response.json()
        # Normalize the API's results into an Elevation/Resolution/Lat/Lon frame.
        df_elevation = pd.DataFrame(dat['results'])
        df_elevation = df_elevation.rename(columns={
            "elevation": "Elevation",
            "resolution": "Resolution"
        })
        df_elevation['Lat'] = df_elevation['location'].map(lambda location: location['lat'])
        df_elevation['Lon'] = df_elevation['location'].map(lambda location: location['lng'])
        df_elevation.drop('location', axis=1, inplace=True)
        #df_elevation = add_distance_bearing(df_elevation)
        # Samples are assumed evenly spaced along the task, so distance is a
        # linear ramp from 0 to the task total.
        df_elevation['Distance'] = np.linspace(0, dist_tot, samples)
        from scipy import interpolate
        df_task['RefAltitude'] = 0
        # Interpolate ground elevation at each turnpoint's cumulative distance.
        f = interpolate.interp1d(df_elevation['Distance'], df_elevation['Elevation'])
        df_task['RefAltitude'] = f(df_task['DistanceToGoSum'])
        #for i, tp in df_task.iterrows():
        #    df_task.loc[i, 'RefAltitude'] = df_task.loc[i, 'Altitude']
        if disp:
            print(df_elevation)
        #def forceAspect(ax,aspect=1):
        #    im = ax.get_images()
        #    extent = im[0].get_extent()
        #    ax.set_aspect(abs((extent[1]-extent[0])/(extent[3]-extent[2]))/aspect)
        dist_max = df_elevation['Distance'].max()
        elev_max = df_elevation['Elevation'].max()
        fig = plt.figure()
        ax = fig.add_subplot(111, adjustable='box', aspect=dist_max / (elev_max * 4.0))
        ax.plot(df_elevation['Distance'], df_elevation['Elevation'], c='b')
        # Headroom above the highest elevation for turnpoint labels.
        y_max = df_elevation['Elevation'].max() + 500 * 2
        x_max = df_elevation['Distance'].max()
        for i, tp in df_task.iterrows():
            # Vertical grey marker line at each turnpoint.
            plt.plot((tp.DistanceToGoSum, tp.DistanceToGoSum), (0, y_max), '-', color='0.6') # 0=black 1=white
            ax.annotate(tp.Name,
                        xy=(tp.DistanceToGoSum, tp.RefAltitude + 100),
                        xytext=(tp.DistanceToGoSum + x_max * 0.005, tp.RefAltitude + y_max * 0.01 + (-150 if i==0 else 50)),
                        rotation=0,
                        #arrowprops = dict(facecolor='black', shrink=0.01),
                        )
        # Ground elevation at turnpoints (red) vs declared altitude (green).
        ax.plot(df_task['DistanceToGoSum'], df_task['RefAltitude'], 'o', c='r')
        ax.plot(df_task['DistanceToGoSum'], df_task['Altitude'], 'o', c='g')
        ax.set_title("Ground elevation for '%s'" % filename_base)
        ax.set_xlabel('Distance (km)')
        ax.set_ylabel('Elevation (m)')
        ax.set_ylim((0, y_max))
        ax.grid(True)
        #forceAspect(ax,aspect=2.0)
        filename_out = os.path.join(outdir, "elevation_%s.%s" % (filename_base, "png"))
        print("Output '%s'" % filename_out)
        plt.savefig(filename_out)
        if disp:
            plt.show()
def build_url(symbol, start, end):
    """Return the request URL for *symbol* between *start* and *end*.

    The query string is built from ``get_params`` and appended to ``BASE``.
    """
    query = urlencode(get_params(symbol, start, end))
    return "{0}?{1}".format(BASE, query)