def get_data_by_symbol(self, symbol):
    """Scrape the displayed last price for *symbol* from its MarketWatch fund page."""
    url = 'https://www.marketwatch.com/investing/fund/%s' % symbol
    get_logger().info('Http request to: %s' % url, False)
    page = HttpHelper.http_get(url)
    # Narrow the page to the range-bar widget, then pull the last-value span.
    bar_section = string_fetch(page, 'mw-rangeBar precision=', 'Day Low')
    price_text = string_fetch(bar_section, '\"last-value\">', '</span>')
    return float(price_text.replace(',', ''))
def get_portfolio_info(self, include_option=False):
    """Query the IB gateway ('account' command) and build a Portfolio.

    :param include_option: when False, contracts whose symbol is longer
        than 15 characters are filtered out (presumably option contracts,
        which carry long OCC-style local symbols — verify against
        parse_contract's output).
    :return: Portfolio(available_funds, net_liquidation, contract_dict)
    :raises Exception: when the gateway returns empty output.
    """
    output = self.run_cmd('account')
    if output == '':
        raise Exception(
            'Failed to get account info, please check the IB gateway, config, network and ibpy2 packages, etc...'
        )
    # Pull numeric account fields out of the raw gateway text.
    str_available_funds = string_fetch(output, 'AvailableFunds, value=', ',')
    # get_logger().info("available_funds string value: %s"%str_available_funds)
    available_funds = float(str_available_funds)
    str_net_liquidation = string_fetch(output, 'NetLiquidation, value=', ',')
    # get_logger().info("net_liquidation string value: %s" % str_net_liquidation)
    net_liquidation = float(str_net_liquidation)
    # Each '<updatePortfolio' fragment describes one position.
    items = output.split('<updatePortfolio')
    # NOTE(review): str.split always returns at least one element, so this
    # condition is always True; the else branch is unreachable (harmless,
    # since items[1:] is empty when there are no positions).
    if len(items) > 0:
        contract_list = map(API.parse_contract, items[1:])
        if include_option:
            contract_dict = list_to_hash(contract_list)
        else:
            contract_dict = list_to_hash(
                filter(lambda x: len(x[0]) <= 15, contract_list))
    else:
        contract_dict = {}
    return Portfolio(available_funds, net_liquidation, contract_dict)
def parse_contract(content):
    """Parse one '<updatePortfolio' fragment into
    [symbol, [quantity, market_price, cost_price]]."""
    def fetch(start, end):
        # All fields come from the same fragment; bind it once.
        return string_fetch(content, start, end)

    # Local symbols may contain padding spaces — strip them all out.
    symbol = fetch('m_localSymbol\': \'', '\'').replace(' ', '')
    quantity = int(fetch('position=', ','))
    market_price = float(fetch('marketPrice=', ','))
    cost_price = float(fetch('averageCost=', ','))
    return [symbol, [quantity, market_price, cost_price]]
def parse_order(content):
    """Parse one open-order fragment into [order_id, symbol, action, quantity].

    All values are returned as strings, exactly as fetched.
    """
    def fetch(start, end):
        return string_fetch(content, start, end)

    symbol = fetch('m_localSymbol\': \'', '\'').replace(' ', '')
    order_id = fetch('orderId=', ',')
    action = fetch('m_action\': \'', '\'')
    quantity = fetch('m_totalQuantity\': ', ',')
    return [order_id, symbol, action, quantity]
def parse_vix_future(item):
    """Parse one VIX futures table row into [title, expiration_date, price].

    :param item: HTML fragment of a single futures row.
    :return: [title, datetime.date, float price or None]
    """
    title = string_fetch(item, 'title=\"', '\"')
    # Materialize as a list: the original used map(), whose lazy iterator
    # cannot be indexed (values[1]) on Python 3.
    cells = [string_fetch(part, '>', '</span>').strip('\r\n ')
             for part in item.split('<span')]
    date = datetime.datetime.strptime(cells[1], '%m/%d/%Y').date()
    price = None
    try:
        price = float(cells[2])
    except Exception:
        # Price cell may be empty or non-numeric; deliberately fall back
        # to None (best-effort parse).
        pass
    return [title, date, price]
def parse_historical_data(self, line, delta_hour=None):
    """Parse one IB historical-data line into [time, open, high, low, close].

    :param line: raw gateway output line containing date=/open=/... fields.
    :param delta_hour: timedelta subtracted from intraday timestamps
        (None means daily bars, which carry a date only).
    :return: [datetime or date, float, float, float, float]
    """
    time_str = string_fetch(line, 'date=', ',')
    if delta_hour is not None:
        # minute data: full timestamp, shifted by the given offset
        trade_time = datetime.datetime.strptime(
            time_str, '%Y%m%d %H:%M:%S') - delta_hour
    else:
        # daily data: date only
        trade_time = datetime.datetime.strptime(time_str, '%Y%m%d').date()
    # Renamed from 'open' so the builtin open() is not shadowed.
    open_price = float(string_fetch(line, 'open=', ','))
    high_price = float(string_fetch(line, 'high=', ','))
    low_price = float(string_fetch(line, 'low=', ','))
    close_price = float(string_fetch(line, 'close=', ','))
    return [trade_time, open_price, high_price, low_price, close_price]
def get_data_by_symbol(symbol):
    """Fetch the last price of *symbol* from MarketWatch, sending browser-like
    headers so the request is not rejected."""
    request_headers = {
        "Accept-Language": "en-US,en;q=0.5",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64; rv:40.0) Gecko/20100101 Firefox/40.0",
        "Connection": "keep-alive",
    }
    url = 'https://www.marketwatch.com/investing/fund/%s' % symbol
    page = HttpHelper.http_get(url, request_headers)
    bar_section = string_fetch(page, 'mw-rangeBar precision=', 'Day Low')
    price_text = string_fetch(bar_section, '\"last-value\">', '</span>')
    return float(price_text.replace(',', ''))
def get_current_data(self, symbol):
    """Return the current price of *symbol* from Yahoo Finance.

    Tries the rendered quote header first; falls back to the embedded
    JSON "close" field when the page layout does not match.
    """
    yahoo_symbol = Symbols.get_mapped_symbol(symbol)
    url = 'https://finance.yahoo.com/quote/%s/' % yahoo_symbol
    page = HttpHelper.http_get(url)
    try:
        # Primary path: price text between the react-text markers.
        section = string_fetch(page, 'Currency in USD', 'At close:')
        section = string_fetch(section, 'react-text', 'react-text')
        price_text = string_fetch(section, '-->', '<!--')
        return float(price_text.replace(',', ''))
    except Exception:
        # Fallback: raw "close" value from the page's embedded data.
        return round(float(string_fetch(page, '\"close\":', ',')), 2)
def parse_options(underlying_symbol, content):
    """Parse every option row in a BigCharts option-chain page.

    Rows that parse_for_option rejects (returns None) are skipped.
    """
    options = []
    for fragment in content.split('optionticker')[1:]:
        bigcharts_symbol = string_fetch(fragment, 'title=\"', '\"')
        price_cell = string_fetch(fragment, 'quote</a></td>', '')
        last_price = string_fetch(price_cell, '>', '</td>')
        parsed = BigChartsScraper.parse_for_option(
            bigcharts_symbol, underlying_symbol, last_price)
        if parsed is not None:
            options.append(parsed)
    return options
def parse_raw_file(raw_file):
    """Yield option objects parsed from a saved Yahoo option-chain page.

    The underlying symbol is taken from the page <title>; calls are
    yielded before puts, matching the embedded JSON order.
    """
    content = read_file_to_string(raw_file)
    underlying_symbol = string_fetch(content, '<title>', ' Option')
    start_marker = '"underlyingSymbol\":\"%s\"},\"contracts\":' % underlying_symbol
    # The fetched fragment is cut before its closing brace — restore it.
    chain = json.loads(string_fetch(content, start_marker, ',\"displayed\"') + '}')
    for contract in chain['calls'] + chain['puts']:
        yield YahooOptionParser.json_obj_to_option(contract, underlying_symbol)
def parse_table(sub_content):
    """Parse one securities-market-credit HTML table into Credit records.

    :param sub_content: HTML fragment containing one year's table.
    :return: list of Credit(year, month, values) records.
    """
    year = string_fetch(sub_content, '($ in mils.), ', '\r\n')
    sub_content = string_fetch(sub_content,
                               'Credit balances in margin accounts', '</table>')
    item_contents = sub_content.split('<b>')[1:]
    credits = []
    for item_content in item_contents:
        month = string_fetch(item_content, '', '</td>')
        item_values = item_content.split('align=right >')[1:]
        # List comprehension (not map): the original indexed a map() result,
        # which fails on Python 3 where map() is a lazy iterator.
        values = [string_fetch(x, '$', '</td>') for x in item_values]
        # Guard against rows with no value cells (original would IndexError).
        if values and values[0] != '':
            credits.append(Credit(year, month, values))
    return credits
def get_data_by_symbol(symbol):
    """Fetch the current price of *symbol* from Yahoo Finance, logging the
    request URL; falls back to the embedded "close" field on parse failure."""
    logger = Logger(__name__, PathMgr.get_log_path())
    yahoo_symbol = Symbols.get_mapped_symbol(symbol)
    url = 'https://finance.yahoo.com/quote/%s/' % yahoo_symbol
    logger.info('Http request to: %s' % url, False)
    page = HttpHelper.http_get(url)
    try:
        # Primary path: rendered price in the quote header.
        section = string_fetch(page, 'Currency in USD', 'At close:')
        section = string_fetch(section, 'react-text', 'react-text')
        price_text = string_fetch(section, '-->', '<!--')
        return float(price_text.replace(',', ''))
    except Exception:
        # Fallback: raw "close" value, rounded to cents.
        return round(float(string_fetch(page, '\"close\":', ',')), 2)
def get_crumble_and_cookie2():
    """Obtain Yahoo Finance's download crumb and 'B' cookie.

    Renders the SPY quote page through the configured Chrome webdriver
    (a note in the original code says phantomjs did not work here) and
    scrapes the CrumbStore crumb plus the guid cookie value.

    :return: (crumb, cookie) tuple of strings.
    """
    chrome_driver_path = ConfigMgr.get_others_config()['chromedriver']
    page = HttpHelper.webdriver_http_get('https://finance.yahoo.com/quote/SPY',
                                         chrome_driver_path)
    crumb = string_fetch(page, 'CrumbStore\":{\"crumb\":\"', '\"}')
    cookie = 'B=%s' % string_fetch(page, 'guid=', ';')
    return crumb, cookie
def get_vix_future():
    """Scrape CBOE's delayed-quote page and parse every VIX futures row."""
    page = HttpHelper.http_get('http://www.cboe.com/delayedquote/')
    table = string_fetch(page, 'FutureDataTabs', 'sf_colsIn')
    rows = table.split(' <a href="futures-quotes?')
    # Keep only VIX contract rows, parsing each into [title, date, price].
    return [CBOEScraper.parse_vix_future(row) for row in rows if 'VIX/' in row]
def get_data_by_symbols(self, symbols):
    """Fetch last prices for several US symbols in one Sina quote request.

    Symbols are mapped to Sina's 'gb_' naming ('.' becomes '$', lowercased).
    """
    def to_sina(sym):
        return 'gb_%s' % sym.replace('.', '$').lower()

    url = 'http://hq.sinajs.cn/?list=%s' % ','.join(to_sina(s) for s in symbols)
    content = HttpHelper.http_get(url)
    # One ';'-terminated record per symbol; the trailing split piece is empty.
    records = content.split(';')[:-1]
    return [float(string_fetch(record, ',', ',')) for record in records]
def ingest_credit():
    """Download the margin-credit page and parse every 'Securities market
    credit' table into one flat list of Credit records."""
    page = HttpHelper.http_get(CREDIT_URL)
    page = string_fetch(page, ' View All Years', '')
    all_credits = []
    for section in page.split('Securities market credit')[1:]:
        all_credits.extend(parse_table(section))
    return all_credits
def parse_query_string(query_string):
    """Parse a URL query string into a {key: value} dict.

    Values are kept raw (no URL-decoding); items without '=' are skipped.

    :param query_string: full URL or query string containing '?'.
    :return: dict of string keys to string values.
    """
    content = string_fetch(query_string, '?', '')
    result = {}
    for item in content.split('&'):
        if '=' in item:
            # Split only on the first '=' so values containing '=' do not
            # raise ValueError (the original unpacked an unbounded split).
            key, value = item.split('=', 1)
            result[key] = value
    return result
def validate_order_output(output):
    """Raise if the IB gateway output contains non-ignorable server errors.

    Known-benign error codes are skipped; everything else is collected
    into one Exception message.

    :param output: raw text returned by the gateway for an order command.
    :raises Exception: listing each errorCode/errorMsg pair found.
    """
    server_error_msg = 'Server Error:'
    if server_error_msg in output:
        # List comprehensions (not map/filter): the original called len()
        # on a filter() result, which fails on Python 3's lazy iterators.
        error_items = [
            [string_fetch(x, 'errorCode=', ','),
             string_fetch(x, 'errorMsg=', '>')]
            for x in output.split(server_error_msg)[1:]
        ]
        ignore_error_codes = ['2104', '2106', '2107', '399']  # 399 for market not open
        filtered_error_items = [
            x for x in error_items if x[0] not in ignore_error_codes
        ]
        if len(filtered_error_items) > 0:
            exception_msg = str([
                'errorCode={}, errorMsg={}'.format(x[0], x[1])
                for x in filtered_error_items
            ])
            raise Exception(exception_msg)
def get_option_expirations(symbol):
    """Return the expiration entries listed on Yahoo's option page for *symbol*.

    NOTE(review): values are the raw comma-separated elements of the page's
    "expirationDates" JSON array (presumably epoch timestamps — verify).
    """
    url = "https://finance.yahoo.com/quote/{}/options?p={}".format(symbol, symbol)
    page = YahooScraper.ingest_with_retry(url)
    raw_array = string_fetch(page, '\"expirationDates\":[', ']')
    return raw_array.split(',')
def load_log(self, symbol, date):
    """Replay one day's Quantopian minute print log into the minute DAO.

    Long log lines carry 'PRINT ' payloads of comma-separated closes;
    each close becomes an Equity bar (O=H=L=C) at the matching minute.
    """
    log_name = '%s%s.log' % (symbol, date.strftime('%Y%m%d'))
    log_path = PathMgr.get_data_path('quantopian_daily_min/%s' % log_name)
    raw_lines = read_file_to_string(log_path).split('\n')
    # Only lines longer than 100 chars carry price payloads.
    payloads = [string_fetch(x, 'PRINT ', '') for x in raw_lines if len(x) > 100]
    prices_list = [float(p) for p in ','.join(payloads).split(',')]
    datetimes = TradeTime.generate_datetimes(date, date)
    # Two-sequence map kept deliberately (Python 2 pads the shorter input).
    equities = map(lambda t, p: Equity(symbol, t, p, p, p, p),
                   datetimes, prices_list)
    EquityMinDAO().insert(equities)
def to_csv(self, symbol, date):
    """Convert one day's Quantopian minute print log into a 'datetime,price'
    CSV written next to the source log."""
    day_tag = date.strftime('%Y%m%d')
    log_path = PathMgr.get_data_path('quantopian_daily_min/%s%s.log' % (symbol, day_tag))
    raw_lines = read_file_to_string(log_path).split('\n')
    # Only lines longer than 100 chars carry price payloads.
    payloads = [string_fetch(x, 'PRINT ', '') for x in raw_lines if len(x) > 100]
    prices_list = [float(p) for p in ','.join(payloads).split(',')]
    datetimes = TradeTime.generate_datetimes(date, date)
    # Two-sequence map kept deliberately (Python 2 pads the shorter input).
    csv_rows = map(lambda t, p: '%s,%s' % (t, p), datetimes, prices_list)
    write_path = PathMgr.get_data_path('quantopian_daily_min/%s%s.csv' % (symbol, day_tag))
    write_to_file(write_path, '\n'.join(csv_rows))
def get_market_price(self, symbol, sec_type='STK', exchange='SMART', currency='USD', strike=0.0, expiry='', action=''):
    """Ask the IB gateway for the market price of a contract.

    Returns the median of all tickPrice values reported; retries the
    command once when the output carries errorCode=2119.

    :raises Exception: when no tickPrice events come back.
    """
    cmd_args = [symbol, sec_type, exchange, currency, strike, expiry, action]
    output = self.run_cmd('market', cmd_args)
    if 'errorCode=2119' in output:
        # Transient market-data error code — retry the command once.
        output = self.run_cmd('market', cmd_args)
    items = output.split('<tickPrice')
    if len(items) > 1:
        # sorted() on a generator (not map().sort()) and floor division:
        # the original called .sort() on a map() result and used '/' for
        # the index, both of which break on Python 3.
        prices = sorted(float(string_fetch(x, 'price=', ',')) for x in items[1:])
        middle_index = len(prices) // 2
        return prices[middle_index]
    else:
        raise Exception('Unable to get market price from IB...')
def get_current_data(self, symbol):
    """Return the displayed last price for *symbol* from MarketWatch."""
    page = HttpHelper.http_get('https://www.marketwatch.com/investing/fund/%s' % symbol)
    # Narrow to the range-bar widget before extracting the price span.
    bar_section = string_fetch(page, 'mw-rangeBar precision=', 'Day Low')
    price_text = string_fetch(bar_section, '\"last-value\">', '</span>')
    return float(price_text.replace(',', ''))
def get_trade_date(self):
    """Derive the trade date encoded in the option data file name
    (…_data_MM-DD-YY.h5).

    :return: a datetime (midnight of the encoded date), not a date.
    """
    raw_date = string_fetch(self.option_data_file, '_data_', '.h5')
    return datetime.strptime(raw_date, '%m-%d-%y')
def get_data_by_symbol(self, symbol):
    """Return the previous-day closing price of *symbol* from CNBC's quote page."""
    page = HttpHelper.http_get('https://www.cnbc.com/quotes/?symbol=%s' % symbol)
    price_text = string_fetch(page, '\"previous_day_closing\":\"', '\"')
    return float(price_text.replace(',', ''))
def get_data_by_symbol(self, symbol):
    """Return the price shown for *symbol* on laohu8's quote page."""
    page = HttpHelper.http_get('https://www.laohu8.com/hq/s/%s' % symbol)
    price_text = string_fetch(page, 'class=\"price\">', '</td>')
    return float(price_text)
def get_data_by_symbol(self, symbol):
    """Return the price of a US *symbol* from Sina's quote service.

    Sina names US symbols 'gb_' + symbol with '.' mapped to '$', lowercased.
    """
    sina_symbol = symbol.replace('.', '$').lower()
    page = HttpHelper.http_get('http://hq.sinajs.cn/?list=gb_%s' % sina_symbol)
    # The price is the field between the first two commas of the record.
    return float(string_fetch(page, ',', ','))
def get_error_logger():
    """Build an error logger named after the calling strategy module.

    Walks two frames up the stack to the module that (indirectly) asked
    for the logger, and uses its file name without extension, suffixed
    with '_error', as the strategy name.
    """
    caller_frame = inspect.stack()[2]
    caller_module = inspect.getmodule(caller_frame[0])
    base_name = os.path.split(caller_module.__file__)[1]
    strategy_name = string_fetch(base_name, '', '.')
    return Container.get_logger(strategy_name='%s_error' % strategy_name)