def save_portfolio_info(strategy_name):
    """Record today's (US/Eastern) portfolio snapshot in the strategy's JSON file."""
    eastern_today = datetime.datetime.now(tz=pytz.timezone('US/Eastern')).strftime('%Y-%m-%d')
    info = PortfolioDAO.read_portfolio_info(strategy_name)
    # Key the snapshot by the Eastern-time calendar date.
    info[eastern_today] = API().get_portfolio_info().to_dict()
    target = PathMgr.get_strategies_portfolio_file(strategy_name)
    ensure_parent_dir_exists(target)
    write_to_file(target, json.dumps(info, indent=4, sort_keys=True))
def get_vxmt_daily():
    """Download CBOE VXMT daily prices and store them locally in Yahoo CSV format."""
    url = 'http://www.cboe.com/publish/ScheduledTask/MktData/datahouse/vxmtdailyprices.csv'
    content = HttpHelper.http_get(url)
    # Skip the 3-line CBOE header; [:-1] drops the empty element after the final CRLF.
    records = content.split('\r\n')[3:-1]
    # list(map(...)) keeps list-concatenation working on Python 3, where map is
    # lazy and `list + map` raises TypeError; behavior on Python 2 is unchanged.
    yahoo_records = ['Date,Open,High,Low,Close,Adj Close,Volume'] + \
        list(map(CBOEScraper.to_yahoo_format, records))
    yahoo_content = '\r\n'.join(yahoo_records)
    path = PathMgr.get_historical_etf_path('^VXMT')
    write_to_file(path, yahoo_content)
def gen_expiration_dates(self, symbol):
    """Fetch option expiration dates for *symbol*, cache the raw JSON, and return the list.

    Returns an empty list when the response lacks the expected
    ``meta.expirations`` structure.
    """
    file_path = os.path.join(self.expiration_date_dir, '{}.json'.format(symbol))
    content = BarchartScraper.get_expiration_dates(symbol)
    write_to_file(file_path, content)
    data = json.loads(content)
    try:
        return data['meta']['expirations']
    except (KeyError, TypeError):
        # Narrowed from bare Exception: only the lookups above can fail here
        # (missing key, or a non-dict payload).
        return []
def get_option_content(symbol, url_template):
    """Fetch the option page for *symbol* from *url_template*, cache it to disk, and return it."""
    url = url_template.format(symbol)
    content = HttpHelper.http_get(url)
    # 'showAll' URLs carry the full chain; cache them under a distinct suffix.
    suffix = '2' if 'showAll' in url else '1'
    file_path = PathMgr.get_bigcharts_option_symbol_path(symbol + suffix)
    write_to_file(file_path, content)
    return content
def ingest_all_options(symbols=None):
    """Download and persist option chains for every symbol.

    :param symbols: iterable of symbols; defaults to all option symbols.
        The previous default ``Symbols.get_option_symbols()`` was evaluated
        once at import time; the ``None`` sentinel defers the lookup to call
        time, matching ``ingest_all_historical_etf``.
    """
    if symbols is None:
        symbols = Symbols.get_option_symbols()
    logger = Logger(__name__, PathMgr.get_log_path())
    for symbol in symbols:
        logger.info('ingest option data for %s...' % symbol)
        date_values = YahooScraper.get_option_expirations(symbol)
        for date_value in date_values:
            path = PathMgr.get_yahoo_option_path(symbol, date_value)
            content = YahooScraper.ingest_option(symbol, date_value)
            write_to_file(path, content)
            # Throttle requests to be polite to the data source.
            time.sleep(1)
    logger.info('ingest option data completed..')
def ingest_all_historical_etf(date_from = '1993-01-29', date_to=None, symbols=None):
    """Download historical quotes for each symbol and write them to the ETF cache.

    :param date_from: inclusive start date string (YYYY-MM-DD).
    :param date_to: inclusive end date string; defaults to today.
    :param symbols: iterable of symbols; defaults to all known symbols.
    """
    if symbols is None:
        symbols = Symbols.get_all_symbols()
    if date_to is None:
        date_to = datetime.date.today().strftime("%Y-%m-%d")
    logger = Logger(__name__, PathMgr.get_log_path())
    for sym in symbols:
        logger.info('ingest for %s...' % sym)
        quote_path = PathMgr.get_historical_etf_path(sym)
        quote_content = YahooScraper.download_quote2(sym, date_from, date_to)
        write_to_file(quote_path, quote_content)
        # Throttle requests to avoid hammering the quote service.
        time.sleep(1)
def realtimedata_to_csv(symbol='SVXY', trade_date=None):
    """Export one day's minute-level realtime prices for *symbol* to CSV.

    :param symbol: equity symbol to export (default 'SVXY').
    :param trade_date: ``datetime.date`` of the session; defaults to
        2018-05-29, reproducing the original hard-coded export.

    Fixes the ``datetime.datetime(2018, 05, 29, ...)`` literals: leading-zero
    integer literals are a SyntaxError on Python 3 (octal trap on Python 2).
    """
    if trade_date is None:
        trade_date = datetime.date(2018, 5, 29)
    day_start = datetime.datetime(trade_date.year, trade_date.month, trade_date.day, 0, 0, 0)
    day_end = day_start + datetime.timedelta(days=1)
    records = EquityRealTimeDAO().get_min_time_and_price(symbol, day_start, day_end)
    # Drop the first record, as the original export did.
    lines = map(lambda x: '%s,%s' % (x[0], x[1]), records[1:])
    content = '\n'.join(lines)
    write_path = PathMgr.get_data_path(
        'quantopian_daily_min/realtime_%s%s.csv' % (symbol, trade_date.strftime('%Y%m%d')))
    write_to_file(write_path, content)
def set(self, section_name, key, value):
    """Set ``config[section_name][key] = value`` in the JSON file and persist it.

    A missing, unreadable, or corrupt file is treated as an empty config
    (best-effort behavior preserved, but the bare ``except Exception`` is
    narrowed to the read/parse errors that can actually occur here).
    """
    dic = {}
    if os.path.exists(self.file_path):
        try:
            content = read_file_to_string(self.file_path)
            dic = json.loads(content)
        except (IOError, OSError, ValueError):
            # Corrupt or unreadable config: start from a fresh dict rather
            # than failing the write.
            pass
    # setdefault replaces the `if section_name in dic.keys()` branch.
    dic.setdefault(section_name, {})[key] = value
    write_to_file(self.file_path, json.dumps(dic))
def save_to_csv(self, trade_date=None):
    """Dump all realtime equity rows for *trade_date*'s regular session (09:30-16:00) to CSV.

    :param trade_date: ``date`` of the session; defaults to the latest trade date.
    Writes nothing when the query returns no rows.
    """
    if trade_date is None:
        trade_date = TradeTime.get_latest_trade_date()
    start_time = datetime.datetime(trade_date.year, trade_date.month, trade_date.day, 9, 30, 0)
    end_time = datetime.datetime(trade_date.year, trade_date.month, trade_date.day, 16, 0, 0)
    # Values are generated internally (not user input), so string formatting is
    # tolerable here; a parameterized query would still be preferable.
    query = """select * from equity_realtime where tradeTime >= '{}' and tradeTime <= '{}'""".format(start_time, end_time)
    rows = self.select(query)
    if rows is not None and len(rows) > 0:
        # x[1:] drops the leading column (presumably an auto-increment id) -- TODO confirm.
        records = map(lambda x: ','.join(map(str, x[1:])), rows)
        content = '\n'.join(records)
        # NOTE(review): the directory uses *today's* date while the file name uses
        # trade_date, so backfills of past dates land under today's folder -- confirm intended.
        raw_daily_path = PathMgr.get_raw_data_path(datetime.date.today().strftime('%Y-%m-%d'))
        realtime_dir = os.path.join(raw_daily_path, 'realtime')
        ensure_dir_exists(realtime_dir)
        file_path = os.path.join(realtime_dir, '%s.csv' % trade_date.strftime('%Y-%m-%d'))
        write_to_file(file_path, content)
def to_csv(self, symbol, date):
    """Convert a quantopian minute-price log for (symbol, date) into 'datetime,price' CSV rows."""
    file_name = '%s%s.log' % (symbol, date.strftime('%Y%m%d'))
    path = PathMgr.get_data_path('quantopian_daily_min/%s' % file_name)
    content = read_file_to_string(path)
    lines = content.split('\n')
    # Keep only the long log lines that actually carry price payloads.
    filtered_lines = filter(lambda x: len(x) > 100, lines)
    # Extract everything after the 'PRINT ' marker (a comma-separated price list).
    lines = map(lambda x: string_fetch(x, 'PRINT ', ''), filtered_lines)
    close_list_str = ','.join(lines)
    # print close_list_str
    prices_list = map(float, close_list_str.split(','))
    datetimes = TradeTime.generate_datetimes(date, date)
    # NOTE(review): Python 2's two-iterable map pads the shorter sequence with
    # None (unlike zip, which truncates) -- confirm the two lists always match
    # in length before porting this to zip.
    new_lines = map(lambda x, y: '%s,%s' % (x, y), datetimes, prices_list)
    new_content = '\n'.join(new_lines)
    write_path = PathMgr.get_data_path('quantopian_daily_min/%s%s.csv' % (symbol, date.strftime('%Y%m%d')))
    write_to_file(write_path, new_content)
def get_order_id():
    """Return the next order id from orderid.txt, incrementing the stored counter."""
    counter_path = PathMgr.get_data_file_path('orderid.txt')
    current_id = int(read_file_to_string(counter_path))
    # Persist the incremented counter before handing out the current value.
    write_to_file(counter_path, str(current_id + 1))
    return current_id
def gen_vix_data(self):
    """Fetch the latest VIX data and cache it as vix.json."""
    target = os.path.join(self.vix_data_dir, 'vix.json')
    payload = BarchartScraper.get_vix_data()
    write_to_file(target, payload)
def gen_option_data(self, symbol, expiration_date):
    """Fetch the option chain for (symbol, expiration_date) and cache it as JSON."""
    target = os.path.join(self.option_data_dir, '{}{}.json'.format(symbol, expiration_date))
    payload = BarchartScraper.get_option_data(symbol, expiration_date)
    write_to_file(target, payload)
def gen_equity_data(self, symbol):
    """Fetch equity data for *symbol* and cache it as JSON."""
    target = os.path.join(self.equity_dir, '{}.json'.format(symbol))
    payload = BarchartScraper.get_equity_data(symbol)
    write_to_file(target, payload)