def get_backtests():
    """Read every serialized backtest in BACKTESTS_PATH and print a summary.

    Each pickle file is loaded and appended as a row to the module-level
    ``backtest_log`` DataFrame, which is then indexed and sorted by
    timestamp (newest first) before being printed.

    Returns:
        None
    """
    for dir_file in os.listdir(BACKTESTS_PATH):
        log.info("Found file {0}".format(dir_file))
        with open(BACKTESTS_PATH + dir_file, "rb") as f:
            # Load serialized backtest
            bt = pickle.load(f)
            # Appends to backtest dataframe
            backtest_log.loc[backtest_log.shape[0]] = [
                bt.timestamp, bt.strategy.name, bt.note,
                bt.strategy.direction, bt.time_frame, bt.num_of_stocks,
                bt.winners, bt.losers, bt.winning_percent, bt.pct_chg_avg
            ]
    backtest_log.set_index('timestamp', inplace=True)
    backtest_log.sort_index(ascending=False, inplace=True)
    # Fixes misaligned index with headers
    backtest_log.columns.name = backtest_log.index.name
    backtest_log.index.name = None
    print(backtest_log)
def make_dataframe_from_csv(ticker):
    """Create the initial dataframe from a ticker csv/txt file.

    Only the last 7500 rows of the file are read to bound memory use.

    Args:
        ticker (str): Ticker of the stock

    Returns:
        pandas.DataFrame: Raw dataframe of the stock; every column is a string.

    Raises:
        OSError: If the ticker file does not exist or cannot be opened.
    """
    # Path for the ticker Kibot data
    path = os.path.join(PATH_TO_STOCKS_FOLDER, '%s.txt' % ticker)
    try:
        # Count the lines so we can skip everything but the tail.
        # `with` guarantees the handle is closed (the old code leaked it).
        with open(path) as f:
            size = sum(1 for _ in f)
    except OSError:
        log.error('%7s - Ticker not found in equity database.' % ticker)
        raise
    else:
        log.info('%7s - Loading history into memory' % ticker)
        # Read only the last 7500 lines of the csv (header row is kept).
        csv_df = pd.read_csv(path, skiprows=range(1, size - 7500))
        return csv_df
def __init__(self, use_cookies=True):
    """Initialize the client session.

    Loads cookies by default so no login will be required unless needed.

    Args:
        use_cookies (bool): When True and a local 'cookies' file exists,
            the saved session cookies are loaded instead of logging in again.
    """
    self.ses = requests.Session()
    # self.ses.verify = "charles-ssl-proxying-certificate.pem"  # Use when charles is open
    # Default headers. These will persist throughout all requests
    self.ses.headers = {
        'Accept': '*/*',
        'Content-type': 'application/x-www-form-urlencoded; charset=UTF-8',
        'Accept-Encoding': 'gzip, deflate, br',
        'Accept-Language': 'en-US,en;q=0.9',
        'referer': 'https://www.instagram.com/',
        'x-requested-with': 'XMLHttpRequest',
        # BUG FIX: the implicit string concatenation was missing a space,
        # yielding "...Gecko)Chrome/..." — a malformed user-agent string.
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_3) '
                      'AppleWebKit/537.36 (KHTML, like Gecko) '
                      'Chrome/66.0.3359.139 Safari/537.36'
    }
    self.user_data = None
    self.last_resp = None
    self.status = None
    self.msg = None
    self.username = None
    self.use_cookies = use_cookies
    self.rhx_gis = None
    # Resume a previous session from disk when possible.
    if os.path.isfile('cookies') and use_cookies:
        self._load_cookies()
        self.get_username()
        self._get_init_csrftoken()
        log.info('Logged in as {}'.format(self.username))
def test_login_form_fields_are_not_empty(self):
    """Form validation must fail when the password field is empty."""
    with app.app_context():
        form = LoginForm(data={'username': '******', 'password': ''})
        # Validate once and reuse the result (the old code validated twice).
        is_valid = form.validate()
        for field, error in form.errors.items():
            log.info("{} - {}".format(field, error))
        self.assertFalse(is_valid, "Login form fields are empty")
def test_login_form_validates(self):
    """Form validation must pass when both fields are populated."""
    with app.app_context():
        form = LoginForm(data={'username': '******', 'password': '******'})
        # Validate once and reuse the result (the old code validated twice).
        is_valid = form.validate()
        for field, error in form.errors.items():
            log.info("{} - {}".format(field, error))
        self.assertTrue(is_valid)
def pickle_results(self):
    """Serialize this backtest object to bt_results/<timestamp>-<strategy>.

    The file name combines the run timestamp (spaces replaced with dashes)
    and the lower-cased, underscored strategy name.
    """
    pickle_name = self.strategy.name.lower().replace(' ', '_')
    path = "bt_results/{0}-{1}".format(
        self.timestamp.replace(' ', '-'), pickle_name)
    log.info("Pickling backtesting object")
    # `with` guarantees the handle is closed even if pickling raises.
    with open(path, "wb") as f:
        pickle.dump(self, f)
def save(self, stocks, pickle_name):
    """Pickle a given stock list for later use.

    Args:
        stocks (list): Stock list to pickle
        pickle_name (str): Name of pickled stock list

    Returns:
        None
    """
    log.info("Pickling object stocks as {pickle_name}".format(
        pickle_name=pickle_name))
    # `with` guarantees the handle is closed (the old one-liner leaked it).
    with open(pickle_name, "wb") as f:
        pickle.dump(stocks, f)
def mp_worker(ticker, data):
    """Multiprocessing worker: clean a ticker's CSV dataframe.

    Corrects the raw dataframe, restricts it to regular trading hours
    (09:30-16:00), and adds the derived columns.

    Args:
        ticker (str): Ticker symbol, used for logging only.
        data: Raw dataframe for the ticker.

    Returns:
        The corrected dataframe, or None if any step raised.
    """
    try:
        csv_df = correct_csv_df(data)
        # indexer_between_time returns integer positions, so positional
        # .iloc replaces the deprecated .ix accessor.
        csv_df = csv_df.iloc[csv_df.index.indexer_between_time(
            datetime.time(9, 30), datetime.time(16))]
        csv_df = add_columns(csv_df)
    except Exception as ex:
        # Py3 fix: `print ex` was Python-2 syntax.
        print(ex)
    else:
        log.info(
            '%7s - Worker has corrected CSV file and added the appropriate columns'
            % ticker)
        return csv_df
def test_file_logging(log):
    """A message logged after log_to_file() must appear in the target file."""
    from log3 import log
    from log3 import log_to_file
    log.enable_logging()
    log_to_file('/tmp/mylog')
    log.info('logged to file')
    try:
        # Open read-only AFTER logging; the old code opened the file 'w+'
        # (truncating it) before the logger wrote, and leaked the handle.
        with open('/tmp/mylog') as f:
            line1 = f.readline()
        assert 'logged to file' in line1
    finally:
        os.remove('/tmp/mylog')
def fetch_quotes(keyword, start=0):
    """Fetch quotes from keyword.

    Args:
        keyword (str): The topic or subject of interest
        start (int): Location to start fetching quotes. 50, 100, 150, etc

    Yields:
        tuple: (quote, author) pairs scraped from the results page.
    """
    params = {
        'keyword': keyword,
        'boolean': 'and',
        'field': 'all',
        'frank': 'all',
        'database': 'all',
        'start': start
    }
    log.info('Fetching quotes for keyword %s' % keyword)
    resp = requests.get(
        'https://creativequotations.com/cgi-bin/sql_search3.cgi',
        params=params)
    doc = html.fromstring(resp.content)
    quotes = [
        quote.text_content().strip("\"") for quote in doc.xpath('//li/b')
    ]
    authors = [
        author.split('(')[0].strip()
        for author in doc.xpath('//li/b/following-sibling::text()[2]')
    ]
    matches_found = doc.xpath(
        '//b[contains(text(), "matches found")]/text()')[0]
    # Raw string for the regex (non-raw '\d' is an invalid-escape warning).
    rx = re.compile(r'\d+')
    # NOTE(review): the match count is extracted but never used/yielded —
    # presumably intended for pagination; confirm before removing.
    matches_found = re.search(rx, matches_found)[0]
    for quote, author in zip(quotes, authors):
        yield quote, author
def remove_tickers_not_in_local_db(self, tickers):
    """Remove from the universe any ticker with no local data file.

    Args:
        tickers (list): Tickers to verify against the local equity database.
            Iterated over a copy so removal from self.tickers is safe even
            when the same list object is passed in.
    """
    for ticker in tickers[:]:
        path = os.path.join(PATH_TO_STOCKS_FOLDER, '%s.txt' % ticker)
        try:
            # Probe that the file exists and is readable; `with` closes the
            # handle (the old code leaked it and needlessly counted lines).
            with open(path):
                pass
        except OSError:
            log.info(
                '%7s - Ticker not found in local equity database. Removing from universe'
                % ticker)
            self.tickers.remove(ticker)
def get_instagram_gis(self, params):
    """ Returns a generated gis to be used in request headers"""
    # Lazily fetch rhx_gis the first time it is needed.
    if not self.rhx_gis:
        self.get_rhx_gis()
    # Dict params are serialized to JSON; strings are used verbatim.
    if isinstance(params, dict):
        payload = json.dumps(params)
        log.info("STRINGIFIED: {}".format(payload))
    else:
        payload = params
    raw = "{}:{}".format(self.rhx_gis, payload).encode('utf-8')
    log.info("Unhashed gis: {}".format(raw))
    return hashlib.md5(raw).hexdigest()
def login(self, username, password):
    """Login to instagram.

    A new CSRF token cookie is generated after an authenticated login
    request. This token is reused throughout the session. It should be in
    the request cookies and in the header `x-csrftoken` for all subsequent
    POST sensitive requests

    Args:
        username (str): Your instagram username
        password (str): Your instagram password

    Raises:
        LoginAuthenticationError: Raised when authentication has failed
        CheckpointRequired: Raised when instagram has detected an unusual
            login attempt. This usually happens when your IP is different
            or your mid (MACHINE_ID) cookie has changed
    """
    # A fresh CSRF token is required before the login POST can succeed.
    self._get_init_csrftoken()
    login_data = {'username': username, 'password': password}
    try:
        log.info("Logging in as {}".format(username))
        self._make_request(
            login_endpoint, data=login_data, msg="Login request sent")
    except requests.exceptions.HTTPError:
        # Instagram signals a suspicious login with an HTTP error whose
        # body carries a checkpoint URL for the verification challenge.
        resp_data = self.last_resp.json()
        if resp_data['message'] == 'checkpoint_required':
            self._save_cookies()
            checkpoint_url = resp_data['checkpoint_url']
            # checkpoint_url has the form /challenge/<id>/<code>/
            checkpoint_id = checkpoint_url.split("/")[2]
            checkpoint_code = checkpoint_url.split("/")[3]
            self._make_request(checkpoint_url)
            log.debug(
                challenge_endpoint.format(
                    id=checkpoint_id, code=checkpoint_code))
            # choice '1' selects email delivery of the verification code.
            self._make_request(
                challenge_endpoint.format(
                    id=checkpoint_id, code=checkpoint_code),
                data={'choice': '1'})
            msg = """
            Instagram has flagged this login as suspicious. You are either
            signing in from an unknown IP or your machine id (mid) has
            changed. Please enter the six-digit code that was sent to your
            instagram registered email to proceed.
            """
            log.error(msg)
            # Loop until a code is submitted; REPLAY requests a new code.
            while True:
                verification_code = input(
                    "Enter the six-digit verification code. \n"
                    "Type REPLAY to request another one: ")
                if verification_code == 'REPLAY':
                    self._make_request(
                        challenge_replay.format(
                            id=checkpoint_id, code=checkpoint_code),
                        post=True)
                else:
                    self._make_request(
                        challenge_endpoint.format(
                            id=checkpoint_id, code=checkpoint_code),
                        data={'security_code': verification_code})
                    break
            if self.is_loggedin:
                log.info('Logged in successfully')
                # Store cookies so the next run can skip the login flow.
                with open('cookies', 'wb+') as file:
                    log.debug('Saving cookies {}'.format(
                        self.ses.cookies.get_dict()))
                    pickle.dump(self.ses.cookies, file)
            else:
                raise LoginAuthenticationError
    else:
        # No HTTP error: inspect the JSON body for the auth verdict.
        resp_data = self.last_resp.json()
        if resp_data['authenticated']:
            log.info('Logged in successfully')
            # Reuse the fresh post-login CSRF token on later POSTs.
            self.ses.headers.update({
                'x-csrftoken': self.last_resp.cookies['csrftoken']
            })
            assert 'sessionid' in self.ses.cookies.get_dict()
            self._save_cookies()
            self.username = username
        else:
            raise LoginAuthenticationError
def _make_request(self,
                  endpoint,
                  data=None,
                  params=None,
                  msg='',
                  post=False,
                  headers=None):
    """ Shorthand way to make a request.

    Args:
        endpoint (str): API endpoint
        data (dict, None): Data if using POST request
        params (dict, None): Params, if needed
        msg (str): Message to log when response is successful
        post (bool): True if this is a POST request, FALSE otherwise
        headers (dict, None): Extra headers merged over the session headers
            for this request only.

    Returns:
        Response: Requests response object

    Raises:
        RequestException: On HTTP errors (after recording the response).
        ConnectionResetError: When the server closes the connection.
    """
    resp = None
    # Combine persistent headers with new headers temporarily
    if headers:
        headers = {**self.ses.headers, **headers}
    else:
        headers = self.ses.headers
    try:
        if not data and not post:
            resp = self.ses.get(
                base_endpoint + endpoint, headers=headers, params=params)
        else:
            resp = self.ses.post(
                base_endpoint + endpoint,
                data=data,
                headers=headers,
                allow_redirects=False)
        resp.raise_for_status()
    except RequestException:
        # BUG FIX: if the request itself failed, resp is still None and the
        # old code crashed on resp.text, masking the real exception.
        if resp is None:
            log.error('Request to {} failed with no response'.format(endpoint))
            raise
        # BUG FIX: the long-body branch previously logged the full text;
        # truncate it as the success path does.
        if len(resp.text) > 300:
            log.error('STATUS: {} - CONTENT (truncated): {}'.format(
                resp.status_code, resp.text[:300] + "..."))
        else:
            log.error('STATUS: {} - CONTENT: {}'.format(
                resp.status_code, resp.text))
        self.last_resp = resp
        self.status = resp.status_code
        self.msg = resp.content
        raise
    except ConnectionResetError:
        log.error('Server closed the connection')
        # last_resp here is the PREVIOUS response, kept for debugging.
        log.debug("""
        STATUS: {}
        RESPONSE HEADERS: {}
        RESPONSE CONTENT: {}
        """.format(self.last_resp.status_code, self.last_resp.headers,
                   self.last_resp.content))
        raise
    else:
        if len(resp.text) > 300:
            log.success('STATUS: {} - CONTENT (truncated): {}'.format(
                resp.status_code, resp.text[:300] + "..."))
        else:
            log.success('STATUS: {} - CONTENT: {}'.format(
                resp.status_code, resp.text))
        self.last_resp = resp
        self.status = resp.status_code
        self.msg = resp.content
        if msg:
            log.info(msg)
        return resp
def get_stock_ticker_list(self):
    """ Gets a list of stocks given API criteria

    Returns:
        stocks (list): List of stock tickers matching given criteria as
            specified in the url instance variable. Empty on request failure.

    Raises:
        ValueError: When the API response reports an error.
    """
    retries = 4
    stocks = []
    try:
        log.info("Getting request for stock screener")
        resp = requests.get(self.url,
                            auth=(self.username, self.password),
                            timeout=10)
        resp.raise_for_status()
    except requests.HTTPError as http_error:
        log.error("Server returned the following HTTP Error: {http_error}".
                  format(http_error=http_error))
    except requests.exceptions.Timeout:
        log.error('Timeout')
    else:
        log.info('SUCCESS')
        try:
            log.info("Trying to read json file . . .")
            data1 = resp.json()
        except Exception as ex:
            # Py3 fix: `print ex` was Python-2 syntax.
            log.error(ex)
        else:
            if 'errors' in data1:
                log.error(data1['errors'][0]['human'])
                raise ValueError(data1['errors'][0]['human'])
            log.info('SUCCESS')
            total_pages = data1['total_pages']
            result_count = data1['result_count']
            log.info(
                "Total pages: {total_pages} \tresult_count: {result_count}"
                .format(total_pages=total_pages, result_count=result_count))
            # Walk every result page; retry each page up to `retries` times.
            for p in range(1, total_pages + 1):
                url_new = self.url + "&page_number=" + str(p)
                for _ in range(retries):
                    try:
                        log.debug(" Going to page number {p}".format(p=p))
                        resp = requests.get(url_new,
                                            auth=(self.username,
                                                  self.password),
                                            timeout=10)
                        if resp.status_code == 503:
                            log.error(
                                "503 Error: Server is too busy. Retrying again"
                            )
                        resp.raise_for_status()
                    except Exception as ex:
                        log.error(ex)
                        continue
                    else:
                        log.info("SUCCESS")
                        data = resp.json()
                        stocks.extend(
                            str(stock['ticker']) for stock in data['data'])
                        break
    return stocks
def _individual_backtest(self, stock, buy_start, buy_end):
    """Backtest the strategy on one stock over the buy window.

    Finds buy coordinates via the strategy, simulates the sell for each,
    and appends the resulting trades to self.trade_log.

    Args:
        stock (tuple): (ticker, dataframe) pair for one stock.
        buy_start: Start date of the buying range.
        buy_end: End date of the buying range.
    """
    ticker = stock[0]
    df = stock[1]
    then = time.time()
    self.buy_start = buy_start
    self.buy_end = buy_end
    proc = os.getpid()
    # Restrict to regular trading hours unless extended hours are enabled.
    if self.extended_hours is False:
        # indexer_between_time returns integer positions, so positional
        # .iloc replaces the deprecated .ix accessor.
        df = df.iloc[df.index.indexer_between_time(
            datetime.time(9, 30), datetime.time(16))]
    # Add code to add columns here.
    # df = add_columns(df)
    # Buying range: Date must be the index
    df = df.loc['{buy_start} 9:30'.format(
        buy_start=self.buy_start):'{buy_end} 16:00'.format(
            buy_end=self.buy_end)]
    df = df.reset_index()
    df = df.rename({'index': 'date'}, axis='columns')
    index_array = self.strategy.get_buy_coordinates(df)
    log.debug('Index array is %s' % index_array)
    trade_row = pd.DataFrame(columns=[
        'index', 'buy_date', 'ticker', 'buy_price', 'sell_date',
        'sell_price', 'volume'
    ])
    for index in index_array:
        # Store the close as the buying price
        buy_price = df.iloc[index]['close']
        buy_date = df.iloc[index]['date']
        init = int(index)  # Initial value
        volume = df.iloc[index]['volume']
        sell_date, sell_price = self.strategy.sell_algorithm(
            init, ticker, df)
        log.info(
            '%7s - Bought @ %.3f at LOC: %d ' % (ticker, buy_price, index))
        # NOTE(review): precedence is `sell_date or (sell_price != 0)`;
        # kept as-is — confirm `(sell_date or sell_price) != 0` wasn't meant.
        if sell_date or sell_price != 0:
            trade_row.loc[len(trade_row)] = [
                index, buy_date, ticker, buy_price, sell_date, sell_price,
                volume
            ]
    # The old code had two identical log branches differing only by an
    # accidental embedded newline in the message; unified here.
    log.info('%7s - Operation by PID %s. Task completed in %s' %
             (ticker, proc, time.time() - then))
    self.trade_log = self.trade_log.append(trade_row)
    self._isdone = True
def test_log_info(capsys):
    # Smoke test: log.info must not raise while stderr is being captured.
    from log3 import log
    log.info("info")
    # NOTE(review): the captured stderr is read but never asserted on —
    # this test only proves log.info doesn't raise. Consider asserting
    # that "info" appears in the captured output; confirm which stream
    # log3 writes to before adding it.
    _, stderr = capsys.readouterr()