def like(self, inpt):
    """Like a single post.

    Args:
        inpt (str): A numeric media id, or a post shortcode. A non-numeric
            string is treated as a shortcode and converted to a media id.

    Raises:
        MissingMedia: The post has (most likely) been removed.
        ActionBlocked: Instagram blocked the like action (HTTP 400 with the
            known "action blocked" body).
        ServerError: The server returned a 5xx status.
    """
    media_id = inpt
    if isinstance(inpt, str) and not inpt.isdigit():
        media_id = code_to_media_id(inpt)
    log.debug(media_id)
    try:
        self._make_request(
            like_endpoint.format(media_id=media_id),
            post=True,
            msg='Liked %s' % media_id)
    except requests.HTTPError as e:
        if e.response.text == 'missing media':
            raise MissingMedia(
                "The post you are trying to like has most likely been removed"
            )
        if e.response.status_code == 400 and e.response.text == action_blocked[
                'text']:
            raise ActionBlocked(" This action was blocked")
        elif e.response.status_code in range(500, 600):
            raise ServerError("An unknown server error occurred")
        # NOTE(review): any HTTPError that matches none of the branches above
        # is silently swallowed (original behavior, preserved deliberately).
    # Removed the original trailing `except requests.RequestException: raise`
    # handler: a bare re-raise is exactly what happens without it.
def _save_cookies(self):
    """Pickle the session's cookie jar into a local ``cookies`` file."""
    jar = self.ses.cookies
    with open('cookies', 'wb+') as fh:
        log.debug('Saving cookies {}'.format(jar.get_dict()))
        pickle.dump(jar, fh)
        log.debug("SESSION COOKIES: {}".format(jar.get_dict()))
def test_log_debug(capsys, log):
    """A DEBUG-level record must be emitted to stderr once the level allows it."""
    from log3 import log
    log.setLevel('DEBUG')
    log.debug("debug")
    captured = capsys.readouterr()
    assert "debug" in captured.err
def get_hash_feed(self, hashtag, pages=4):
    """Get hashtag feed. Login is NOT required for this endpoint.

    Args:
        hashtag (str): The hashtag to be used. Ex. love, fashion, beautiful,
            etc. Don't put the pound sign.
        pages (int): The number of pages to crawl. Recommended to not go
            above four.

    Returns:
        list: The ``edge_hashtag_to_media.edges`` list for the hashtag.

    Raises:
        IncompleteJSON: The server returned a truncated/incomplete JSON body.
        ServerError: The server returned a 5xx status.
        InvalidHashtag: The response carried no data for the hashtag.
    """
    variables = {
        'tag_name': hashtag,
        'first': pages,
    }
    params = {
        'query_hash': get_hashinfo_query,
        'variables': json.dumps(variables)
    }
    # The gis signature is derived from the request variables and must be
    # sent in the x-instagram-gis header.
    instagram_gis = self.get_instagram_gis(variables)
    log.debug('Used gis %s' % instagram_gis)
    headers = {'x-instagram-gis': instagram_gis}
    resp = self._make_request(
        graphql_endpoint,
        params=params,
        headers=headers,
        msg='Hash feed was received')
    try:
        data = resp.json()  # => possible JSONDecodeError
    except JSONDecodeError:
        # Server might return incomplete JSON responses or requests might be
        # truncating them: the content-length does not match in some instances.
        log.warning('Received an incomplete JSON response')
        raise IncompleteJSON
    except requests.exceptions.HTTPError as e:
        # NOTE(review): resp.json() never raises HTTPError, so this branch
        # looks unreachable; kept to preserve the original structure.
        if e.response.status_code in range(500, 600):
            raise ServerError("An unknown server error occurred")
    else:
        if not data['data']['hashtag']:
            raise InvalidHashtag(
                "Received no data for hashtag. Please make sure it was entered properly"
            )
        return data['data']['hashtag']['edge_hashtag_to_media']['edges']
def _load_cookies(self):
    """Restore the session's cookie jar from a local ``cookies`` file.

    Raises:
        NoCookiesFound: No ``cookies`` file exists in the current directory.
    """
    if not os.path.isfile('cookies'):
        raise NoCookiesFound("No cookies found in the current directory")
    log.debug('Loading cookies')
    with open('cookies', 'rb+') as fh:
        jar = pickle.load(fh)
        log.debug('STORED COOKIE: {}'.format(jar.get_dict()))
        self.ses.cookies = jar
def get_user_info(self, username):
    """Fetch profile information for a user.

    Args:
        username (str): The actual literal username.

    Returns:
        dict: The ``graphql.user`` object from the JSON-decoded response.
    """
    variables = '/{}/'.format(username)
    gis = self.get_instagram_gis(variables)
    log.debug('Used gis %s' % gis)
    resp = self._make_request(
        user_info_endpoint.format(username=username),
        msg='User info data received',
        headers={'x-instagram-gis': gis})
    return resp.json()['graphql']['user']
def login(self, username, password):
    """Login to instagram.

    A new CSRF token cookie is generated after an authenticated login
    request. This token is reused throughout the session. It should be in
    the request cookies and in the header `x-csrftoken` for all subsequent
    POST sensitive requests.

    Args:
        username (str): Your instagram username
        password (str): Your instagram password

    Raises:
        LoginAuthenticationError: Raised when authentication has failed
        CheckpointRequired: Raised when instagram has detected an unusual
            login attempt. This usually happens when your IP is different
            or your mid (MACHINE_ID) cookie has changed
    """
    self._get_init_csrftoken()
    login_data = {'username': username, 'password': password}
    try:
        log.info("Logging in as {}".format(username))
        self._make_request(
            login_endpoint, data=login_data, msg="Login request sent")
    except requests.exceptions.HTTPError:
        resp_data = self.last_resp.json()
        if resp_data['message'] == 'checkpoint_required':
            self._save_cookies()
            checkpoint_url = resp_data['checkpoint_url']
            checkpoint_id = checkpoint_url.split("/")[2]
            checkpoint_code = checkpoint_url.split("/")[3]
            self._make_request(checkpoint_url)
            log.debug(
                challenge_endpoint.format(
                    id=checkpoint_id, code=checkpoint_code))
            # choice '1' asks instagram to send the code to the registered
            # email address.
            self._make_request(
                challenge_endpoint.format(
                    id=checkpoint_id, code=checkpoint_code),
                data={'choice': '1'})
            msg = """
            Instagram has flagged this login as suspicious. You are either
            signing in from an unknown IP or your machine id (mid) has
            changed. Please enter the six-digit code that was sent to your
            instagram registered email to proceed.
            """
            log.error(msg)
            while True:
                # FIX: the prompt originally contained a raw newline inside
                # the string literal (a syntax error); rebuilt with '\n'.
                verification_code = input(
                    "Enter the six-digit verification code. \n"
                    "Type REPLAY to request another one: ")
                if verification_code == 'REPLAY':
                    self._make_request(
                        challenge_replay.format(
                            id=checkpoint_id, code=checkpoint_code),
                        post=True)
                else:
                    self._make_request(
                        challenge_endpoint.format(
                            id=checkpoint_id, code=checkpoint_code),
                        data={'security_code': verification_code})
                    break
            if self.is_loggedin:
                log.info('Logged in successfully')
                # FIX: previously duplicated _save_cookies' body inline;
                # call the helper for consistency with the success branch.
                self._save_cookies()
            else:
                raise LoginAuthenticationError
    else:
        resp_data = self.last_resp.json()
        if resp_data['authenticated']:
            log.info('Logged in successfully')
            # Reuse the freshly issued CSRF token for all later POSTs.
            self.ses.headers.update({
                'x-csrftoken': self.last_resp.cookies['csrftoken']
            })
            assert 'sessionid' in self.ses.cookies.get_dict()
            self._save_cookies()
            self.username = username
        else:
            raise LoginAuthenticationError
def _make_request(self,
                  endpoint,
                  data=None,
                  params=None,
                  msg='',
                  post=False,
                  headers=None):
    """Shorthand way to make a request.

    Args:
        endpoint (str): API endpoint
        data (dict, None): Data if using POST request
        params (dict, None): Params, if needed
        msg (str): Message to log when response is successful
        post (bool): True if this is a POST request, False otherwise
        headers (dict, None): Extra headers merged over the session's
            persistent headers for this request only

    Returns:
        Response: Requests response object
    """
    resp = None
    # Combine persistent headers with new headers temporarily
    if headers:
        headers = {**self.ses.headers, **headers}
    else:
        headers = self.ses.headers
    try:
        if not data and not post:
            resp = self.ses.get(
                base_endpoint + endpoint, headers=headers, params=params)
            resp.raise_for_status()
        else:
            resp = self.ses.post(
                base_endpoint + endpoint,
                data=data,
                headers=headers,
                allow_redirects=False)
            resp.raise_for_status()
    except RequestException as ex:
        # FIX: resp is None when the request itself failed (e.g. connection
        # error) -- the original unconditionally read resp.text and raised
        # AttributeError, masking the real exception.
        if resp is None:
            log.error('Request failed before a response was received')
            raise
        # FIX: the original logged full content in both branches; truncate
        # long bodies to mirror the success path.
        if len(resp.text) > 300:
            log.error('STATUS: {} - CONTENT: {}'.format(
                resp.status_code, resp.text[:300] + "..."))
        else:
            log.error('STATUS: {} - CONTENT: {}'.format(
                resp.status_code, resp.text))
        self.last_resp = resp
        self.status = resp.status_code
        self.msg = resp.content
        raise
    except ConnectionResetError as e:
        log.error('Server closed the connection')
        log.debug("""
        STATUS: {}
        RESPONSE HEADERS: {}
        RESPONSE CONTENT: {}
        """.format(self.last_resp.status_code, self.last_resp.headers,
                   self.last_resp.content))
        raise
    else:
        if len(resp.text) > 300:
            log.success('STATUS: {} - CONTENT (truncated): {}'.format(
                resp.status_code, resp.text[:300] + "..."))
        else:
            log.success('STATUS: {} - CONTENT: {}'.format(
                resp.status_code, resp.text))
        self.last_resp = resp
        self.status = resp.status_code
        self.msg = resp.content
        if msg:
            log.info(msg)
        return resp
def get_stock_ticker_list(self):
    """Gets a list of stocks given API criteria.

    Returns:
        list: Stock tickers matching the criteria specified in the
        ``self.url`` instance variable; empty if the initial request fails.

    Raises:
        ValueError: The API response carried an ``errors`` payload.
    """
    retries = 4
    stocks = []
    try:
        log.info("Getting request for stock screener")
        resp = requests.get(self.url,
                            auth=(self.username, self.password),
                            timeout=10)
        resp.raise_for_status()
    except requests.HTTPError as http_error:
        log.error("Server returned the following HTTP Error: {http_error}".
                  format(http_error=http_error))
    except requests.exceptions.Timeout:
        log.error('Timeout')
    else:
        log.info('SUCCESS')
        try:
            log.info("Trying to read json file . . .")
            data1 = resp.json()
        except Exception as ex:
            # FIX: was a Python-2 `print ex` statement (syntax error on py3).
            log.error(ex)
        else:
            if 'errors' in data1:
                # FIX: was a Python-2 print statement.
                log.error(data1['errors'][0]['human'])
                raise ValueError(data1['errors'][0]['human'])
            else:
                log.info('SUCCESS')
                total_pages = data1['total_pages']
                result_count = data1['result_count']
                log.info(
                    "Total pages: {total_pages} \tresult_count: {result_count}"
                    .format(total_pages=total_pages,
                            result_count=result_count))
                stocks = []
                for p in range(1, total_pages + 1):
                    url_new = self.url + "&page_number=" + str(p)
                    # Retry each page a few times; 503 means the server is
                    # busy and the request should simply be re-sent.
                    for r in range(retries):
                        try:
                            log.debug(
                                " Going to page number {p}".format(p=p))
                            resp = requests.get(url_new,
                                                auth=(self.username,
                                                      self.password),
                                                timeout=10)
                            if resp.status_code == 503:
                                log.error(
                                    "503 Error: Server is too busy. Retrying again"
                                )
                            resp.raise_for_status()
                        except Exception as ex:
                            # FIX: was a Python-2 `print ex` statement.
                            log.error(ex)
                            continue
                        else:
                            log.info("SUCCESS")
                            data = resp.json()
                            for stock in data['data']:
                                stocks.append(str(stock['ticker']))
                            break
    return stocks
def _individual_backtest(self, stock, buy_start, buy_end):
    """Backtest a single stock over the buying window and record trades.

    Args:
        stock (list): ``[ticker, dataframe]`` pair; the dataframe is a
            price history indexed by datetime with at least ``close`` and
            ``volume`` columns.
        buy_start (str): First date of the buying range (inclusive).
        buy_end (str): Last date of the buying range (inclusive).

    Side effects:
        Appends the generated trades to ``self.trade_log`` and sets
        ``self._isdone`` to True.
    """
    ticker = stock[0]
    df = stock[1]
    then = time.time()
    self.buy_start = buy_start
    self.buy_end = buy_end
    proc = os.getpid()
    if self.extended_hours is False:
        # Restrict to regular trading hours. indexer_between_time returns
        # integer positions, so positional indexing is required.
        # FIX: replaced the removed pandas `.ix` accessor with `.iloc`.
        df = df.iloc[df.index.indexer_between_time(
            datetime.time(9, 30), datetime.time(16))]
    # Add code to add columns here.
    # df = add_columns(df)
    # Buying range: Date must be the index
    df = df.loc['{buy_start} 9:30'.format(
        buy_start=self.buy_start):'{buy_end} 16:00'.format(
            buy_end=self.buy_end)]
    df = df.reset_index()
    df = df.rename({'index': 'date'}, axis='columns')
    index_array = self.strategy.get_buy_coordinates(df)
    log.debug('Index array is %s' % index_array)
    trade_row = pd.DataFrame(columns=[
        'index', 'buy_date', 'ticker', 'buy_price', 'sell_date',
        'sell_price', 'volume'
    ])
    for index in index_array:
        # Store the close as the buying price
        buy_price = df.iloc[index]['close']
        buy_date = df.iloc[index]['date']
        init = int(index)  # Initial value
        volume = df.iloc[index]['volume']
        sell_date, sell_price = self.strategy.sell_algorithm(
            init, ticker, df)
        log.info(
            '%7s - Bought @ %.3f at LOC: %d ' % (ticker, buy_price, index))
        # Record the trade unless the sell algorithm reported no exit
        # (both values zero). Note: `sell_date or sell_price != 0` is the
        # original condition, preserved verbatim.
        if sell_date or sell_price != 0:
            trade_row.loc[len(trade_row)] = [
                index, buy_date, ticker, buy_price, sell_date, sell_price,
                volume
            ]
    # FIX: the no-buys branch originally contained a raw newline inside the
    # log string literal (a syntax error); both branches now emit the same
    # single-line message.
    log.info('%7s - Operation by PID %s. Task completed in %s' %
             (ticker, proc, time.time() - then))
    # FIX: DataFrame.append was removed from pandas; pd.concat is the
    # equivalent replacement.
    self.trade_log = pd.concat([self.trade_log, trade_row])
    self._isdone = True
""" print "Loading stocks into memory" futures = th_run(my_universe) mp_futures = mp_run(futures) stocks = create_stocks(mp_futures) return stocks if __name__ == '__main__': ticker = ['CLNT', 'ANV'] my_universe = Universe(ticker, "test") futures = th_run(my_universe) mp_futures = mp_run(futures) log.debug('Pickling stock list') #pickle.dump(mp_futures, open('pickles/stock_list', 'wb')) # stocks = [] # for ticker in my_universe.tickers: # try: # csv_df = read_csv_df(ticker) # except Exception as ex: # print ex # print 'Skipping stock' # continue # else: # csv_df = correct_csv_df(csv_df) # csv_df = add_columns(csv_df) # stocks.append([ticker, csv_df])