def __init__(self, session=None, configuration=None):
    # 2018-4-4: This is a hack for an outstanding issue in our environment that
    # causes bitstamp to reject all but the first request made with a
    # requests.Session() object. See trello for more information.
    if not session:
        session = FuturesSession(max_workers=10)
        session.cookies = ForgetfulCookieJar()

    super(BitstampBTCUSDExchange, self).__init__(session)

    # Immutable properties.
    # TODO: Check on the status of the withdrawal_requests_url (might need a dash).
    # TODO: Check whether the withdraw_url is still being used, or why it isn't in
    # the v2 API.
    self.name = u'BITSTAMP_BTC_USD'
    self.friendly_name = u'Bitstamp BTC-USD'
    self.base_url = 'https://www.bitstamp.net/api/v2/'
    self.currency = u'USD'
    self.volume_currency = 'BTC'
    self.price_decimal_precision = 2
    self.volume_decimal_precision = 8

    # Configurable defaults.
    self.market_order_fee = self.fee
    self.limit_order_fee = self.fee
    self.fee = Decimal('0.0005')  # TODO: update these.
    self.fiat_balance_tolerance = Money('0.0001', 'USD')
    self.volume_balance_tolerance = Money('0.00000001', 'BTC')
    self.max_tick_speed = 1
    self.min_order_size = Money('0.001', 'BTC')
    self.use_cached_orderbook = False

    if configuration:
        self.configure(configuration)

    # Endpoints.
    self.ticker_url = 'ticker/btcusd/'
    self.orderbook_url = 'order_book/btcusd/'
    self.buy_url = 'buy/btcusd/'
    self.sell_url = 'sell/btcusd/'
    self.open_orders_url = 'open_orders/btcusd/'
    self.trade_status_url = 'user_transactions/btcusd/'
    self.balance_url = 'balance/'
    self.trade_cancel_url = 'cancel_order/'
    self.withdrawl_requests_url = 'withdrawal_requests/'
    self.withdraw_url = 'https://priv-api.bitstamp.net/api/bitcoin_withdrawal/'
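
# Hedged usage sketch (not part of the original module): exercise the default
# FuturesSession + ForgetfulCookieJar path against Bitstamp's public ticker,
# which needs no credentials. This assumes the parent exchange wrapper stores
# the session it receives as `self.session`; if it stores it under another
# name, adjust accordingly.
if __name__ == '__main__':
    exchange = BitstampBTCUSDExchange()

    # FuturesSession.get() returns a future; .result() blocks for the response.
    future = exchange.session.get(exchange.base_url + exchange.ticker_url)
    ticker = future.result().json()

    print(ticker.get('last'))  # Last traded BTC/USD price from the public ticker.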
def try_forgetful_futures():
    """
    Tests the workaround using ForgetfulCookieJar with requests_futures.
    """
    from requests_toolbelt.cookies.forgetful import ForgetfulCookieJar
    from requests_futures.sessions import FuturesSession

    session = FuturesSession()
    jar = ForgetfulCookieJar()
    session.cookies = jar

    payload1 = construct_payload()
    req1 = session.post(balance_url, payload1)
    resp1 = req1.result().text

    payload2 = construct_payload()
    req2 = session.post(balance_url, payload2)
    resp2 = req2.result().text

    test_response(resp1, 'Forgetful Futures req1')
    test_response(resp2, 'Forgetful Futures req2')

    return session
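
# --- Hedged sketch of the helpers the test above relies on. ---
# `construct_payload`, `balance_url`, and `test_response` are not defined in this
# snippet; the versions below are illustrative assumptions only. The signing
# scheme (HMAC-SHA256 over nonce + customer id + API key) matches Bitstamp's
# documented legacy authentication, but the project's real helpers may differ.
import hashlib
import hmac
import time

balance_url = 'https://www.bitstamp.net/api/v2/balance/'

BITSTAMP_API_KEY = 'YOUR_API_KEY'        # placeholder credentials
BITSTAMP_API_SECRET = b'YOUR_API_SECRET'
BITSTAMP_CUSTOMER_ID = 'YOUR_CUSTOMER_ID'


def construct_payload():
    """Build one signed request body; each call uses a fresh nonce."""
    nonce = str(int(time.time() * 1e6))
    message = (nonce + BITSTAMP_CUSTOMER_ID + BITSTAMP_API_KEY).encode('utf-8')
    signature = hmac.new(BITSTAMP_API_SECRET, msg=message, digestmod=hashlib.sha256)

    return {
        'key': BITSTAMP_API_KEY,
        'nonce': nonce,
        'signature': signature.hexdigest().upper(),
    }


def test_response(body, label):
    """Print whether a response body looks like a success or like the auth
    failure the cookie bug produced on every request after the first."""
    ok = 'error' not in body.lower()
    print('%s: %s' % (label, 'OK' if ok else 'FAILED -> %s' % body))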
def run(self):
    settings = QSettings()
    pref_target_path = settings.value(Settings.SETTINGS_SAVE_PATH, Settings.DEFAULT_TARGET_PATH, type=str)
    pref_max_pool_cnt = settings.value(Settings.SETTINGS_MAX_POOL_CNT, Settings.DEFAULT_MAX_POOL, type=int)

    gallery_save_path = pref_target_path + '/' + self.gallery.path

    if not os.path.exists(gallery_save_path):
        os.makedirs(gallery_save_path)

    # Cloudflare authorization.
    self.state.emit('Authorize..')
    Logger.LOGGER.info("Wait for Cloudflare Authorization..")

    self.driver.get(URL_HIYOBI)

    # Busy-wait until the Cloudflare challenge page has been cleared.
    while "Just a moment..." in self.driver.page_source:
        pass

    user_agent = self.driver.execute_script("return navigator.userAgent;")

    try:
        cookie_value = '__cfduid=' + self.driver.get_cookie('__cfduid')['value'] + \
                       '; cf_clearance=' + self.driver.get_cookie('cf_clearance')['value']
        headers = {'User-Agent': user_agent}
        cookies = {'session_id': cookie_value}
    except TypeError:
        Logger.LOGGER.warning("Could not copy Cloudflare cookies into requests")
        headers = None
        cookies = None

    # Fetch image data from the gallery page.
    self.state.emit('Fetch..')
    Logger.LOGGER.info("Connect to Gallery page..")

    self.driver.get(self.gallery.url)
    sleep(1)
    soup = BeautifulSoup(self.driver.page_source, "html.parser")

    # Start the multi-threaded download.
    Logger.LOGGER.info("Download Start..")
    img_urls = soup.find_all('div', class_="img-url")
    self.total_cnt = len(img_urls)

    session = FuturesSession(max_workers=pref_max_pool_cnt)

    # Merge into the session defaults rather than replacing them: session.cookies
    # must stay a CookieJar, and overwriting session.headers would drop the
    # defaults requests normally sends.
    if headers is not None:
        session.headers.update(headers)
    if cookies is not None:
        session.cookies.update(cookies)

    responses = {}

    for url_path in img_urls:
        url = READER_URL + url_path.text
        name = url.split('/')[-1]
        responses[name] = session.get(url)

    for filename in responses:
        self.response_to_file(response=responses[filename].result(), name=filename, path=gallery_save_path)

    session.close()

    # Compress into a zip file.
    self.state.emit('Compressing..')

    if self.gallery.original != "":
        zip_path = pref_target_path + '/' + self.gallery.type + '/' + self.gallery.original + '/' + self.gallery.path + '.zip'
    else:
        zip_path = pref_target_path + '/' + self.gallery.type + '/' + self.gallery.path + '.zip'

    try:
        if not os.path.exists(zip_path[:zip_path.rfind('/')]):
            os.makedirs(zip_path[:zip_path.rfind('/')])
        FileUtil.make_zip(gallery_save_path, zip_path)
        shutil.rmtree(gallery_save_path)
    except Exception:
        print(traceback.format_exc())
        Logger.LOGGER.error("Compressing Process Error... pass")

    # Save to Firebase.
    # TODO: Enable the next line on build.
    FirebaseClient.fbclient.insert_data(self.gallery)
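
# --- Hedged sketches of two helpers referenced above. ---
# `response_to_file` (a method on the worker class) and `FileUtil.make_zip` are
# defined elsewhere in this project; the standalone versions below are
# assumptions about their behaviour, shown only so the download/compress flow
# above reads end to end. The real implementations may differ (for example,
# the real response_to_file likely also reports progress to the UI).
import os
import zipfile


def response_to_file(response, name, path):
    """Write one downloaded image body to <path>/<name>."""
    with open(os.path.join(path, name), 'wb') as f:
        f.write(response.content)


def make_zip(src_dir, zip_path):
    """Zip every file in src_dir (flat, no directory prefix) into zip_path."""
    with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zf:
        for filename in sorted(os.listdir(src_dir)):
            zf.write(os.path.join(src_dir, filename), arcname=filename)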