def __init__(self, test, key, secret, symbol, side):
    """Set up per-symbol trading state and the REST connection for one side."""
    self.test = test
    self.symbol = symbol
    self.side = side
    self.client = RestClient(test, key, secret, symbol)
    # Order bookkeeping, populated later by the trading loop.
    self.take_profit_order = None
    self.order_menu = []
def __init__(self, host='127.0.0.1', port=11211, timeout=30,
             admin_user="******", admin_pass="******",
             rest_port=8091, do_auth=True):
    """Open a memcached connection and optionally SASL-authenticate.

    After authentication, every bucket discovered through the REST API is
    selected once so later operations can target it.

    Fix: the caught exception was bound to an unused name (`as ex`);
    the binding is dropped while keeping the deliberate best-effort skip.
    """
    self.host = host
    self.port = port
    self.timeout = timeout
    self._createConn()
    self.r = random.Random()
    self.vbucket_count = 1024
    if do_auth:
        self.sasl_auth_plain(admin_user, admin_pass)
        # auth on any existing buckets
        rest_client = RestClient(host, port=rest_port)
        for bucket in rest_client.get_all_buckets():
            try:
                self.bucket_select(bucket)
            except Exception:
                # best effort — the bucket may still be warming up
                pass
def testlogin(zuora_settings):
    """Smoke-test the Zuora login call and report the outcome on stdout."""
    response = RestClient(zuora_settings).login()
    if not response:
        print('Exceptions thrown. Login failed.')
        return
    print('No exceptions thrown')
    print('Success: ', response['success'])
def testlogin(zuora_settings):
    """Exercise RestClient.login() and print whether it succeeded."""
    zuora_client = RestClient(zuora_settings)
    login_result = zuora_client.login()
    if login_result:
        print('No exceptions thrown')
        print('Success: ', login_result['success'])
    else:
        print('Exceptions thrown. Login failed.')
def __init__(self, test, key, secret, symbol, side):
    """Initialize per-symbol trading state and the REST connection."""
    self.test = test
    self.symbol = symbol
    self.side = side
    self.client = RestClient(test, key, secret, symbol)
    # State updated as orders are placed and filled.
    self.take_profit_order = None
    self.limit_orders = []
    self.last_entry_price = []
def get_product_list(self, headers):
    """Fetch every product whose sku is not null from the Magento REST API."""
    cfg = self.config["global"]
    query = (
        "searchCriteria[filter_groups][0][filters][0][field]=sku&"
        "searchCriteria[filter_groups][0][filters][0][condition_type]=notnull"
    )
    url = cfg["host"] + "/rest/" + cfg["store"] + "/V1/products?" + query
    return RestClient().send_get(url, headers, None)
def __init__(self, test, key, secret, symbol, side):
    """Set up order-tracking state for one side of the market."""
    self.test = test
    self.symbol = symbol
    self.side = side
    self.client = RestClient(test, key, secret, symbol)
    # Orders and position state maintained by the management logic.
    self.take_profit_order = None
    self.stop_loss_order = None
    self.next_stop_order = None
    self.position = {}
    self.orders = []
def test_04_unknown_exception_get(self):
    ''' Tests the GET request with an unknown error '''
    # Constructing the client with nonsense arguments (a RestClient where
    # a string belongs) triggers an unidentified error inside get_request.
    broken_client = RestClient(RestClient(), 2, 3)
    success, response = broken_client.get_request(
        self.test_sample_from_portion, self.test_sample_select_portion)
    # The failure must be reported, with the original TypeError message.
    assert success == False
    assert response == ("An unidentified error has occured: unsupported "
                        "operand type(s) for +: 'RestClient' and 'int'.")
def test_15_connection_error_delete(self):
    ''' Tests the DELETE request with a connection error. '''
    # A non-routable address guarantees the connection attempt fails.
    broken_client = RestClient("http://", "255.255.255.255", "22")
    success, response = broken_client.delete_request(
        self.test_sample_from_portion, self.test_sample_select_portion)
    assert success == False
    assert response == "A connection error has occured."
def gflag(endpoint, user, password, domain, body=None, action="get"):
    """Get or update the gflags on a cluster.

    Args:
        endpoint, user, password, domain: REST connection parameters.
        body: payload for the update (PUT) call; ignored when action is
            "get".  Defaults to an empty dict.
        action: "get" to read the gflags, anything else to update them.

    Returns:
        (code, resp) from the REST call.  Gflag APIs are supported only in
        cluster versions >= 6.3, so a 404 is mapped to an empty JSON list.

    Fix: the default was a mutable `body={}` shared across calls; replaced
    with the None-sentinel idiom (behavior unchanged for all callers).
    """
    if body is None:
        body = {}
    api = "clusters/gflag"
    rest_obj = RestClient(endpoint, user, password, domain)
    if action == "get":
        code, resp = rest_obj.get(api)
    else:
        code, resp = rest_obj.put(api, data=body)
    # If the response code is failed, return an empty list payload.
    if code == 404:
        resp = json.dumps([]).encode("utf-8")
    return code, resp
def test_exception(self, m_errorlog):
    """A transport-level RequestException must propagate and be error-logged."""
    from rest_client import RestClient, RequestException
    client = RestClient(self.TEST_BASE)
    with mock.patch.object(client, 'session') as m_session:
        m_request = m_session.request
        # Simulate the underlying requests call blowing up.
        m_request.side_effect = RequestException()
        with self.assertRaises(RequestException):
            client.call('GET', ())
        # The client should record the failure before re-raising.
        m_errorlog.assert_called_once_with(
            'failure', 'RequestException', 'GET', 'http://host/base',
            m_request.side_effect)
def main():
    """Entry point: wire up logging, the REST client, the database and the
    worker greenlets, then block until they all finish."""
    args = process_command_line()
    logging.basicConfig(level=int(args.verbose))
    coap_logger = logging.getLogger('coap')
    coap_logger.setLevel(int(args.coap_log))
    signal.signal(signal.SIGINT, signal_handler)
    hub_identity, authentication_key = get_hub_identity()
    rest_client = RestClient(hub_identity, authentication_key)
    if args.factory_reset:
        logging.info('Resetting device information')
        reset_tables(hub_identity, rest_client.hub_id)
    db = Database(rest_client)
    # Share the client and db with the devices module (module-level state).
    devices.rest_client = rest_client
    devices.db = db
    logging.info('Simulation: {0}'.format('on' if args.simulation else 'off'))
    _greenlets = [gevent.spawn(_notification_loop, rest_client.channel_id)]
    # NOTE(review): the list above is immediately discarded, so the
    # notification greenlet is spawned but never joined.  Confirm whether
    # the reassignment below is intentional or leftover debugging.
    _greenlets = []
    if args.simulation:
        devices.simulation_mode = True
        simulation.initialize(rest_client)
        _greenlets.append(gevent.spawn(simulation.simulation_loop))
    else:
        _greenlets.append(gevent.spawn(tunslip.tunslip_loop))
        logging.debug('Border router IP {0}'.format(get_br_ip_address()))
        _greenlets.append(gevent.spawn(devices.scan_loop, db, get_br_ip_address()))
    gevent.joinall(_greenlets)
    logging.info('Terminating uHub.')
def test_redirect(self, m_errorlog):
    """3xx responses raise IOError and log the redirect target."""
    from rest_client import RestClient
    client = RestClient(self.TEST_BASE)
    with mock.patch.object(client, 'session') as m_session:
        # Fake a 301 response with a Location header.
        m_response = m_session.request.return_value
        m_response.is_redirect = True
        m_response.status_code = 301
        m_response.json.return_value = {'error': 'ERROR', 'message': 'MSG'}
        m_response.headers = {'location': 'LOCATION'}
        with self.assertRaises(IOError):
            client.call('GET', ())
        # The redirect target is what gets logged, not the json error body.
        m_errorlog.assert_called_once_with(
            'redirect', 'redirect', 'GET', 'http://host/base', 'LOCATION',
            body=m_response.content, status=301)
def submit_job(self, db_uri, production_uri, compara_uri, staging_uri,
               live_uri, hc_names, hc_groups, data_files_path, email, tag):
    """ Submit a database for checking
    Arguments:
      db_uri - database to check
      production_uri - production database
      compara_uri - compara master database
      staging_uri - location of current staging databases (used to check different database types for same genome)
      live_uri - location of current release databases (used for checks comparing current and new databases)
      hc_names - list of healthchecks to run
      hc_groups - list of healthcheck groups to run
      data_files_path - location of non-relational datafiles
      email - optional address for an email on job completion
      tag - optional tag to allow jobs to be grouped for reporting
    """
    # Validate all URIs up front so bad input fails before any submission.
    assert_mysql_db_uri(db_uri)
    assert_mysql_db_uri(production_uri)
    assert_mysql_db_uri(compara_uri)
    assert_mysql_uri(staging_uri)
    assert_mysql_uri(live_uri)
    logging.info("Submitting job")
    payload = {
        'db_uri': db_uri,
        'production_uri': production_uri,
        'compara_uri': compara_uri,
        'staging_uri': staging_uri,
        'live_uri': live_uri,
        'hc_names': hc_names,
        'hc_groups': hc_groups,
        'data_files_path': data_files_path,
        'email': email,
        'tag': tag
    }
    # Delegate the actual HTTP submission to the RestClient base class.
    return RestClient.submit_job(self, payload)
def test_timeout_no_auto_set_arg(self):
    # NOTE(review): this patches requests.Session.request and then calls
    # the patched mock directly, so it only asserts that the mock echoes
    # its own call — it does not exercise RestClient's timeout handling.
    # Confirm whether a call through rest_client itself was intended.
    with mock.patch('requests.Session.request') as mock_request:
        rest_client = RestClient('localhost', 8000)
        rest_client.session.request('GET', '/test', None, None, None, None,
                                    None, None, 40)
        mock_request.assert_called_with('GET', '/test', None, None, None,
                                        None, None, None, 40)
def buckettest(oracle_file_path, rest_service_url):
    """Drive the crash-bucketing simulation against the REST service.

    Args:
        oracle_file_path: path to the oracle JSON data file.
        rest_service_url: base URL of the running REST service.

    Relies on module-level config flags (BLOCK_SIZE, START_GUNICORN,
    BOOTSTRAP_RESUME_AT, TOTALLY_FAKE_DATA, PARALLEL) and the global
    `client` / `pool` objects.

    Fix: the bare `except:` also swallowed SystemExit and
    KeyboardInterrupt; narrowed to Exception, keeping the traceback dump.
    """
    global client
    client = RestClient(rest_service_url)
    # static variables used by iterate_crash
    iterate_crash.print_after = BLOCK_SIZE
    iterate_crash.crashes_so_far = 0
    iterate_crash.ingest_block = []
    if START_GUNICORN:
        gunicorn_starter = GunicornStarter()
    try:
        if not BOOTSTRAP_RESUME_AT:
            reset_index()
        if TOTALLY_FAKE_DATA:
            synthesize(get_comparisons())
        else:
            simulate(get_comparisons(), load_oracle_data(oracle_file_path))
    except Exception:
        traceback.print_exc()
    finally:
        print('Cleaing up...')
        if PARALLEL > 1:
            pool.terminate()
            pool.join()
        if START_GUNICORN:
            gunicorn_starter.stop_gunicorn()
def test_session_user_agent(self):
    """A custom user_agent string must end up in the session headers."""
    from rest_client import RestClient
    ua = 'Poipoi/1.0'
    headers = RestClient('http://host/base', user_agent=ua).session.headers
    self.assertIn('User-Agent', headers)
    self.assertIn(ua, headers['User-Agent'])
def test_response_server_error(self):
    """A 5xx response is surfaced to the caller as HTTPError."""
    from rest_client import RestClient, HTTPError
    httpretty.register_uri('GET', self.TEST_BASE, status=500)
    client = RestClient(self.TEST_BASE)
    with self.assertRaises(HTTPError):
        client.call('GET', ())
def test_segment(self, path, segments):
    """URL path segments are joined into the expected request path."""
    from rest_client import RestClient
    httpretty.register_uri('GET', 'http://host%s' % path, status=200)
    RestClient(self.TEST_BASE).call('GET', segments)
    self.assertEqual(httpretty.last_request().path, path)
def test_auth(self):
    """Basic-auth credentials are sent as an Authorization header."""
    from rest_client import RestClient
    credentials = ('user_name', 'password')
    RestClient(self.TEST_BASE, auth=credentials).call('GET', ())
    req = httpretty.last_request()
    expected = base64.b64encode('user_name:password')
    self.assert_header(req, 'authorization', 'Basic %s' % expected)
def test_method(self, method):
    """The HTTP verb passed to call() is the one actually sent."""
    from rest_client import RestClient
    httpretty.register_uri(method, self.TEST_BASE, status=200)
    RestClient(self.TEST_BASE).call(method, ())
    self.assertEqual(httpretty.last_request().method, method)
def test_legacy(self):
    """Requests 1.x compatibility: the json payload is serialized by hand."""
    from rest_client import RestClient
    client = RestClient(self.TEST_BASE)
    client.requests_legacy = True  # Force detection of requests 1.x
    with mock.patch.object(client, 'session') as m_session:
        m_response = m_session.request.return_value
        m_response.is_redirect = False
        m_response.status_code = 200
        client.call('GET', [], json='[1, 2, 3]')
        # In legacy mode the json kwarg must be dumped to a string and sent
        # as `data` with an explicit Content-Type header.
        m_session.request.assert_called_once_with(
            allow_redirects=False, data='"[1, 2, 3]"',
            headers={'Content-Type': 'application/json'},
            method='GET', url='http://host/base')
def test_client_error(self, m_errorlog):
    """4xx responses raise HTTPError and are logged with the parsed body."""
    from rest_client import RestClient, HTTPError
    client = RestClient(self.TEST_BASE)
    with mock.patch.object(client, 'session') as m_session:
        # Fake a 400 whose raise_for_status() raises HTTPError.
        m_response = m_session.request.return_value
        m_response.is_redirect = False
        m_response.raise_for_status.side_effect = HTTPError()
        m_response.status_code = 400
        m_response.json.return_value = {'error': 'ERROR', 'message': 'MSG'}
        with self.assertRaises(HTTPError):
            client.call('GET', ())
        # The error/message fields from the json body drive the log call.
        m_errorlog.assert_called_once_with(
            'client', 'ERROR', 'GET', 'http://host/base', 'MSG',
            body=m_response.content, status=400)
def test_response_redirect(self):
    """Redirect responses are refused and raised as IOError."""
    from rest_client import RestClient
    httpretty.register_uri('GET', self.TEST_BASE, status=301,
                           adding_headers={'Location': 'http://go-away.com'})
    with self.assertRaises(IOError):
        RestClient(self.TEST_BASE).call('GET', ())
def test_headers(self):
    """Default request headers carry the expected values."""
    from rest_client import RestClient
    RestClient(self.TEST_BASE).call('GET', ())
    req = httpretty.last_request()
    for name, value in (('host', 'host'),
                        ('connection', 'keep-alive'),
                        ('accept', 'application/json'),
                        ('accept-encoding', 'gzip, deflate')):
        self.assert_header(req, name, value)
def test_request(self):
    """A plain GET produces the expected request line and empty body."""
    from rest_client import RestClient
    RestClient(self.TEST_BASE).call('GET', ())
    req = httpretty.last_request()
    self.assertEqual(req.method, 'GET')
    self.assertEqual(req.path, '/base')
    self.assertEqual(req.body, '')
    self.assertEqual(req.request_version, 'HTTP/1.1')
def test_response(self):
    """call() returns the underlying requests.Response untouched."""
    import requests
    from rest_client import RestClient
    response = RestClient(self.TEST_BASE).call('GET', ())
    self.assertIsInstance(response, requests.Response)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response.text, self.TEST_BODY)
    self.assertEqual(response.url, self.TEST_BASE)
def initialize_backend(self): print '' logging.info("-------Setup Test Case-------") self.rest_client = RestClient(self.host, port=self.rest_port) if (self.backend == RemoteServer.MCD): self.memcached_backend_setup() else: self.couchbase_backend_setup() logging.info("-----Begin Test Case-----")
def couchbase_backend_setup(self):
    """Start from a clean slate: drop every bucket, recreate the default
    one, wait for warmup, then open the UPR and MCD connections."""
    self.rest_client = RestClient(self.host, port=self.rest_port)
    for bucket in self.rest_client.get_all_buckets():
        logging.info("Deleting bucket %s" % bucket)
        assert self.rest_client.delete_bucket(bucket)
    logging.info("Creating default bucket")
    assert self.rest_client.create_default_bucket()
    # Block until the new bucket has finished warming up.
    Stats.wait_for_warmup(self.host, self.port)
    self.upr_client = UprClient(self.host, self.port)
    self.mcd_client = McdClient(self.host, self.port)
def __init__(self, host='127.0.0.1', port=11211, timeout=30,
             admin_user="******", admin_pass="******",
             rest_port=8091, do_auth=True):
    """Open a memcached connection and optionally SASL-authenticate.

    After authentication, every bucket discovered through the REST API is
    selected once so later operations can target it.

    Fix: the caught exception was bound to an unused name (`as ex`);
    the binding is dropped while keeping the deliberate best-effort skip.
    """
    self.host = host
    self.port = port
    self.timeout = timeout
    self._createConn()
    self.r = random.Random()
    self.vbucket_count = 1024
    if do_auth:
        self.sasl_auth_plain(admin_user, admin_pass)
        # auth on any existing buckets
        rest_client = RestClient(host, port=rest_port)
        for bucket in rest_client.get_all_buckets():
            try:
                self.bucket_select(bucket)
            except Exception:
                # best effort — the bucket may still be warming up
                pass
def __init__(self):
    """Create a new Download class instance."""
    self.temp_dir = tempfile.mkdtemp()
    logger.debug("__init__: temp_dir= " + self.temp_dir)
    self.session = requests.session()
    # One client per host: the SSO host for login, "modern" for data.
    self.sso_rest_client = RestClient(self.session, self.garmin_sso_base_url)
    self.rest_client = RestClient(self.session, self.garmin_connect_modern_url)
    # Presumably these derive from rest_client (shared session) — confirm
    # RestClient.inherit semantics.
    self.activity_service_rest_client = RestClient.inherit(self.rest_client, self.garmin_connect_activity_service)
    self.download_service_rest_client = RestClient.inherit(self.rest_client, self.garmin_connect_download_service)
    # NOTE(review): "gc_gonfig" looks like a typo for "gc_config", but the
    # name is used consistently elsewhere in the class, so it is kept.
    self.gc_gonfig = GarminConnectConfigManager()
    self.download_days_overlap = self.gc_gonfig.download_days_overlap()
def main(self):
    """Top-level job: authenticate, optionally seed dummy data, then sync
    the Magento products."""
    self.configure_logging()
    log_file = self.config["global"]["log_path"] + 'magento_updater.log'
    logging.getLogger("main.stdout").info("Logging to file " + log_file)
    # Mode defaults to production unless the config says otherwise.
    mode = self.config["global"]["mode"]
    if mode is None:
        mode = "prod"
    rest_client = RestClient()
    token = self.authorize_user(rest_client)
    headers = {
        "Content-Type": "application/json",
        "Authorization": "Bearer " + token,
    }
    if mode == "devel":
        self.add_dummy_products(rest_client, headers)
    logging.info("User has been successfully logged in.")
    read_item_list = self.prepare_different_items_list(rest_client, headers)
    self.update_magento_products(rest_client, headers, read_item_list)
    logging.info("Job done, exiting. Goodbye.")
def test_segment_type(self):
    """Passing a bare string instead of a tuple/list of segments raises."""
    from rest_client import RestClient
    with self.assertRaises(TypeError):
        RestClient(self.TEST_BASE).call('GET', 'thisIsNotATupleOrList')
class User:
    """One side (Buy or Sell) of a hedged position: tracks the position,
    its take-profit / stop-loss / next-stop orders, and keeps them in sync
    with the opposite side via manage_orders()."""

    def __init__(self, test, key, secret, symbol, side):
        self.test = test
        self.symbol = symbol
        self.client = RestClient(test, key, secret, symbol)
        # Orders tracked for this side, refreshed by update_orders().
        self.take_profit_order = None
        self.stop_loss_order = None
        self.next_stop_order = None
        self.position = {}
        self.orders = []
        self.side = side

    def time_to_close(self):
        """Return True once the tracked take-profit order has been filled."""
        close = False
        if self.take_profit_order is not None:
            o = self.client.get_orders(filter='{"orderID": "%s"}' % self.take_profit_order['orderID'])
            if o is not None and o['ordStatus'] == 'Filled':
                close = True
        return close

    def close(self):
        """Cancel all orders and flatten the position (best effort)."""
        try:
            self.client.cancel_all()
            time.sleep(1)
            self.client.close_position()
            time.sleep(1)
            self.take_profit_order = None
            self.stop_loss_order = None
            self.next_stop_order = None
        except Exception as e:
            # Closing is best-effort; log and carry on.
            log.warning("Close warning: %s" % e)

    def update_orders(self):
        """Re-derive the three tracked orders from self.orders.

        A Limit order is the take-profit; a Stop order on our own side is
        the next entry ("next stop"), on the opposite side the stop-loss.
        """
        self.take_profit_order = None
        self.stop_loss_order = None
        self.next_stop_order = None
        for o in self.orders:
            if o['ordType'] == 'Limit':
                self.take_profit_order = o
            elif o['ordType'] == 'Stop':
                if o['side'] == self.side:
                    self.next_stop_order = o
                else:
                    self.stop_loss_order = o

    def manage_orders(self, opposite):
        """Keep take-profit / stop-loss / next-stop orders consistent with
        the current and the opposite side's positions.

        NOTE(review): relies on a module-level `data` dict for the
        profitPercent / swingPercent / qtyFactor tuning values — confirm.
        Indentation of the sections below was reconstructed from a
        collapsed source; verify the "Next stop order" block is meant to
        run even when this side has no open position (it handles
        entry_price is None, which only occurs in that case).
        """
        sign = 1 if self.side == 'Buy' else -1
        tp_order = self.take_profit_order
        opp_tp_order = opposite.take_profit_order
        curr_open = False
        opposite_open = False
        entry_price = None
        opp_entry_price = None
        curr_qty = 0
        opp_qty = 0
        if len(opposite.position) > 0 and opposite.position['isOpen'] is True:
            opposite_open = True
            opp_entry_price = opposite.position['avgEntryPrice']
            opp_qty = abs(opposite.position['currentQty'])
        if len(self.position) > 0 and self.position['isOpen'] is True:
            curr_open = True
            entry_price = self.position['avgEntryPrice']
            curr_qty = abs(self.position['currentQty'])
        if curr_open is True:
            #
            # Take profit order
            #
            if opposite_open is False:
                tp_price = int(entry_price + (sign * entry_price * data['profitPercent'] / 300))
            else:
                tp_price = int(entry_price + (sign * entry_price * data['profitPercent'] / 100))
            if tp_order is None:
                self.client.new_order(orderQty=curr_qty, ordType="Limit", side=opposite.side, price=tp_price)
                time.sleep(5)
            elif (tp_order['orderQty'] != curr_qty) or (tp_order['price'] != tp_price):
                self.client.amend_order(orderID=tp_order['orderID'], orderQty=curr_qty, price=tp_price)
                time.sleep(5)
            #
            # Stop loss order
            #
            stop_loss_order = self.stop_loss_order
            if stop_loss_order is None and opp_tp_order is not None:
                self.client.new_order(orderQty=curr_qty, ordType="Stop", execInst="LastPrice",
                                      side=opposite.side, stopPx=opp_tp_order['price'])
                time.sleep(5)
            elif stop_loss_order is not None and opp_tp_order is not None:
                if stop_loss_order['orderQty'] != curr_qty or stop_loss_order[
                        'stopPx'] != opp_tp_order['price']:
                    self.client.amend_order(orderID=stop_loss_order['orderID'], orderQty=curr_qty,
                                            stopPx=opp_tp_order['price'])
                    time.sleep(5)
        #
        # Next stop order
        #
        next_order = self.next_stop_order
        if next_order is None and opposite_open is True and curr_qty < opp_qty and opposite.next_stop_order is None \
                and opp_tp_order is not None:
            if opp_tp_order['ordStatus'] == 'Filled':
                return
            # No position on this side yet: project the entry from the
            # opposite side's entry plus the swing percentage.
            if entry_price is None:
                entry_price = int(opp_entry_price + (sign * opp_entry_price * data['swingPercent'] / 100))
            qty = math.ceil(opp_qty * data['qtyFactor'])
            if tp_order is None:
                tp_price = int(entry_price + (sign * entry_price * data['profitPercent'] / 100))
            else:
                tp_price = tp_order['price']
            # Size the order so the projected gain covers the opposite
            # side's projected loss (inverse-price contract arithmetic).
            losing_sum = opp_qty * abs((1 / opp_entry_price) - (1 / tp_price))
            gaining_sum = qty * abs((1 / entry_price) - (1 / tp_price))
            if abs(gaining_sum - losing_sum) > tp_price:
                for n in range(1, 100000):
                    gaining_sum = (opp_qty + n) * abs((1 / entry_price) - (1 / tp_price))
                    if gaining_sum - losing_sum > tp_price:
                        break
                qty = opp_qty + n
            qty -= curr_qty
            if qty > 0:
                stop_price = int(entry_price)
                self.client.new_order(orderQty=qty, ordType="Stop", execInst="LastPrice",
                                      side=self.side, stopPx=stop_price)
                time.sleep(5)
class ParametrizedTestCase(unittest.TestCase):
    """ TestCase classes that want to be parametrized should
        inherit from this class.
    """
    def __init__(self, methodName, backend, host, port):
        super(ParametrizedTestCase, self).__init__(methodName)
        self.backend = backend
        self.host = host
        self.port = port
        # "host" may carry the REST port as host:port; split it off,
        # otherwise fall back to the default REST port 9000.
        if host.find(':') != -1:
            self.host, self.rest_port = host.split(':')
        else:
            self.rest_port = 9000

    def initialize_backend(self):
        # Per-test setup: create the REST client, then delegate to the
        # backend-specific setup routine.
        print ''
        logging.info("-------Setup Test Case-------")
        self.rest_client = RestClient(self.host, port=self.rest_port)
        if (self.backend == RemoteServer.MCD):
            self.memcached_backend_setup()
        else:
            self.couchbase_backend_setup()
        logging.info("-----Begin Test Case-----")

    def destroy_backend(self):
        # Mirror of initialize_backend for teardown.
        logging.info("-----Tear Down Test Case-----")
        if (self.backend == RemoteServer.MCD):
            self.memcached_backend_teardown()
        else:
            self.couchbase_backend_teardown()

    def memcached_backend_setup(self):
        # Direct memcached backend: open clients and flush everything.
        self.upr_client = UprClient(self.host, self.port)
        self.mcd_client = McdClient(self.host, self.port)
        resp = self.mcd_client.flush().next_response()
        assert resp['status'] == SUCCESS, "Flush all is not enabled"

    def memcached_backend_teardown(self):
        self.upr_client.shutdown()
        self.mcd_client.shutdown()

    def couchbase_backend_setup(self):
        # Start from a clean slate: drop every bucket, recreate the
        # default one, and wait until it has warmed up.
        self.rest_client = RestClient(self.host, port=self.rest_port)
        for bucket in self.rest_client.get_all_buckets():
            logging.info("Deleting bucket %s" % bucket)
            assert self.rest_client.delete_bucket(bucket)
        logging.info("Creating default bucket")
        assert self.rest_client.create_default_bucket()
        Stats.wait_for_warmup(self.host, self.port)
        self.upr_client = UprClient(self.host, self.port)
        self.mcd_client = McdClient(self.host, self.port)

    def couchbase_backend_teardown(self):
        self.upr_client.shutdown()
        self.mcd_client.shutdown()
        for bucket in self.rest_client.get_all_buckets():
            logging.info("Deleting bucket %s" % bucket)
            assert self.rest_client.delete_bucket(bucket)
        self.rest_client = None

    @staticmethod
    def parametrize(testcase_klass, backend, host, port):
        """ Create a suite containing all tests taken from the given
            subclass, passing them the parameter 'param'.
        """
        testloader = unittest.TestLoader()
        testnames = testloader.getTestCaseNames(testcase_klass)
        suite = unittest.TestSuite()
        for name in testnames:
            suite.addTest(testcase_klass(name, backend, host, port))
        return suite
# Demo driver: exercises RestClient against the jsonplaceholder test API.
# Python 2 script (print statements).
from pprint import pprint
import logging
from rest_client import RestClient

logging.basicConfig(level=logging.ERROR)

if __name__ == '__main__':
    client = RestClient('http://jsonplaceholder.typicode.com',
                        auth=('username', 'password'),
                        options={'timeout': 3.0},
                        user_agent='PlaceHolderClient/1.0')
    print "\n=== PUT /posts/1 ==="
    response = client.call('PUT', ('posts', 1))
    pprint(response.json())
    print "\n=== GET /comments?postId=1 ==="
    response = client.call('GET', ('comments',), params=dict(postId=1))
    pprint(response.json())
class Download(object):
    """Class for downloading health data from Garmin Connect."""

    # Base URLs and per-service endpoints on Garmin Connect.
    garmin_connect_base_url = "https://connect.garmin.com"
    garmin_connect_enus_url = garmin_connect_base_url + "/en-US"
    garmin_sso_base_url = 'https://sso.garmin.com/sso'
    garmin_connect_sso_login = '******'
    garmin_connect_login_url = garmin_connect_enus_url + "/signin"
    garmin_connect_css_url = 'https://static.garmincdn.com/com.garmin.connect/ui/css/gauth-custom-v1.2-min.css'
    garmin_connect_privacy_url = "//connect.garmin.com/en-U/privacy"
    garmin_connect_modern_url = garmin_connect_base_url + "/modern"
    garmin_connect_modern_proxy = 'proxy'
    garmin_connect_download_service = garmin_connect_modern_proxy + "/download-service/files"
    garmin_connect_user_profile_url = garmin_connect_modern_proxy + "/userprofile-service/userprofile"
    garmin_connect_wellness_url = garmin_connect_modern_proxy + "/wellness-service/wellness"
    garmin_connect_sleep_daily_url = garmin_connect_wellness_url + "/dailySleepData"
    garmin_connect_rhr = garmin_connect_modern_proxy + "/userstats-service/wellness/daily"
    garmin_connect_weight_url = garmin_connect_modern_proxy + "/weight-service/weight/dateRange"
    garmin_connect_activity_service = garmin_connect_modern_proxy + "/activity-service/activity"
    garmin_connect_activity_search_url = garmin_connect_modern_proxy + "/activitylist-service/activities/search/activities"
    garmin_connect_usersummary_url = garmin_connect_modern_proxy + "/usersummary-service/usersummary"
    garmin_connect_daily_summary_url = garmin_connect_usersummary_url + "/daily/"

    def __init__(self):
        """Create a new Download class instance."""
        self.temp_dir = tempfile.mkdtemp()
        logger.debug("__init__: temp_dir= " + self.temp_dir)
        self.session = requests.session()
        # One client per host: the SSO host for login, "modern" for data.
        self.sso_rest_client = RestClient(self.session, self.garmin_sso_base_url)
        self.rest_client = RestClient(self.session, self.garmin_connect_modern_url)
        self.activity_service_rest_client = RestClient.inherit(self.rest_client, self.garmin_connect_activity_service)
        self.download_service_rest_client = RestClient.inherit(self.rest_client, self.garmin_connect_download_service)
        # NOTE(review): "gc_gonfig" looks like a typo for "gc_config", but
        # the name is used consistently below, so it is left unchanged.
        self.gc_gonfig = GarminConnectConfigManager()
        self.download_days_overlap = self.gc_gonfig.download_days_overlap()

    def __get_json(self, page_html, key):
        """Extract an embedded `key = JSON.parse("...")` blob from page HTML."""
        found = re.search(key + r" = JSON.parse\(\"(.*)\"\);", page_html, re.M)
        if found:
            json_text = found.group(1).replace('\\"', '"')
            return json.loads(json_text)

    def login(self):
        """Login to Garmin Connect."""
        profile_dir = GarminDBConfigManager.get_or_create_fit_files_dir()
        username = self.gc_gonfig.get_user()
        password = self.gc_gonfig.get_password()
        if not username or not password:
            print "Missing config: need username and password. Edit GarminConnectConfig.json."
            return
        logger.debug("login: %s %s", username, password)
        get_headers = { 'Referer' : self.garmin_connect_login_url }
        # Query parameters the SSO login form expects.
        params = {
            'service' : self.garmin_connect_modern_url,
            'webhost' : self.garmin_connect_base_url,
            'source' : self.garmin_connect_login_url,
            'redirectAfterAccountLoginUrl' : self.garmin_connect_modern_url,
            'redirectAfterAccountCreationUrl' : self.garmin_connect_modern_url,
            'gauthHost' : self.garmin_sso_base_url,
            'locale' : 'en_US',
            'id' : 'gauth-widget',
            'cssUrl' : self.garmin_connect_css_url,
            'privacyStatementUrl' : '//connect.garmin.com/en-US/privacy/',
            'clientId' : 'GarminConnect',
            'rememberMeShown' : 'true',
            'rememberMeChecked' : 'false',
            # 'customerId' : '',
            'createAccountShown' : 'true',
            'openCreateAccount' : 'false',
            'displayNameShown' : 'false',
            'consumeServiceTicket' : 'false',
            'initialFocus' : 'true',
            'embedWidget' : 'false',
            'generateExtraServiceTicket' : 'true',
            'generateTwoExtraServiceTickets' : 'false',
            'generateNoServiceTicket' : 'false',
            'globalOptInShown' : 'true',
            'globalOptInChecked' : 'false',
            'mobile' : 'false',
            'connectLegalTerms' : 'true',
            'locationPromptShown' : 'true',
            'showPassword' : 'true'
        }
        # Step 1: GET the login form to obtain the CSRF token.
        response = self.sso_rest_client.get(self.garmin_connect_sso_login, get_headers, params)
        if response.status_code != 200:
            logger.error("Login get failed (%d).", response.status_code)
            self.__save_binary_file('login_get.html', response)
            return False
        found = re.search(r"name=\"_csrf\" value=\"(\w*)", response.text, re.M)
        if not found:
            logger.error("_csrf not found.", response.status_code)
            self.__save_binary_file('login_get.html', response)
            return False
        logger.debug("_csrf found (%s).", found.group(1))
        # Step 2: POST the credentials with the CSRF token.
        data = {
            'username' : username,
            'password' : password,
            'embed' : 'false',
            '_csrf' : found.group(1)
        }
        post_headers = {
            'Referer' : response.url,
            'Content-Type' : 'application/x-www-form-urlencoded'
        }
        response = self.sso_rest_client.post(self.garmin_connect_sso_login, post_headers, params, data)
        found = re.search(r"\?ticket=([\w-]*)", response.text, re.M)
        if not found:
            logger.error("Login ticket not found (%d).", response.status_code)
            self.__save_binary_file('login_post.html', response)
            return False
        # Step 3: redeem the service ticket against the modern homepage.
        params = { 'ticket' : found.group(1) }
        response = self.rest_client.get('', params=params)
        if response.status_code != 200:
            logger.error("Login get homepage failed (%d).", response.status_code)
            self.__save_binary_file('login_home.html', response)
            return False
        # The homepage embeds the user preferences and social profile.
        self.user_prefs = self.__get_json(response.text, 'VIEWER_USERPREFERENCES')
        if profile_dir:
            self.rest_client.save_json_to_file(profile_dir + "/profile.json", self.user_prefs)
        self.display_name = self.user_prefs['displayName']
        self.social_profile = self.__get_json(response.text, 'VIEWER_SOCIAL_PROFILE')
        self.full_name = self.social_profile['fullName']
        root_logger.info("login: %s (%s)", self.full_name, self.display_name)
        return True

    def __save_binary_file(self, filename, response):
        """Stream the response body to disk in chunks."""
        with open(filename, 'wb') as file:
            for chunk in response:
                file.write(chunk)

    def unzip_files(self, outdir):
        """Unzip and downloaded zipped files into the directory supplied."""
        logger.info("unzip_files: " + outdir)
        for filename in os.listdir(self.temp_dir):
            match = re.search(r'.*\.zip', filename)
            if match:
                files_zip = zipfile.ZipFile(self.temp_dir + "/" + filename, 'r')
                files_zip.extractall(outdir)
                files_zip.close()

    def __get_stat(self, stat_function, directory, date, days, overwite):
        """Apply stat_function to each day in the range, stopping on the
        first failure; recent days are always re-fetched."""
        for day in progressbar.progressbar(xrange(0, days + 1)):
            download_date = date + datetime.timedelta(days=day)
            # always overight for yesterday and today since the last
            # download may have been a partial result
            delta = datetime.datetime.now().date() - download_date
            if not stat_function(directory, download_date, overwite or delta.days <= self.download_days_overlap):
                break
            # pause for a second between every page access
            time.sleep(1)

    def __get_summary_day(self, directory, date, overwite=False):
        """Download one day's summary JSON for the logged-in user."""
        root_logger.info("get_summary_day: %s", date)
        date_str = date.strftime('%Y-%m-%d')
        params = {
            'calendarDate' : date_str,
            '_' : str(conversions.dt_to_epoch_ms(conversions.date_to_dt(date)))
        }
        url = self.garmin_connect_daily_summary_url + self.display_name
        return self.rest_client.download_json_file(url, params, directory + '/daily_summary_' + date_str, overwite)

    def get_daily_summaries(self, directory, date, days, overwite):
        """Download the daily summary data from Garmin Connect and save to a JSON file."""
        root_logger.info("Geting daily summaries: %s (%d)", date, days)
        self.__get_stat(self.__get_summary_day, directory, date, days, overwite)

    def __get_monitoring_day(self, date):
        """Download one day's monitoring zip into the temp directory."""
        root_logger.info("get_monitoring_day: %s", date)
        response = self.download_service_rest_client.get('wellness/' + date.strftime("%Y-%m-%d"))
        if response and response.status_code == 200:
            self.__save_binary_file(self.temp_dir + '/' + str(date) + '.zip', response)

    def get_monitoring(self, date, days):
        """Download the daily monitoring data from Garmin Connect, unzip and save the raw files."""
        root_logger.info("Geting monitoring: %s (%d)", date, days)
        for day in progressbar.progressbar(xrange(0, days + 1)):
            day_date = date + datetime.timedelta(day)
            self.__get_monitoring_day(day_date)
            # pause for a second between every page access
            time.sleep(1)

    def __get_weight_day(self, directory, day, overwite=False):
        """Download one day's weight JSON."""
        root_logger.info("Checking weight: %s overwite %r", day, overwite)
        date_str = day.strftime('%Y-%m-%d')
        params = {
            'startDate' : date_str,
            'endDate' : date_str,
            '_' : str(conversions.dt_to_epoch_ms(conversions.date_to_dt(day)))
        }
        return self.rest_client.download_json_file(self.garmin_connect_weight_url, params, directory + '/weight_' + date_str, overwite)

    def get_weight(self, directory, date, days, overwite):
        """Download the weight data from Garmin Connect and save to a JSON file."""
        root_logger.info("Geting weight: %s (%d)", date, days)
        self.__get_stat(self.__get_weight_day, directory, date, days, overwite)

    def __get_activity_summaries(self, start, count):
        """Fetch `count` activity summaries starting at offset `start`."""
        root_logger.info("get_activity_summaries")
        params = {
            'start' : str(start),
            "limit" : str(count)
        }
        response = self.rest_client.get(self.garmin_connect_activity_search_url, params=params)
        if response.status_code == 200:
            return response.json()

    def __save_activity_details(self, directory, activity_id_str, overwite):
        """Download the detail JSON for one activity."""
        root_logger.debug("save_activity_details")
        json_filename = directory + '/activity_details_' + activity_id_str
        return self.activity_service_rest_client.download_json_file(activity_id_str, None, json_filename, overwite)

    def __save_activity_file(self, activity_id_str):
        """Download one activity's raw (zipped) file into the temp dir."""
        root_logger.debug("save_activity_file: " + activity_id_str)
        response = self.download_service_rest_client.get('activity/' + activity_id_str)
        if response.status_code == 200:
            self.__save_binary_file(self.temp_dir + '/activity_' + activity_id_str + '.zip', response)
        else:
            root_logger.error("save_activity_file: %s failed (%d): %s", response.url, response.status_code, response.text)

    def get_activities(self, directory, count, overwite=False):
        """Download activities files from Garmin Connect and save the raw files."""
        logger.info("Geting activities: '%s' (%d)", directory, count)
        activities = self.__get_activity_summaries(0, count)
        for activity in progressbar.progressbar(activities):
            activity_id_str = str(activity['activityId'])
            activity_name_str = conversions.printable(activity['activityName'])
            root_logger.info("get_activities: %s (%s)" % (activity_name_str, activity_id_str))
            json_filename = directory + '/activity_' + activity_id_str + '.json'
            # Skip files that already exist unless overwite is requested.
            if not os.path.isfile(json_filename) or overwite:
                root_logger.info("get_activities: %s <- %r" % (json_filename, activity))
                self.__save_activity_details(directory, activity_id_str, overwite)
                self.rest_client.save_json_to_file(json_filename, activity)
            if not os.path.isfile(directory + '/' + activity_id_str + '.fit') or overwite:
                self.__save_activity_file(activity_id_str)
            # pause for a second between every page access
            time.sleep(1)

    def get_activity_types(self, directory, overwite):
        """Download the activity types from Garmin Connect and save to a JSON file."""
        root_logger.info("get_activity_types: '%s'", directory)
        return self.activity_service_rest_client.download_json_file('activityTypes', None, directory + '/activity_types', overwite)

    def __get_sleep_day(self, directory, date, overwite=False):
        """Download one day's sleep JSON."""
        json_filename = directory + '/sleep_' + str(date)
        params = {
            'date' : date.strftime("%Y-%m-%d")
        }
        return self.rest_client.download_json_file(self.garmin_connect_sleep_daily_url + '/' + self.display_name, params, json_filename, overwite)

    def get_sleep(self, directory, date, days, overwite):
        """Download the sleep data from Garmin Connect and save to a JSON file."""
        root_logger.info("Geting sleep: %s (%d)", date, days)
        self.__get_stat(self.__get_sleep_day, directory, date, days, overwite)

    def __get_rhr_day(self, directory, day, overwite=False):
        """Download one day's resting-heart-rate JSON."""
        date_str = day.strftime('%Y-%m-%d')
        json_filename = directory + '/rhr_' + date_str
        params = {
            'fromDate' : date_str,
            'untilDate' : date_str,
            'metricId' : 60
        }
        return self.rest_client.download_json_file(self.garmin_connect_rhr + '/' + self.display_name, params, json_filename, overwite)

    def get_rhr(self, directory, date, days, overwite):
        """Download the resting heart rate data from Garmin Connect and save to a JSON file."""
        root_logger.info("Geting rhr: %s (%d)", date, days)
        self.__get_stat(self.__get_rhr_day, directory, date, days, overwite)
from es_crash import ESCrash from elasticsearch import Elasticsearch import elasticsearch.helpers from bucketer import MLT, MLTStandardUnicode, MLTLetters, MLTIdentifier, MLTCamelCase, MLTLerch, MLTNGram from threshold import Threshold import json import requests from rest_client import RestClient if len(sys.argv) < 2+1: print "Usage: " + sys.argv[0] + "oracle.json http://restservicehost:port/" oracle_file_path = sys.argv[1] rest_service_url = sys.argv[2] client = RestClient(rest_service_url) es = Elasticsearch(["localhost"], retry_on_timeout=True) ESCrash.es = es beta = 1.0 comparisons = { #'ccx0.0': {'bucketer': MLTCamelCase, 'kwargs': {'thresholds':[0.0, 'lowercase':False, 'only_stack':False}}, #'ccx1.0': {'bucketer': MLTCamelCase, 'kwargs': {'thresholds':[1.0, 'lowercase':False, 'only_stack':False}}, #'ccx2.0': {'bucketer': MLTCamelCase, 'kwargs': {'thresholds':[2.0, 'lowercase':False, 'only_stack':False}}, #'ccx3.0': {'bucketer': MLTCamelCase, 'kwargs': {'thresholds':[3.0, 'lowercase':False, 'only_stack':False}}, #'ccx4_0': {'bucketer': MLTCamelCase, 'kwargs': {'thresholds':[4.0], 'lowercase':False, 'only_stack':False}}, #'ccx5.0': {'bucketer': MLTCamelCase, 'kwargs': {'thresholds':[5.0, 'lowercase':False, 'only_stack':False}}, #'ccx6.0': {'bucketer': MLTCamelCase, 'kwargs': {'thresholds':[6.0, 'lowercase':False, 'only_stack':False}}, #'ccx7.0': {'bucketer': MLTCamelCase, 'kwargs': {'thresholds':[7.0, 'lowercase':False, 'only_stack':False}},
#!/usr/bin/env python # -*- coding: utf-8 -*- import argparse from rest_client import RestClient if __name__ == '__main__': # parse parameters parser = argparse.ArgumentParser( description="Send API calls to easy_phi web application. " "Results will be printed to console") parser.add_argument('--host', default='http://localhost:8000', help='Easy Phi platform network name') parser.add_argument('-s', '--slot', default=0, help='slot number, 0..31') parser.add_argument('-a', '--api-token', default='', help='API token, login and look at the top left corner ' 'of web interface') parser.add_argument('cmd', help='SCPI command to send') args = parser.parse_args() rc = RestClient(args.host, args.api_token or None) print rc.scpi(args.slot, args.cmd)