def main():
    """Load weekly projections for every position and completed week into raw.projections."""
    logger = get_root_logger()
    get_header(logger, 'LOADING PROJECTIONS')

    client = APIClient()

    # grab dataframe shape from a trial run
    data = client.get_data('weekly-projections', 'json', 'QB')
    test_df = json_normalize(data['Projections'])

    # grab current week
    current_week = test_df.week.values[0]

    # Collect one frame per (week, position) pair and concatenate once at the
    # end: repeated DataFrame.append() is deprecated in pandas and quadratic
    # in total copying cost.
    from pandas import concat
    frames = []
    for wk in [str(x) for x in range(int(current_week))]:
        logger.info('Processing projections for week {0}'.format(int(wk) + 1))
        # loop through all positions
        for pos in ['QB', 'RB', 'WR', 'TE', 'K', 'DEF']:
            tmp_data = client.get_data('weekly-projections', 'json', pos, wk)
            frames.append(json_normalize(tmp_data['Projections']))
    df = concat(frames) if frames else DataFrame(columns=test_df.columns)

    # import this df directly to PG DB
    conn = DBClient()
    conn.load(df, 'projections', schema='raw', if_exists='replace')
class JobCancellationTracker(threading.Thread):
    """Thread to poll test status changes on the server and kill jobs if requested"""

    def __init__(self, server, test_id, check_interval=60):
        # server: endpoint handed to APIClient; test_id: id of the test to watch;
        # check_interval: seconds between status polls.
        self.test_id = test_id
        self.stop_requested = False  # cooperative shutdown flag, set by stop()
        self.api_client = APIClient(server)
        self.check_interval = check_interval
        threading.Thread.__init__(self)
        log.info("Starting to watch for job status changes on the server for: {}".format(self.test_id))

    def run(self):
        """Poll the server every check_interval seconds and kill local jobs on cancellation."""
        self.api_client.login()
        while not self.stop_requested:
            # Sleep first, so a freshly started tracker does not hammer the server.
            time.sleep(self.check_interval)
            # Check job status:
            status = self.api_client.get('/tests/status/id/'+self.test_id)
            if status.get('status', None) in ('cancelled', 'cancel_pending'):
                self.kill_jobs()

    def stop(self):
        # Ask the polling loop to exit; takes effect after the current sleep.
        self.stop_requested = True

    def kill_jobs(self):
        """Kill cstar_perf_stress and cassandra-stress"""
        for proc in psutil.process_iter():
            # Prefix match: process names are truncated on some platforms
            # (e.g. 15 chars on Linux), so "cstar_perf_stress" may not appear whole.
            if proc.name().startswith("cstar_perf_stre"):
                log.info("Killing cstar_perf_stress - pid:{}".format(proc.pid))
                proc.kill()
            if proc.name() == "java":
                # cassandra-stress runs as a java process; identify it by its main class.
                if "org.apache.cassandra.stress.Stress" in " ".join(proc.cmdline()):
                    log.info("Killing cassandra-stress - pid:{}".format(proc.pid))
                    proc.kill()
def main():
    """Collect fresh Spotify data, then email recipients if there are new releases."""
    # Stage 1: pull data from the Spotify API into the local database.
    try:
        api = APIClient()
        api.collect_data()
        print("Data collected from Spotify\n")
    except Exception as e:
        print("Exception: " + str(e))

    # Stage 2: send a notification email only when new releases were found
    # for the recipients configured in the config database.
    try:
        mailer = EmailNotifier()
        pending = mailer.get_data_to_send()
        if not pending:
            print("No new releases")
        else:
            mailer.send_email()
            print("Email sent")
    except Exception as e:
        print("Exception: " + str(e))
def __init__(self, server, test_id, check_interval=60):
    # server: endpoint handed to APIClient; test_id: id of the test to watch;
    # check_interval: seconds between polls (presumably consumed by the run
    # loop of the enclosing Thread subclass -- not visible here).
    self.test_id = test_id
    self.stop_requested = False  # cooperative shutdown flag
    self.api_client = APIClient(server)
    self.check_interval = check_interval
    threading.Thread.__init__(self)
    log.info("Starting to watch for job status changes on the server for: {}".format(self.test_id))
class JobCancellationTracker(threading.Thread):
    """Thread to poll test status changes on the server and kill jobs if requested"""

    def __init__(self, server, test_id, check_interval=60):
        # server: endpoint handed to APIClient; test_id: id of the test to watch;
        # check_interval: seconds between status polls.
        self.test_id = test_id
        self.stop_requested = False  # cooperative shutdown flag, set by stop()
        self.api_client = APIClient(server)
        self.check_interval = check_interval
        threading.Thread.__init__(self)
        log.info("Starting to watch for job status changes on the server for: {}".format(self.test_id))

    def run(self):
        """Poll the server every check_interval seconds and kill local jobs on cancellation."""
        self.api_client.login()
        # `not self.stop_requested` replaces the non-idiomatic `== False`
        # comparison (and matches the sibling implementation of this class).
        while not self.stop_requested:
            time.sleep(self.check_interval)
            # Check job status:
            status = self.api_client.get('/tests/status/id/'+self.test_id)
            if status.get('status', None) in ('cancelled', 'cancel_pending'):
                self.kill_jobs()

    def stop(self):
        # Ask the polling loop to exit; takes effect after the current sleep.
        self.stop_requested = True

    def kill_jobs(self):
        """Kill cstar_perf_stress and cassandra-stress"""
        for proc in psutil.process_iter():
            # Prefix match: OS-level process names may be truncated.
            if proc.name().startswith("cstar_perf_stre"):
                log.info("Killing cstar_perf_stress - pid:{}".format(proc.pid))
                proc.kill()
            if proc.name() == "java":
                # cassandra-stress runs as a java process; identify it by its main class.
                if "org.apache.cassandra.stress.Stress" in " ".join(proc.cmdline()):
                    log.info("Killing cassandra-stress - pid:{}".format(proc.pid))
                    proc.kill()
def test_get_top_stories_with_comments():
    """Retrieves the top stories with all its direct kids/comments"""
    story_one = {'id': 1, 'kids': [3]}
    story_two = {'id': 2, 'kids': [4, 5]}

    def fake_top_stories(*args):
        return [story_one, story_two]

    def fake_kids(*args):
        if args[0] == story_one:
            return [{'id': 3}]
        elif args[0] == story_two:
            return [{'id': 4}, {'id': 5}]

    api_client = APIClient()
    api_client.get_top_stories = Mock(side_effect=fake_top_stories)
    api_client.get_kids = Mock(side_effect=fake_kids)

    result = api_client.get_top_stories_with_comments(2)  # get two top stories

    story_ids = sorted(story['id'] for story in result['stories'])
    comment_ids = sorted(comment['id'] for comment in result['comments'])

    assert set(result.keys()) == set(['comments', 'stories'])
    assert story_ids == [1, 2]
    assert comment_ids == [3, 4, 5]
def translate(texts):
    """Request translation of the given texts and block until all complete."""
    api_client = APIClient()
    response = api_client.translate(texts)
    # Gather the uid of every translation object returned by the service.
    uids = []
    for translation_object in response['objects']:
        uids.append(translation_object['uid'])
    api_client.update_translations_until_completed(uids)
def extract(self, service, key):
    """Pull `service` data from the API and flatten data[key] into a DataFrame."""
    response = APIClient().get_data(service)
    return json_normalize(response[key])
def get_job_status(test_id, api_endpoint_url):
    """Return the status string for test_id from the API, or None on failure."""
    api_client = APIClient(api_endpoint_url)
    try:
        status = api_client.get('/tests/status/id/' + test_id)
    except Exception as e:
        # Exception.message does not exist in Python 3; logging the exception
        # itself avoids an AttributeError inside this handler.
        log.error(e)
        status = None
    log.debug('JobStatusRetriever -- status of test_id {test_id} is: {s}'.format(s=status, test_id=test_id))
    return status.get('status') if status else None
def request(self):
    """Execute the configured GET request and report non-200 responses.

    Returns the raw response object from APIClient.request().
    """
    api = APIClient(self.path, self.params, paths.GET_METHOD)
    result = api.request()
    handle_err_msg("Status code = " + str(result.status_code))
    if result.status_code != 200:
        # str() is required here: status_code is an int, and concatenating it
        # to a str raises TypeError, masking the real API error.
        handle_err_msg("Error hitting the API " + str(result.status_code))
    return result
def create_user(client: APIClient, kwargs: dict) -> User:
    """Register and authenticate a fresh user derived from kwargs['user_index']."""
    assert "user_index" in kwargs
    username, password = generate_user_credentials(kwargs["user_index"])
    new_user = User(username=username,
                    password=password,
                    access_token=None,
                    user_id=None)
    client.sign_up(new_user)
    client.authenticate(new_user)
    return new_user
def __tell_server(self, event):
    """Report stress-run progress to the API server after a stats file update.

    event: filesystem event whose src_path points at the updated stats JSON file.
    """
    with open(event.src_path) as fh:
        stats_json = json.loads(fh.read())
    # Most recent completed operation is the last entry in the stats list.
    last_stat = stats_json['stats'][-1]
    # Total work = every (operation, revision) combination in the job.
    total_ops = len(self._job['operations']) * len(self._job['revisions'])
    msg = "Last Op Completed: {}:{}, finished {} of {} total ops ({})".format(
        last_stat['revision'], last_stat['type'], len(stats_json['stats']), total_ops,
        str(datetime.datetime.now())
    )
    api_client = APIClient(self._api_endpoint_url)
    api_client.post('/tests/progress/id/{}'.format(self._job['test_id']),
                    data=json.dumps({'progress_msg': msg}))
def get_job_status(test_id, api_endpoint_url):
    """Return the status string for test_id from the API, or None on failure."""
    api_client = APIClient(api_endpoint_url)
    try:
        status = api_client.get('/tests/status/id/' + test_id)
    except Exception as e:
        # Exception.message does not exist in Python 3; logging the exception
        # itself avoids an AttributeError inside this handler.
        log.error(e)
        status = None
    log.debug(
        'JobStatusRetriever -- status of test_id {test_id} is: {s}'.format(
            s=status, test_id=test_id))
    return status.get('status') if status else None
def __tell_server(self, event):
    """Post a progress message for the current job to the API server."""
    with open(event.src_path) as stats_file:
        stats = json.loads(stats_file.read())
    completed = stats['stats']
    latest = completed[-1]
    # Total work = every (operation, revision) combination in the job.
    total = len(self._job['operations']) * len(self._job['revisions'])
    progress = "Last Op Completed: {}:{}, finished {} of {} total ops ({})".format(
        latest['revision'], latest['type'], len(completed), total,
        str(datetime.datetime.now()))
    APIClient(self._api_endpoint_url).post(
        '/tests/progress/id/{}'.format(self._job['test_id']),
        data=json.dumps({'progress_msg': progress}))
def test_update_translations_until_completed(db_wrapper):
    """fetching and persisting translations until their status becomes 'completed' """
    api_client = APIClient(db_wrapper)
    # First poll reports 'new', second reports 'completed'.
    polled_statuses = [
        [{'uid': 'fake_uid', 'status': 'new'}],
        [{'uid': 'fake_uid', 'status': 'completed'}],
    ]
    api_client.get_translations = Mock(side_effect=polled_statuses)

    api_client.update_translations_until_completed(
        translation_uids=['fake_uid'], wait_time_between_updates=0)

    stored = db_wrapper.db.translations.find_one()
    assert db_wrapper.db.translations.count() == 1
    assert stored['uid'] == 'fake_uid'
    assert stored['status'] == 'completed'
def test_get_translation():
    """Retrieving translation by uid"""
    json_response = APIClient().translate(['hello'],
                                          source_language='en',
                                          target_languages=['fr'])
    translation_request = json_response['objects'][0]
    # Fetch the same translation back by uid (leftover debug print removed).
    json_response = APIClient().get_translation(translation_request['uid'])
    assert json_response['uid'] == translation_request['uid']
    assert json_response['text'] == translation_request['text']
    assert json_response['source_language'] == translation_request[
        'source_language']
    assert json_response['target_language'] == translation_request[
        'target_language']
def test_class_init(self):
    """APIClient should construct successfully from basic credentials."""
    client = APIClient(
        username="******",
        password="******",
        api_url="localhost:8000/api/",
    )
    self.assertIsInstance(client, APIClient)
def test_get_kids():
    """Retrieves kids of an item"""
    # the story will have 2 descendants in total
    items_by_id = {2: {'id': 2}, 3: {'id': 3}}

    def fake_get_item(*args):
        return items_by_id.get(args[0])

    api_client = APIClient()
    api_client.get_item = Mock(side_effect=fake_get_item)

    comments = api_client.get_kids({'id': 1, 'kids': [2, 3]})
    assert sorted(comment['id'] for comment in comments) == [2, 3]
def main():
    """Run generate_query once for every node id listed in nodes.txt."""
    today = datetime.datetime.now()
    # NOTE(review): API credentials are hard-coded; consider moving them to
    # configuration or an environment variable.
    client = APIClient('https', 'cylonaem.com', 443, 'ucd-api',
                       'xolg-cpgo-ugzc-itve-zbdj-sjgp-tdtn-ydad')
    # Context manager guarantees the file is closed even if generate_query
    # or int() raises (the bare open/close pair leaked the handle on error).
    with open("nodes.txt", "r") as fin:
        for num in fin:
            node = int(num)
            print(node)
            generate_query(client, node, today)
def test_get_top_stories():
    """Fetching N top stories"""
    stories = APIClient().get_top_stories(2)

    assert len(stories) == 2
    first, second = stories
    assert isinstance(first, dict)
    assert isinstance(second, dict)
    assert isinstance(first['title'], str)
    assert isinstance(first['score'], int)
    assert first['type'] == 'story'
def main():
    """Import bye-week data from the API into the raw.byes table."""
    logger = get_root_logger()
    get_header(logger, "Importing Bye weeks")

    client = APIClient()
    data = client.get_data("byes")

    # Flatten every key's records and concatenate them in one pass;
    # repeated DataFrame.append() is deprecated in pandas and quadratic.
    from pandas import concat
    frames = [json_normalize(data[key]) for key in data]
    df = concat(frames) if frames else None

    # import this df directly to PG DB
    conn = DBClient()
    conn.load(df, "byes", schema="raw", if_exists="replace")
def create_post(client: APIClient, kwargs: dict) -> int:
    """Create a lorem-ipsum post authored by kwargs['user']; return the post id."""
    assert "user" in kwargs
    lorem_ipsum = (
        "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor "
        "incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud "
        "exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute "
        "irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla "
        "pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia "
        "deserunt mollit anim id est laborum.")
    created = client.create_post(kwargs["user"], lorem_ipsum)
    return created["id"]
def test_translate():
    """Checking that we can request translation of multiple texts at once"""
    source_texts = ['hello', 'world']
    response = APIClient().translate(source_texts,
                                     source_language='en',
                                     target_languages=['pt', 'fr'])
    translation_objects = response['objects']

    # 2 texts x 2 target languages = 4 translation objects
    assert len(translation_objects) == 4
    assert {obj['source_language'] for obj in translation_objects} == {'en'}
    assert {obj['text'] for obj in translation_objects} == {'hello', 'world'}
    assert {obj['target_language'] for obj in translation_objects} == {'fr', 'pt'}
def main():
    """Continuously run generate_query for every node id listed in nodes.txt."""
    # NOTE(review): API credentials are hard-coded; consider moving them to
    # configuration or an environment variable.
    client = APIClient('https', 'cylonaem.com', 443, 'ucd-api',
                       'xolg-cpgo-ugzc-itve-zbdj-sjgp-tdtn-ydad')
    # Read and parse the node ids once up front; the context manager closes
    # the file even on error, and bad lines now fail fast at startup instead
    # of mid-polling.
    with open("nodes.txt", "r") as fin:
        nodes = [int(line) for line in fin]
    # Poll forever, cycling through all nodes on each pass.
    while True:
        for node in nodes:
            print(node)
            generate_query(client, node)
def main():
    # Parse CLI arguments and the config file that sizes the load test.
    arguments = parse_arguments()
    settings = parse_config(arguments["config"])
    client = APIClient(arguments["url"])

    print("Creating Users..")
    # One task per user index; each create_user call receives {"user_index": i}.
    users = execute_in_threads(
        func=partial(create_user, client),
        sequence=({
            "user_index": i
        } for i in range(settings["number_of_users"])),
        max_sequence_length=settings["number_of_users"],
    )

    print("Creating Posts for each User..")
    # Each user authors a random number of posts, up to max_posts_per_user.
    number_of_posts_for_each_user = [
        random.randint(0, settings["max_posts_per_user"])
        for _ in range(len(users))
    ]
    total_number_of_posts = sum(number_of_posts_for_each_user)
    post_ids = execute_in_threads(
        func=partial(create_post, client),
        sequence=post_authors_generator(users, number_of_posts_for_each_user),
        max_sequence_length=total_number_of_posts,
    )

    print("Liking random Posts by each User..")
    # A user cannot like more posts than exist, hence the min() cap.
    number_of_likes_for_each_user = [
        random.randint(0, min(settings["max_likes_per_user"], len(post_ids)))
        for _ in range(len(users))
    ]
    total_number_of_likes = sum(number_of_likes_for_each_user)
    execute_in_threads(
        func=partial(like_post, client),
        sequence=posts_to_like_generator(users, post_ids,
                                         number_of_likes_for_each_user),
        max_sequence_length=total_number_of_likes,
    )

    # Final summary of everything the load generator created.
    print(f"\nTotal Users created: {len(users)}\n"
          f"Total Posts created: {total_number_of_posts}\n"
          f"Total number of Likes: {total_number_of_likes}")
def sort_to_order(url, token):
    """Split finalized batch 240 into per-post-process sub-batches for shipping."""
    log.info("Creating api client")
    api = APIClient(url, token)

    batch = api.batch(240).get()
    # Guard clause replaces the original `if finalized == False: pass else:`
    # shape: nothing to do until the operator has finalized the batch.
    if not batch['finalized']:
        return

    log.info('Deleting prints marked as reprint')
    api.print.delete(id="airplane,knight")

    log.info('Updating prints to the next step')
    prints = [{
        "id": "widget",
        "title": "Widget",
        "copies": 1,
        "design_reference": "bracket",
        "material_reference": "MJF_PA12-BLACK",
        "attributes": {
            "prev_step": "dye_black",
            "next_step": "shipping",
            "technology": "MJF",
            "order_id": 443,
            "tray": "240",
            "target_date": "2022-03-18"
        }
    }, {
        "id": "coupling",
        "title": "Coupling",
        "copies": 1,
        "design_reference": "coupling",
        "material_reference": "SLS_PA11-BLACK",
        "attributes": {
            "prev_step": "dye_black",
            "next_step": "shipping",
            "technology": "SLS",
            "order_id": 302,
            "tray": "240",
            "target_date": "2022-03-18"
        }
    }, {
        "id": "flange",
        "title": "Flange",
        "copies": 1,
        "design_reference": "flange",
        "material_reference": "SLS_PA11-RED",
        "attributes": {
            "prev_step": "dye_red",
            "next_step": "shipping",
            "technology": "SLS",
            "order_id": 201,
            "tray": "240",
            "target_date": "2022-03-10"
        }
    }, {
        "id": "vase",
        "title": "Vase",
        "copies": 3,
        "design_reference": "vase",
        "material_reference": "MJF_PA12-RED",
        "attributes": {
            "prev_step": "dye_red",
            "next_step": "shipping",
            "technology": "MJF",
            "order_id": 168,
            "tray": "240",
            "target_date": "2022-07-23"
        }
    }]
    api.print.put(prints)

    log.info('Creating sub-batches')
    # One sub-batch per previous post-processing step within tray 240.
    sub_batches = [{
        "id": "240_no_post_process",
        "title": "Tray 240: no post process",
        "query": "tray=240&prev_step=printing",
    }, {
        "id": "240_dye_black",
        "title": "Tray 240: Dye black",
        "query": "tray=240&prev_step=dye_black",
    }, {
        "id": "240_dye_red",
        "title": "Tray 240: Dye red",
        "query": "tray=240&prev_step=dye_red",
    }]
    api.batch.put(sub_batches)

    # typo fixed in the log message ("Deleteing")
    log.info('Deleting original batch')
    api.batch(240).delete()
def test_get_top_stories_ids():
    """Checking that we get a list of integers"""
    ids = APIClient().get_top_stories_ids()
    assert isinstance(ids, list)
    # avoid shadowing the builtin `id` while checking element types
    assert all(isinstance(story_id, int) for story_id in ids)
import pytest
from jsonschema import validate
from api_client import APIClient

# Shared client for the public dog.ceo REST API.
API_DOG = APIClient(base_address='https://dog.ceo/api')

# Endpoints that should all answer with a successful status.
PATHS = [
    '/breeds/list/all',
    '/breeds/image/random',
    '/breeds/image/random/3',
    '/breed/hound/images',
    '/breed/hound/images/random',
    '/breed/hound/list',
    '/breed/hound/afghan/images',
    '/breed/hound/afghan/images/random',
]
NUMBERS = [0, 1, 3, 10, 50, 51]


@pytest.mark.parametrize('path', PATHS)
def test_valid_request(path):
    """Check that a valid (successful) status is returned."""
    assert API_DOG.get(path=path).ok


def get_breeds():
    """Return the list of breed names."""
    payload = API_DOG.get(path='/breeds/list/all').json()
    return list(payload['message'].keys())
import logging

from api_client import APIClient

logging.basicConfig(level=logging.DEBUG)

# Exercise the client against httpbin and dump the raw response details.
with APIClient('https://httpbin.org') as client:
    result = client.get('/anything/one')
    print(result)
    print(result.response.status_code)
    print(result.response.headers)
from api_client import APIClient
from trade_manager import TradeManager

if __name__ == "__main__":
    # Load the API secret from the local dotfile.
    # NOTE(review): readline() keeps any trailing newline -- confirm that
    # APIClient tolerates (or strips) it.
    with open("./.api_secret") as secret_file:
        api_secret = secret_file.readline()

    # Connect and subscribe to the trading + ticker channels.
    symbol = "BTC-USD"
    client = APIClient(api_secret)
    client.subscribe_trading()
    client.subscribe_ticker(symbol)

    # Run a simple market-making strategy.
    manager = TradeManager(client, symbol)
    manager.trade(max_num_orders=2, max_num_days=1)
def api_client_base(path):
    """Build an APIClient rooted at the given base address."""
    client = APIClient(base_address=path)
    return client
def api_client(request):
    """Build an APIClient from the --url command-line option.

    The parameter must be named `request` to receive pytest's built-in
    request fixture; the original name `requests` is never injected by
    pytest (and shadows the requests library). This also matches the
    sibling api_client fixture elsewhere in the project.
    """
    base_url = request.config.getoption("--url")
    return APIClient(base_address=base_url)
def api_client(request):
    """Fixture helper: APIClient pointed at the URL supplied via --url."""
    return APIClient(base_address=request.config.getoption('--url'))
def perform_configuration(url, token):
    """One-time service setup: material references, print attributes,
    query categories, sorting queries, and the batch-finalize webhook.

    url/token: credentials for the APIClient.
    """
    log.info("Creating api client")
    api = APIClient(url, token)

    log.info("Upload material references")
    # Maps local material ids (optionally suffixed with a dye color) to
    # printer material identifiers.
    references = [
        {
            "id": "SLS_PA11",
            "material": "MTR_PRT_SLS_MAT_PA2200"
        },
        {
            "id": "MJF_PA12",
            "material": "MTR_PRT_MJF_MAT_PA12"
        },
        {
            "id": "SLS_PA11-BLACK",
            "material": "MTR_PRT_SLS_MAT_PA2200_DYE_black"
        },
        {
            "id": "MJF_PA12-BLACK",
            "material": "MTR_PRT_MJF_MAT_PA12_DYE_black"
        },
        {
            "id": "SLS_PA11-RED",
            "material": "MTR_PRT_SLS_MAT_PA2200_DYE_red"
        },
        {
            "id": "MJF_PA12-RED",
            "material": "MTR_PRT_MJF_MAT_PA12_DYE_red"
        },
    ]
    api.material_reference.put(references)

    log.info("Uploading print attributes")
    # Attribute definitions control filtering/sorting/visibility of each
    # field in the UI; `order` fixes the display position where given.
    attributes = [{
        "id": "next_step",
        "title": "Next step",
        "field": "next_step",
        "datatype": "STRING",
        "filtering": True,
        "sorting": True,
        "detail": True,
        "summary": True,
        "order": 0
    }, {
        "id": "prev_step",
        "title": "Previous step",
        "field": "prev_step",
        "datatype": "STRING",
        "filtering": True,
        "sorting": True,
        "detail": True,
        "summary": False,
        "order": 1
    }, {
        "id": "technology",
        "title": "Technology",
        "field": "technology",
        "datatype": "STRING",
        "filtering": True,
        "sorting": False,
        "detail": True,
        "summary": False
    }, {
        "id": "order",
        "title": "Order #",
        "field": "order_id",
        "datatype": "NUMBER",
        "filtering": True,
        "sorting": False,
        "detail": True,
        "summary": True
    }, {
        "id": "tray",
        "title": "Print tray",
        "field": "tray",
        "datatype": "STRING",
        "filtering": True,
        "sorting": False,
        "detail": True,
        "summary": False
    }, {
        "id": "target_date",
        "title": "Target date",
        "field": "target_date",
        "datatype": "STRING",
        "filtering": True,
        "sorting": True,
        "detail": True,
        "summary": True
    }]
    api.print_attribute.put(attributes)

    log.info("Uploading query categories")
    categories = [
        {
            "id": "next_step",
            "title": "Next step"
        },
        {
            "id": "order",
            "title": "Order #"
        },
        {
            "id": "date",
            "title": "Target date"
        },
    ]
    api.query_category.put(categories)

    log.info("Uploading sorting queries")
    # Each sorting query groups prints by a dynamic attribute under one of
    # the categories uploaded above.
    sorting_queries = [{
        "id": "next_step",
        "title": "Next step",
        "query": "",
        "category": "next_step",
        "dynamic_attribute": "next_step"
    }, {
        "id": "target_date",
        "title": "Target date",
        "query": "",
        "category": "date",
        "dynamic_attribute": "target_date"
    }, {
        "id": "order",
        "title": "Order #",
        "query": "",
        "category": "order",
        "dynamic_attribute": "order"
    }]
    api.query.put(sorting_queries)

    log.info("Subscribing to the webhook")
    # Notify the target URL whenever a batch is finalized.
    api.webhook.post({
        'event': 'batch.finalize',
        'target': 'http://example.com/on_batch_sorted/'
    })
#!/home/jjardel/fb/pkgs/envs/etl/bin/python

from api_client import APIClient
from db_client import DBClient

from pandas.io.json import json_normalize

# Pull the NFL teams list from the API and flatten it into a table.
client = APIClient()
teams = json_normalize(client.get_data('nfl-teams')['NFLTeams'])

# import this df directly to PG DB
conn = DBClient()
conn.load(teams, 'teams', schema='raw', if_exists='replace')
def top_stories(number_of_stories):
    """Return a JSON response with the top N stories and their comments."""
    payload = APIClient().get_top_stories_with_comments(number_of_stories)
    response = jsonify(payload)
    response.status_code = 200  # explicit, though this is jsonify's default
    return response
def __init__(self, *args, prefix, **kwargs):
    """Initialize the bot, wiring the command prefix and admin commands.

    prefix is keyword-only and forwarded to the base class as command_prefix.
    """
    # Zero-argument super() is the Python 3 idiom (this signature's
    # keyword-only parameter already requires Python 3) and avoids
    # hard-coding the class name.
    super().__init__(*args, command_prefix=prefix, **kwargs)
    self.api_client = APIClient(self.loop)
    # Register the extension-management commands.
    self.add_command(self.load)
    self.add_command(self.unload)
    self._was_ready_once = False  # tracks whether on_ready has fired before
def get_top_stories_with_comments():
    """Fetch top stories plus comments and upsert both into the database."""
    client = APIClient()
    db = DatabaseWrapper()
    payload = client.get_top_stories_with_comments()
    db.upsert_stories(payload['stories'])
    db.upsert_comments(payload['comments'])