def __init__(self, ip=None):
    """Build the interpreter: one Endpoint per mode plus their completers.

    Args:
        ip: address of the target host handed to each Endpoint factory
            (may be None).
    """
    # Bug fix: the original assigned self.security_ep = None twice, and all
    # of the None pre-assignments were dead since the factory calls below
    # overwrite them immediately. base_ep has no factory, so it stays None.
    self.base_ep = None
    self.ip = ip
    self.mode = InterpreterMode.BASE
    # If you add a new endpoint, create a JSON file for it, then add it here
    self.admin_ep = Endpoint.factory(Endpoint.EpType.ADMIN, self.ip)
    self.user_ep = Endpoint.factory(Endpoint.EpType.USER, self.ip)
    self.security_ep = Endpoint.factory(Endpoint.EpType.SECURITY, self.ip)
    self.base_words = ['admin', 'user', 'security', 'login', 'exit']
    # One completer per mode; the meta completer switches between them.
    base_completer = WordCompleter(words=self.base_words, ignore_case=True)
    admin_completer = WordCompleter(words=self.admin_ep.commands.keys(),
                                    sentence=True)
    user_completer = WordCompleter(words=self.user_ep.commands.keys(),
                                   sentence=True)
    security_completer = WordCompleter(
        words=self.security_ep.commands.keys(), sentence=True)
    self.meta_completer = MetaCompleter()
    self.meta_completer.set_base_completer(base_completer)
    self.meta_completer.set_admin_completer(admin_completer)
    self.meta_completer.set_user_completer(user_completer)
    self.meta_completer.set_security_completer(security_completer)
    self.meta_completer.set_mode(InterpreterMode.BASE)
def get_endpoints(self):
    """Return a list of all Endpoints from Asterisk."""
    response = self._api.call('endpoints', http_method='GET')
    # Temporary stub until response parsing is implemented: hand back two
    # empty Endpoint wrappers instead of the real list.
    #endpoints = [Endpoint(x) for x in response]
    return [Endpoint(self._api), Endpoint(self._api)]
def test_mock_request(self):
    """The mock endpoint should answer 200 with Warfarin Sodium data."""
    endpoint = Endpoint("mock")
    response = endpoint.make_request(None, "warfarin")
    obj = response.json()
    # Bug fix: assertTrue("200", response.status_code) always passes — a
    # non-empty string is truthy and the second argument is only used as the
    # failure message. Compare the status code value instead.
    self.assertEqual(200, response.status_code)
    self.assertEqual("Warfarin Sodium", obj["results"][0]["brand_name"])
def __init__(self, sock, addr):
    # Wire up both bases explicitly: Endpoint owns the socket/address,
    # Greenlet makes this connection runnable as its own green thread.
    Endpoint.__init__(self, sock, addr)
    Greenlet.__init__(self)
    self.observers = BatchList()  # parties notified of events on this client
    self.gamedata = Gamedata()
    self.cmd_listeners = defaultdict(WeakSet)  # cmd -> weakly-held listeners
    self.current_game = None  # not attached to any game yet
def test_other_queries(self):
    """Cached lookups by brand name expose product NDC and dosage form."""
    cache = Endpoint("mock")
    cache.create_cache()
    rows = cache.query_cache('Aspirin', 'brand_name',
                             ['product_ndc', 'dosage_form'])
    self.assertEqual('0615-8058', rows[0]['product_ndc'])
    self.assertEqual('TABLET, COATED', rows[1]['dosage_form'])
def add_endpoint(self, remote_ip, remote_port):
    """Create, enable and register an Endpoint toward the remote peer.

    Returns the new endpoint's id.
    """
    local_address = self._find_unused_local_port()
    endpoint = Endpoint(local_address, (remote_ip, int(remote_port)),
                        self.transport)
    endpoint.enable()
    endpoint_id = endpoint.getId()
    self.end_points[endpoint_id] = endpoint
    return endpoint_id
def test_part_2_lookup_pharm_classes_warfarin(self):
    """Warfarin's pharm classes should map EPC and MoA as expected."""
    endpoint = Endpoint("mock")
    expected = {
        'EPC': ['Vitamin K Antagonist'],
        'MoA': ['Vitamin K Inhibitors'],
    }
    self.assertDictEqual(endpoint.get_pharm_class("warfarin"), expected)
def add_endpoint(my_request): if not my_request.pmh_url: return None endpoint_with_this_id = Endpoint.query.filter(Endpoint.repo_request_id==my_request.id).first() if endpoint_with_this_id: print u"one already matches {}".format(my_request.id) return None raw_endpoint = my_request.pmh_url clean_endpoint = raw_endpoint.strip() clean_endpoint = clean_endpoint.strip("?") clean_endpoint = re.sub(u"\?verb=.*$", "", clean_endpoint, re.IGNORECASE) print u"raw endpoint is {}, clean endpoint is {}".format(raw_endpoint, clean_endpoint) matching_endpoint = Endpoint() matching_endpoint.pmh_url = clean_endpoint repo_matches = my_request.matching_repositories() if repo_matches: matching_repo = repo_matches[0] print u"yay! for {} {} matches repository {}".format( my_request.institution_name, my_request.repo_name, matching_repo) else: print u"no matching repository for {}: {}".format( my_request.institution_name, my_request.repo_name) matching_repo = Repository() # overwrite stuff with request matching_repo.institution_name = my_request.institution_name matching_repo.repository_name = my_request.repo_name matching_repo.home_page = my_request.repo_home_page matching_endpoint.repo_unique_id = matching_repo.id matching_endpoint.email = my_request.email matching_endpoint.repo_request_id = my_request.id matching_endpoint.ready_to_run = True matching_endpoint.set_identify_and_initial_query() db.session.merge(matching_endpoint) db.session.merge(matching_repo) print u"added {} {}".format(matching_endpoint, matching_repo) print u"see at url http://unpaywall.org/sources/repository/{}".format(matching_endpoint.id) safe_commit(db) print "saved" print "now sending email" # get the endpoint again, so it gets with all the meta info etc matching_endpoint = Endpoint.query.get(matching_endpoint.id) matching_endpoint.contacted_text = "automated welcome email" matching_endpoint.contacted = datetime.datetime.utcnow().isoformat() safe_commit(db) send_announcement_email(matching_endpoint) print "email 
sent" return matching_endpoint
def __init__(self, host, port):
    """ Define the main components useful for the loop """
    # Endpoint bound to (host, port); the loop's working state starts empty.
    self.__endpoint = Endpoint(host, port)
    self.__connections = []  # active connections managed by the loop
    self.__pendings = []     # queued items awaiting processing (used elsewhere)
def __init__(self, url, options=None):
    """Create a client endpoint for `url`.

    Args:
        url: base URL this client talks to.
        options: optional dict of defaults; an "authentication" entry, when
            present, is handed to _setup_authentication().
    """
    Endpoint.__init__(self, "")
    self._url = url
    self._client = self  # this object serves as its own client
    self.default_options = options or {}
    self._session = None  # created lazily
    self.persist_session = True
    # setup authenticator
    if "authentication" in self.default_options:
        self._setup_authentication(self.default_options["authentication"])
def __init__(self, argv):
    # NOTE(review): argv is accepted but unused here — presumably kept for
    # interface compatibility; confirm against callers.
    logger = Logger().logger
    self.appConfig = AppConfig(logger)
    self.logger = logger
    self.quitting = False          # set when shutdown is requested
    self.handling_events = False   # true while the event loop is dispatching
    self.accounts = {}             # account registry, populated elsewhere
    self.logger.debug("Creating the Endpoint")
    self.endpoint = Endpoint(self)
def test_part_4_look_up_all_product_ndc_for_ingredients(self):
    """Drugs with CLOPIDOGREL BISULFATE as an active ingredient expose their NDC."""
    cache = Endpoint("mock")
    cache.create_cache()
    rows = cache.query_cache('CLOPIDOGREL BISULFATE', 'active_ingredients',
                             ['product_ndc'])
    self.assertEqual('21695-665', rows[0]['product_ndc'])
def test_part_4_look_up_active_ingredients_for_product_ndc(self):
    """Product 21695-665 should list clopidogrel bisulfate as its ingredient."""
    cache = Endpoint("mock")
    cache.create_cache()
    rows = cache.query_cache('21695-665', 'product_ndc',
                             ['active_ingredients'])
    expected = [{"strength": "75 mg/1", "name": "CLOPIDOGREL BISULFATE"}]
    self.assertEqual(rows[0]['active_ingredients'], expected)
def setUp(self):
    # Fixture: two videos (id, size), one cache server (id, capacity).
    self.videos = [Video(0, 50), Video(1, 30)]
    self.cache_servers = [CacheServer(0, 40)]
    # NOTE(review): both endpoints are constructed with id 0 — the second
    # looks like a copy-paste and probably should be Endpoint(1, ...);
    # confirm against Endpoint's constructor and the tests using this.
    self.endpoints = [
        Endpoint(0, {
            0: 100,
            1: 200
        }, [(10, self.cache_servers[0])]),
        Endpoint(0, {
            0: 100,
            1: 50
        })
    ]
def _parse_scenarios(scenarios, lab):
    """Parse 'From X to Y' lines into {source Endpoint: [dest Endpoints]}.

    Args:
        scenarios: multi-line text; each line contains "... From <src> to <dst>".
        lab: unused here; kept for interface compatibility.

    Raises:
        Re-raises any parsing error after reporting the offending line.
    """
    result = {}
    for line in scenarios.splitlines():
        try:
            _, flow = line.split('From ')
            from_, to_ = flow.split(' to ')
            from_point = Endpoint(from_)
            to_point = Endpoint(to_)
            result.setdefault(from_point, []).append(to_point)
        except Exception:
            # Narrowed from a bare `except:`, which would also trap
            # KeyboardInterrupt/SystemExit. Still reports then re-raises.
            print('*** Failed to parse scenario {}'.format(line))
            raise
    return result
def __init__(self, hostname, username, password, port_or_endpoint=None, path=None):
    """Connect an ISQL wrapper and resolve the SPARQL endpoint.

    Args:
        port_or_endpoint: either the full endpoint URL (str) or just the
            port number (int); anything else raises.
        path: endpoint path, used only with the port form.
    """
    self.isql = ISQLWrapper(hostname, username, password)
    # isinstance() instead of type() comparison: idiomatic and subclass-safe.
    if isinstance(port_or_endpoint, str):
        # full endpoint path
        self.endpoint = Endpoint(port_or_endpoint)
    elif isinstance(port_or_endpoint, int):
        # just the port
        self.endpoint = Endpoint(hostname, port_or_endpoint, path)
    else:
        raise Exception("Incorrect endpoint url or port.")
def get_results_sync(
    browser,
    h3: bool,
    endpoint: Endpoint,
    warmup: bool,
) -> json:
    """Navigate a fresh page to the endpoint's URL and collect navigation timing.

    Returns the performance navigation-timing entry as a dict, augmented with
    a 'server' key from the response headers; on a 404 or any exception the
    result is {'error': ...} instead. The browser is closed either way.
    """
    # set up the browser context and page
    context = browser.new_context()
    page = context.new_page()
    url = endpoint.get_url()
    logger.debug(f"Navigating to url: {url}")
    # warm up the browser
    warmup_if_specified_sync(page, url, warmup)
    # attempt to navigate to the url
    try:
        # set the timeout to be 1 min, because under some bad network condition,
        # connection and data transfer take longer
        page.set_default_timeout(60000)
        response = page.goto(url)
        # getting performance timing data
        # if we don't stringify and parse, things break
        timing_function = '''JSON.stringify(window.performance.getEntriesByType("navigation")[0])'''
        performance_timing = json.loads(page.evaluate(timing_function))
        performance_timing['server'] = response.headers['server']
        if response.status == 404:
            logger.error("404 Response Code")
            performance_timing = {'error': '404'}
    except Exception as e:
        # if we run into error, write it in the database
        logger.error(str(e))
        performance_timing = {'error': str(e)}
        pass
    browser.close()
    return performance_timing
def __init__(self, endpoint, timestamp=None, value=None):
    """Store a sample; `endpoint` may be an Endpoint or a dict describing one.

    Note: if `endpoint` is neither, self.endpoint is left unset.
    """
    if isinstance(endpoint, Endpoint):
        self.endpoint = endpoint
    elif isinstance(endpoint, dict):
        # Build the Endpoint from its dict form (service name + IPv4).
        self.endpoint = Endpoint(endpoint["service_name"], endpoint["ipv4"])
    self.timestamp = timestamp
    self.value = value
def get_endpoints(urls, config):
    """ get all endpoints from config

    Builds an Endpoint per entry in `urls`, filling in credentials from the
    environment config when the entry has none. Entries that fail to load
    are skipped with a warning (best-effort).
    """
    endpoints = []
    for e in urls:
        try:
            # now before we create the endpoint object lets see if we specified
            # credentials; if not we add the credentials from the environment
            if ('credentials' not in e
                    or 'username' not in e['credentials']
                    or 'password' not in e['credentials']):
                e['credentials'] = {
                    'username': config.http_user,
                    'password': config.http_pass
                }
            # add the timezone info to the endpoint (to render the correct date/time)
            e['timezone'] = config.timezone
            # create an endpoint object
            endpoints.append(Endpoint(**e))
        except Exception:
            # Narrowed from a bare `except:`; logger.warning replaces the
            # deprecated logger.warn, and e.get('id') cannot itself raise
            # inside the handler the way e['id'] could.
            logger.warning(
                "Not able to load entrypoint definition for '{}'.".format(
                    e.get('id')))
    return endpoints
def parse(self):
    """Parse the input file: global parameters, then endpoints (with their
    cache latencies), then requests, populating self.videos/self.endpoints."""
    with open(self.filename, 'r') as input_f:
        lines = [line.strip('\n') for line in input_f.readlines()]
    # Global parameters.
    self.n_videos, self.n_endpoints, self.n_requests, self.n_caches, self.cache_size = to_int(
        lines[0])
    self.video_sizes = to_int(lines[1])
    self.videos = [Video(i) for i in range(self.n_videos)]
    # Endpoints
    number_handled = 0
    i = 2
    while number_handled != self.n_endpoints:
        # Endpoint header: datacenter latency + number of cache lines.
        dc_latency, caches = to_int(lines[i])
        # NOTE(review): i is advanced by `caches` BEFORE the latency lines
        # are read via lines[i + j] below — verify these offsets against the
        # input format; they look like they may skip the latency block.
        i += caches
        # Register the latencies for this datacenter.
        latencies = {}
        for j in range(1, 1 + caches):
            info = to_int(lines[i + j])
            latencies[info[0]] = info[1]
        self.endpoints.append(
            Endpoint(number_handled, dc_latency, latencies))
        number_handled += 1
        i += 1
    # Requests: one "<video> <source endpoint> <quantity>" line each,
    # registered on both the endpoint and the video.
    number_handled = 0
    for j in range(i, self.n_requests + i):
        video, source, qty = to_int(lines[j])
        self.endpoints[source].requests.append(
            Request(number_handled, source, video, qty))
        self.videos[video].requests.append(
            Request(number_handled, source, video, qty))
def read_data(filename):
    """Read ../inputs/<filename>.in; return (cache_servers, videos, endpoints)."""
    with open('../inputs/%s.in' % filename) as f:
        # Header: [_, #endpoints, #requests, #caches, cache size].
        header = line_to_int_list(f)
        num_endpoint = header[1]
        num_request = header[2]
        num_cache = header[3]
        cache_size = header[4]
        cache_servers = [CacheServer(i, cache_size) for i in range(num_cache)]
        # One size per video.
        video_sizes = line_to_int_list(f)
        videos = [Video(i, s) for i, s in enumerate(video_sizes)]
        # Each endpoint: header line, then one (cache id, latency) line per
        # connected cache server.
        end_points = []
        for i in range(num_endpoint):
            ep_header = line_to_int_list(f)
            endpoint = Endpoint(i)
            for _ in range(ep_header[1]):
                cache_line = line_to_int_list(f)
                endpoint.latency.append(
                    (cache_line[1], cache_servers[cache_line[0]]))
            end_points.append(endpoint)
        # Requests: (video, endpoint, quantity) per line.
        for _ in range(num_request):
            req = line_to_int_list(f)
            end_points[req[1]].requests.append((req[2], videos[req[0]]))
    return (cache_servers, videos, end_points)
def __init__(self):
    """Load server host/port and the endpoint table from the config file."""
    parser = Parser(CONFIG_PATH)
    # Parse the server section once instead of once per field.
    server = parser.server()
    self._host = server['host']
    self._port = server['port']
    # Map each endpoint URL to its Endpoint object.
    self._endpoints = {}
    for ep in parser.endpoints():
        self._endpoints[ep['endpoint_url']] = Endpoint(ep)
def load_data(self, input_file):
    """Parse the problem file: header, video sizes, endpoints (with cache
    connections) and video requests, populating self.* collections."""
    with open(input_file, 'r') as f:
        # Header: #videos, #endpoints, #requests, #caches, cache capacity.
        self.n_videos, self.n_endpoints, self.n_requests, self.n_caches, self.s_caches = list(
            map(int, f.readline().split()))
        self.video_sizes = list(map(int, f.readline().split()))
        # Per endpoint: "<dc latency> <#caches>" header, then one
        # "<cache id> <latency>" line per connected cache.
        for i in range(self.n_endpoints):
            datacenter_latency, num_connected_caches = list(
                map(int, f.readline().split()))
            e = Endpoint(datacenter_latency)
            for j in range(num_connected_caches):
                id_cache, latency_cache = list(
                    map(int, f.readline().split()))
                conn = CacheConnection(id_cache, latency_cache)
                e.connected_caches.append(conn)
            self.endpoints.append(e)
        # Per request: "<video> <endpoint> <count>"; indexed both globally
        # and on the owning endpoint.
        for i in range(self.n_requests):
            video, endpoint, num_requests = list(
                map(int, f.readline().split()))
            r = VideoRequest(video, endpoint, num_requests)
            self.endpoints[endpoint].video_requests.append(r)
            self.requests.append(r)
async def launch_firefox_async(
    pw_instance: "AsyncPlaywrightContextManager",
    h3: bool,
    endpoint: Endpoint,
    warmup: bool,
    qlog: bool,
    pcap: bool,
    expnt_id: int,
    run_id: int,
) -> json:
    """Launch Firefox via Playwright configured for h2 or h3 (+qlog/pcap).

    Returns the browser instance, or None when the launch fails.
    """
    # set up firefox preference
    firefox_prefs = {}
    firefox_prefs["privacy.reduceTimerPrecision"] = False

    if h3:
        if qlog:
            firefox_prefs["network.http.http3.enable_qlog"] = True  # enable qlog
        firefox_prefs["network.http.http3.enabled"] = True  # enable h3 protocol
        firefox_prefs["network.http.spdy.enabled.http2"] = False  # disable h2 protocol
        firefox_prefs["network.http.spdy.enabled"] = False  # disable h1.1 protocol

        # the openlightspeed server works with a different h3 version than
        # the rest of the servers
        port = endpoint.get_port()
        h3_version = "29"
        if endpoint.get_endpoint() == "server-openlitespeed":
            h3_version = "27"
        # Bug fix: the alt-svc mapping is "<host>;h3-<ver>=:<port>", but the
        # original interpolated endpoint.port as the host field. Use the
        # domain accessor, consistent with the chromium/edge launchers.
        domain = endpoint.get_domain()
        firefox_prefs["network.http.http3.alt-svc-mapping-for-testing"] = \
            f"{domain};h3-{h3_version}=:{port}"

    # attempt to launch browser
    try:
        if pcap:
            pcap_file = f"{os.getcwd()}/results/packets/async-{expnt_id}/firefox/{run_id}-{h3}.keys"
            return await pw_instance.firefox.launch(
                headless=True,
                firefox_user_prefs=firefox_prefs,
                env={"SSLKEYLOGFILE": pcap_file})
        else:
            return await pw_instance.firefox.launch(
                headless=True,
                firefox_user_prefs=firefox_prefs,
            )
    except Exception as e:
        # if browser fails to launch, stop this request and write to the database
        logger.exception(str(e))
        return None
def get_endpoint(self, object_id):
    """Return Endpoint specified by object_id."""
    self._api.call('endpoints', http_method='GET', object_id=object_id)
    # Temporary until method is implemented: the API payload is ignored and
    # an empty Endpoint wrapper is returned instead.
    #endpoint = Endpoint(result)
    return Endpoint(self._api)
def __repr__(self):
    """'ClassName:host:username' once an account is attached; otherwise
    fall back to the plain Endpoint repr."""
    acc = self.account
    if not acc:
        return Endpoint.__repr__(self)
    host = self.address[0]
    username = acc.username.encode('utf-8')
    return '%s:%s:%s' % (self.__class__.__name__, host, username)
def test_part_2_lookup_pharm_classes_aspirin(self):
    """Aspirin's pharm classes cover ingredient, PE, EPC and MoA axes."""
    endpoint = Endpoint("mock")
    expected = {
        'Chemical/Ingredient': ['Nonsteroidal Anti-inflammatory Compounds'],
        'PE': [
            'Decreased Prostaglandin Production',
            'Decreased Platelet Aggregation'
        ],
        'EPC': [
            'Nonsteroidal Anti-inflammatory Drug',
            'Platelet Aggregation Inhibitor'
        ],
        'MoA': ['Cyclooxygenase Inhibitors'],
    }
    self.assertDictEqual(endpoint.get_pharm_class("aspirin"), expected)
def read(input):
    """Parse the input file; return (conf, video_sizes, endpoints).

    Note: the parameter is named `input` (shadowing the builtin) to keep the
    existing call signature.
    """
    f = open(input, 'r')

    def next_ints():
        # Each line is a space-separated list of integers.
        return [int(x) for x in f.readline().replace('\n', '').split(' ')]

    conf = next_ints()
    video_sizes = next_ints()

    # Per endpoint: header (latency, #caches) then one cache line each.
    endpoints = []
    for i in range(conf[INDEX_NUMBER_OF_ENDPOINTS]):
        ep_line = next_ints()
        endpoint = Endpoint(i, ep_line[0])
        for _ in range(ep_line[1]):
            cache_line = next_ints()
            endpoint.add_cache(cache_line[0], cache_line[1])
        endpoints.append(endpoint)

    # Per request: (video id, endpoint id, quantity).
    for _ in range(int(conf[INDEX_NUMBER_OF_REQUESTS])):
        req = next_ints()
        endpoints[req[1]].add_request(req[0], req[2])

    f.close()
    return conf, video_sizes, endpoints
def __init__(self, name, version, context, service_url=None, **kwargs):
    """Build the API object; a service_url seeds both the sandbox and the
    production endpoint with the same inline URL."""
    super().__init__()
    self.client = None  # REST API Client
    self.id = None
    self._data = None
    self.name = name
    self.context = context
    self.version = version
    self.endpoint = DotDict()
    if service_url:
        # Point both gateway environments at the same inline service URL.
        for ep_type in (Endpoint.CONST.TYPES.SANDBOX,
                        Endpoint.CONST.TYPES.PRODUCTION):
            ep_name = "{}_{}".format(ep_type, name)
            self.endpoint[ep_type] = Endpoint(ep_name, 'http', service_url)
    if kwargs:
        self._parse_json(kwargs)
async def launch_chromium_async(
    pw_instance: "AsyncPlaywrightContextManager",
    h3: bool,
    endpoint: Endpoint,
    warmup: bool,
    qlog: bool,
    pcap: bool,
    expnt_id: int,
    run_id: int,
) -> json:
    """Launch Chromium via Playwright configured for h2 or h3 (+qlog/pcap).

    Returns the browser instance, or None when the launch fails.
    """
    chromium_args = []
    if h3:
        # force QUIC/h3-29 toward the endpoint's origin and turn off h2
        chromium_args = [
            "--enable-quic", "--quic-version=h3-29", "--disable-http2"
        ]
        origin = f"{endpoint.get_domain()}:{endpoint.get_port()}"
        chromium_args.append(f"--origin-to-force-quic-on={origin}")
        if qlog:
            # netlog output goes under results/qlogs/async-<experiment>/chromium/
            qlog_dir = f"{os.getcwd()}/results/qlogs/async-{expnt_id}/chromium/"
            chromium_args.append(f"--log-net-log={qlog_dir}/{run_id}.netlog")

    # attempt to launch the browser; on failure log the error and return None
    try:
        if not pcap:
            return await pw_instance.chromium.launch(
                headless=True,
                args=chromium_args,
            )
        pcap_file = f"{os.getcwd()}/results/packets/async-{expnt_id}/chromium/{run_id}-{h3}.keys"
        return await pw_instance.chromium.launch(
            headless=True,
            args=chromium_args,
            env={"SSLKEYLOGFILE": pcap_file})
    except Exception as e:
        logger.error(str(e))
        return None
async def launch_edge_async(
    pw_instance: "AsyncPlaywrightContextManager",
    h3: bool,
    endpoint: Endpoint,
    warmup: bool,
    qlog: bool,
    pcap: bool,
    expnt_id: int,
    run_id: int,
) -> json:
    """Launch Edge (chromium driver + msedge binary), h2 or h3 (+qlog/pcap).

    Returns the browser instance, or None when the launch fails.
    """
    edge_args = []
    if h3:
        # force QUIC/h3-29 toward the endpoint's origin and turn off h2
        edge_args = [
            "--enable-quic", "--quic-version=h3-29", "--disable-http2"
        ]
        origin = f"{endpoint.get_domain()}:{endpoint.get_port()}"
        edge_args.append(f"--origin-to-force-quic-on={origin}")
        if qlog:
            qlog_dir = f"{os.getcwd()}/results/async-qlogs/{expnt_id}/edge/"
            edge_args.append(f"--log-net-log={qlog_dir}/{run_id}.netlog")

    # attempt to launch the browser; on failure log the error and return None
    try:
        if not pcap:
            return await pw_instance.chromium.launch(
                headless=True,
                executable_path='/opt/microsoft/msedge-dev/msedge',
                args=edge_args,
            )
        pcap_file = f"{os.getcwd()}/results/packets/async-{expnt_id}/edge/{run_id}-{h3}.keys"
        return await pw_instance.chromium.launch(
            headless=True,
            executable_path='/opt/microsoft/msedge-dev/msedge',
            args=edge_args,
            env={"SSLKEYLOGFILE": pcap_file})
    except Exception as e:
        logger.error(str(e))
        return None
def _parse_endpoints(self, n):
    """Parse the endpoints.

    The endpoints section starts after the first 2 lines. Each endpoint is
    described by a header line ("<datacenter latency> <#connected caches>")
    followed by one "<cache id> <latency>" line per connected cache server.

    Args:
        n: the number of endpoints to parse

    Returns:
        a generator over the Endpoint objects instantiated
    """
    with open(self._data_set, 'r') as f:
        f.readline()  # skip the 1st line
        f.readline()  # skip the 2nd line
        for i in xrange(n):
            data_center_latency, n_cache_servers = [
                int(v) for v in f.readline().split()
            ]
            endpoint = Endpoint(i, data_center_latency)
            for _ in xrange(n_cache_servers):
                id_, latency = [int(v) for v in f.readline().split()]
                cache_server = self.cache_servers[id_]
                # Register the latency on both sides of the relation.
                endpoint.latencies[cache_server] = latency
                cache_server.endpoints.append(endpoint)
            yield endpoint
            # Removed dead code: the original decremented `n` after each
            # yield, but xrange(n) was already bound, so it had no effect.
def parse(cls, id, data):
    """Build a Token (project, user, roles, service catalog) from a
    Keystone-style token payload.

    Args:
        id: token id to assign.
        data: dict carrying the payload under its "token" key.

    Returns:
        the populated Token.
    """
    token = Token()
    token.setId(id)
    data = data["token"]
    issued_at = None
    expires_at = None
    # Timestamps may or may not carry a trailing 'Z'; try both formats.
    try:
        issued_at = datetime.strptime(data["issued_at"],
                                      "%Y-%m-%dT%H:%M:%S.%fZ")
    except Exception:
        issued_at = datetime.strptime(data["issued_at"],
                                      "%Y-%m-%dT%H:%M:%S.%f")
    try:
        expires_at = datetime.strptime(data["expires_at"],
                                       "%Y-%m-%dT%H:%M:%S.%fZ")
    except Exception:
        expires_at = datetime.strptime(data["expires_at"],
                                       "%Y-%m-%dT%H:%M:%S.%f")
    token.setCreation(issued_at)
    token.setExpiration(expires_at)
    # Project scope.
    project = Project()
    project.setId(data["project"]["id"])
    project.setName(data["project"]["name"])
    token.setProject(project)
    # Owning user, bound to the same project.
    user = User()
    user.setId(data["user"]["id"])
    user.setName(data["user"]["name"])
    user.setProjectId(data["project"]["id"])
    token.setUser(user)
    if "extras" in data:
        token.getExtras().update(data["extras"])
    # Roles granted by this token.
    for info in data["roles"]:
        role = Role()
        role.setId(info["id"])
        role.setName(info["name"])
        token.getRoles().append(role)
    # Service catalog: each service with its list of endpoints.
    for service_info in data["catalog"]:
        service = Service()
        service.setId(service_info["id"])
        service.setType(service_info["type"])
        service.setName(service_info["name"])
        for endpoint_info in service_info["endpoints"]:
            endpoint = Endpoint()
            endpoint.setId(endpoint_info["id"])
            endpoint.setInterface(endpoint_info["interface"])
            endpoint.setRegion(endpoint_info["region"])
            endpoint.setRegionId(endpoint_info["region_id"])
            endpoint.setURL(endpoint_info["url"])
            service.getEndpoints().append(endpoint)
        token.getServices().append(service)
    return token
def __init__(self, sock, addr):
    # Endpoint owns the socket/address; Greenlet makes this runnable as its
    # own green thread.
    Endpoint.__init__(self, sock, addr)
    Greenlet.__init__(self)
    self.ctlcmds = Channel()  # channel of incoming control commands
    # recording=True — presumably keeps the game data for later playback;
    # confirm against Gamedata.
    self.gamedata = Gamedata(recording=True)
def close(self):
    """Close the endpoint, then detach and kill the greenlet (if any)."""
    Endpoint.close(self)
    gr, self.greenlet = self.greenlet, None
    if gr:
        gr.kill(EndpointDied)
def close(self):
    """Close the underlying endpoint, then kill this greenlet."""
    Endpoint.close(self)
    self.kill(EndpointDied)
def gwrite(self, tag, data):
    """Encode `data` and append it, with its tag, to the game history."""
    log.debug('GAME_WRITE: %s', repr([tag, data]))
    # Round-trip through Endpoint.encode + json so the history holds the
    # same representation that would go over the wire.
    entry = [tag, json.loads(Endpoint.encode(data))]
    self.gdhistory.append(entry)
def publish(self, topic, data):
    """Publish `data` on the 'thb.<node>.<topic>' channel as raw JSON."""
    channel = 'thb.{}.{}'.format(self.node, topic)
    payload = Endpoint.encode(data, Endpoint.FMT_RAW_JSON)
    self.pub.publish(channel, payload)