def get_tunnel_details(self, job_id: str) -> Tuple[str, str]:
    """Return (ssh_port, ip_address) for the ssh container backing *job_id*.

    Polls Nomad until the job reports "running", then resolves the node the
    allocation landed on and the dynamically assigned "ssh" port.

    Raises:
        TunnelError: if the job does not reach "running" within the
            polling budget.
    """
    # FIXME: nasty blocking loop, should be asynced or something.
    import time  # local import keeps this fix self-contained

    attempts = 0
    while self.nomad_client.job[job_id]["Status"] != "running":
        attempts += 1
        if attempts > 1000:
            raise TunnelError(detail="The tunnel failed to start.")
        # Back off briefly instead of hammering the Nomad API in a hot loop.
        time.sleep(0.01)

    job_info = self.nomad_client.job.get_allocations(job_id)
    allocation_info = self.nomad_client.allocation.get_allocation(
        dig(job_info, "0/ID")
    )
    allocation_node = values(allocation_info, "NodeID")
    nodes = self.nomad_client.nodes.get_nodes()
    ip_address = next(x["Address"] for x in nodes if x["ID"] in allocation_node)
    allocated_ports = values(allocation_info, "Resources/Networks/0/DynamicPorts/*")
    ssh_port = next(x for x in allocated_ports if x["Label"] == "ssh")["Value"]
    if current_app.config["ENV"] == "development":
        # Local dev runs behind a fixed host rather than a Nomad node address.
        ip_address = current_app.config["SEA_HOST"]
    return (ssh_port, ip_address)
def test_subdomain_filter(self, client, session, current_user):
    """Can filter a subdomain using JSON-API compliant filters"""
    sub1 = subdomain.ReservedSubdomainFactory(user=current_user, name="submarine")
    sub2 = subdomain.ReservedSubdomainFactory(user=current_user, name="sublime")
    # Bug fix: Session.add() accepts a single instance; the old
    # session.add(sub1, sub2) silently passed sub2 as an internal flag
    # and never persisted it. add_all persists both.
    session.add_all([sub1, sub2])
    session.flush()

    res = client.get("/subdomains?filter[name]=submarine")

    assert_valid_schema(res.get_json(), "subdomains.json")
    assert str(sub1.id) in values(res.get_json(), "data/*/id")
    assert str(sub2.id) not in values(res.get_json(), "data/*/id")
def test_subdomain_index(self, client, current_user):
    """User can list all of their subdomains"""
    owned = subdomain.SubdomainFactory.create(user=current_user)

    response = client.get("/subdomains")

    assert_valid_schema(response.get_json(), "subdomains.json")
    assert str(owned.id) in values(response.get_json(), "data/*/id")
def test_subdomain_get(self, client, current_user):
    """User can get a single subdomain"""
    owned = subdomain.SubdomainFactory.create(user=current_user)

    response = client.get(f"/subdomains/{owned.id}")

    assert_valid_schema(response.get_json(), "subdomain.json")
    assert str(owned.id) in values(response.get_json(), "data/id")
async def show_keys(
    client: influx.QueryClient,
    database: Optional[str] = None,
    measurement: Optional[str] = None,
    **_  # allow, but discard all other kwargs
) -> dict:
    """Selects available keys (without data) from Influx.

    Returns a dict mapping each measurement name to the list of its
    field key names.
    """
    query = 'SHOW FIELD KEYS'
    if measurement:
        # Bug fix: this template was missing the f-prefix, so the literal
        # text {measurement} was sent to Influx instead of the actual name.
        query += f' FROM "{measurement}"'

    params = _prune(locals(), ['query', 'database', 'measurement'])
    query_response = await client.query(**params)

    response = dict()
    for path, meas_name in dpath.search(query_response,
                                        'results/*/series/*/name',
                                        yielded=True,
                                        dirs=False):
        # results/[index]/series/[index]/values/*/0
        values_glob = '/'.join(path.split('/')[:-1] + ['values', '*', '0'])
        response[meas_name] = dpath.values(query_response, values_glob)

    return response
def test_tunnel_open_with_subdomain(self, client, current_user, session):
    """User can open a tunnel when providing a subdomain they own"""
    owned_sub = subdomain.ReservedSubdomainFactory(
        user=current_user, name="testtunnelsubdomain"
    )
    session.add(owned_sub)
    session.flush()

    payload = {
        "data": {
            "type": "tunnel",
            "attributes": {
                "port": ["http"],
                "sshKey": "i-am-a-lousy-public-key",
            },
            "relationships": {
                "subdomain": {
                    "data": {"type": "subdomain", "id": str(owned_sub.id)}
                }
            },
        }
    }
    res = client.post("/tunnels", json=payload)

    assert res.status_code == 201
    assert len(values(res.get_json(), "data/id")) == 1
    assert_valid_schema(res.get_data(), "tunnel.json")
    assert Tunnel.query.filter_by(user=current_user).count() == 1
def test_box_open_with_config(self, client, current_user, session):
    """User can open a box when providing a config they own"""
    owned_conf = config.ConfigFactory(user=current_user)
    session.add(owned_conf)
    session.flush()

    payload = {
        "data": {
            "type": "box",
            "attributes": {"sshKey": "i-am-a-lousy-public-key"},
            "relationships": {
                "config": {
                    "data": {"type": "config", "id": str(owned_conf.id)}
                }
            },
        }
    }
    res = client.post("/boxes", json=payload)

    assert res.status_code == 201
    assert len(values(res.get_json(), "data/id")) == 1
    assert_valid_schema(res.get_data(), "box.json")
    assert Box.query.filter_by(user=current_user).count() == 1
def _set_if_absent(d, path, value):
    """Ensure *path* exists in dict *d*, creating it with value() when missing.

    A '*' wildcard in *path* is expanded against the elements currently
    present in *d*, and each concrete index is processed recursively.

    Args:
        d: the (nested) dict to update in place.
        path: a dpath-style path, optionally containing '*' wildcards.
        value: zero-arg callable producing the default to insert.
    """
    if '*' in path:
        # Bug fix: split on the FIRST wildcard only. A plain split('*')
        # raises ValueError for paths with several '*' segments; the
        # remaining wildcards are resolved by the recursive calls below.
        pre, post = path.split('*', 1)
        elem_count = len(du.values(d, f'{pre}*'))
        for i in range(elem_count):
            _set_if_absent(d, f'{pre}{i}{post}', value)
    elif du.search(d, path) == {}:
        du.new(d, path, value())
def test_box_filter_by_config_name(self, client, session, current_user):
    """Can filter a config using JSON-API compliant filters"""
    conf1 = config.ConfigFactory(user=current_user, name="sub-sandwich")
    conf2 = config.ConfigFactory(user=current_user, name="subscription")
    test_box1 = box.BoxFactory(config=conf1)
    test_box2 = box.BoxFactory(config=conf2)
    # Bug fix: Session.add() takes a single instance; the old
    # session.add(test_box1, test_box2) never persisted the second box.
    session.add_all([test_box1, test_box2])
    session.flush()

    res = client.get("/boxes?filter[config][name]=sub-sandwich")

    assert_valid_schema(res.get_json(), "boxes.json")
    assert str(test_box1.id) in values(res.get_json(), "data/*/id")
    assert str(test_box2.id) not in values(res.get_json(), "data/*/id")
def test_get_tunnel(self, client, current_user, session):
    """User can get a single tunnel"""
    owned_tun = tunnel.TunnelFactory(subdomain__user=current_user)
    session.add(owned_tun)
    session.flush()

    response = client.get(f"/tunnels/{owned_tun.id}")

    assert_valid_schema(response.get_json(), "tunnel.json")
    assert str(owned_tun.id) in values(response.get_json(), "data/id")
def test_tunnel_index(self, client, current_user, session):
    """User can list all of their tunnels"""
    owned_tun = tunnel.TunnelFactory(subdomain__user=current_user)
    session.add(owned_tun)
    session.flush()

    response = client.get("/tunnels")

    assert_valid_schema(response.get_json(), "tunnels.json")
    assert str(owned_tun.id) in values(response.get_json(), "data/*/id")
def test_get_box(self, client, current_user, session):
    """User can get a single box"""
    owned_box = box.BoxFactory(config__user=current_user)
    session.add(owned_box)
    session.flush()

    response = client.get(f"/boxes/{owned_box.id}")

    assert_valid_schema(response.get_json(), "box.json")
    assert str(owned_box.id) in values(response.get_json(), "data/id")
def test_box_index(self, client, current_user, session):
    """User can list all of their boxes"""
    owned_box = box.BoxFactory(config__user=current_user)
    session.add(owned_box)
    session.flush()

    response = client.get("/boxes")

    assert_valid_schema(response.get_json(), "boxes.json")
    assert str(owned_box.id) in values(response.get_json(), "data/*/id")
def dump(service, format, **kwargs):
    """Dumps timeseries data to files.

    Dumps the timeseries data for all sensors in the organization. One
    file is generated per sensor, named after the sensor id, with the
    file extension derived from the requested dump format.
    """
    sensor_ids = dpath.values(service.get_sensors(), '/data/*/id')
    ts.dump(service, sensor_ids, format, **kwargs)
def sweep_real_time(self, req_id: int, encoding: str = 'utf-8') -> None:
    """Poll every market for request *req_id* and record its best price.

    Appends one (market_code, market_name, best_price, deeplink_url)
    entry per market to REQUESTS[req_id].table, then marks the request
    complete by zeroing its poll_id.
    """
    global REQUESTS  # pylint: disable=global-statement,global-variable-not-assigned
    # TODO: use a proper database
    print(f'> Starting to sweep markets for request: {req_id}')
    markets = self.get_markets()
    for market in markets:
        url_to_poll = self.get_session(market)
        bstr = requests.get(url_to_poll).content
        res = json.loads(bstr.decode(encoding))
        prices = dpath.values(res, '/Itineraries/*/PricingOptions/*/Price')
        urls = dpath.values(res, '/Itineraries/*/PricingOptions/*/DeeplinkUrl')
        # Retrieve best price per market
        sorted_res = sorted(zip(prices, urls))
        # Bug fix: zip(*[]) raises ValueError ("not enough values to
        # unpack"), so the emptiness guard must come BEFORE unpacking,
        # not after as the old `if prices:` check did.
        if not sorted_res:
            continue
        prices, urls = zip(*sorted_res)
        entry = (market, markets[market], prices[0], urls[0])
        # Update reqs -> update file
        REQUESTS[req_id].table.append(entry)
    # Mark job completion
    REQUESTS[req_id].poll_id = 0
def dump(service, label, format, **kwargs):
    """Dumps timeseries data to files.

    Dumps the timeseries data for all sensors in a given LABEL. One file
    is generated per sensor, named after the sensor id, with the file
    extension derived from the requested dump format.
    """
    label_id = util.lookup_resource_id(service.get_labels, label)
    sensor_ids = dpath.values(service.get_label_sensors(label_id), '/data/*/id')
    ts.dump(service, sensor_ids, format, **kwargs)
def test_tunnel_filter_by_subdomain_name(self, client, session, current_user):
    """Can filter a subdomain using JSON-API compliant filters"""
    sub1 = subdomain.ReservedSubdomainFactory(user=current_user, name="sub-sandwich")
    sub2 = subdomain.ReservedSubdomainFactory(user=current_user, name="subscription")
    tun1 = tunnel.TunnelFactory(subdomain=sub1)
    tun2 = tunnel.TunnelFactory(subdomain=sub2)
    # Bug fix: Session.add() takes a single instance; the old
    # session.add(tun1, tun2) never persisted the second tunnel.
    session.add_all([tun1, tun2])
    session.flush()

    res = client.get("/tunnels?filter[subdomain][name]=sub-sandwich")

    assert_valid_schema(res.get_json(), "tunnels.json")
    assert str(tun1.id) in values(res.get_json(), "data/*/id")
    assert str(tun2.id) not in values(res.get_json(), "data/*/id")
def _update_label_sensors(ctx, label, sensor, set_func):
    """Apply *set_func* to a label's current sensors and the given sensors,
    then store the resulting sensor set back on the label."""
    service = ctx.find_object(helium.Service)
    label = util.lookup_resource_id(service.get_labels, label)
    # Current membership of the label
    existing = service.get_label_sensors(label).get('data')
    existing_ids = set(dpath.values(existing, "*/id"))
    # Resolve every requested sensor to its full id
    requested_ids = {
        util.lookup_resource_id(service.get_sensors, sensor_id)
        for sensor_id in sensor
    }
    # Perform the set operation; normalise a None result to an empty list
    result = set_func(existing_ids, requested_ids)
    if result is None:
        result = []
    service.update_label_sensors(label, result)
def create_filename_list(photo_json):
    """Build a filename (per photo) for uploading, derived from like counts.

    The base name is the photo's like count; duplicates are disambiguated
    by appending the photo's date. Returns a warning string when the
    album contains no images.
    """
    logging.info('Forming a list of filenames to upload')
    sizes_list = dp.values(photo_json, "//**/sizes")
    likes_list = dp.values(photo_json, "//**/likes/count")
    date_list = dp.values(photo_json, "//**/date")
    if not sizes_list:
        logging.warning('There is no images in the profile album!')
        logging.info('Execution finished')
        return 'Warning! There is no images in the profile album!'

    vk_photo_filenames = []
    for likes, timestamp in zip(likes_list, date_list):
        if likes not in vk_photo_filenames:
            # First photo with this like count: bare count is unique enough.
            vk_photo_filenames.append(likes)
        else:
            # Collision: qualify the name with the photo's formatted date.
            stamp = datetime.utcfromtimestamp(timestamp).strftime(
                '%Y-%m-%d_%H:%M:%S')
            vk_photo_filenames.append(f'{likes}_{stamp}')
    logging.info('Completed! List of filenames for uploading formatted')
    return vk_photo_filenames
def create_file_type_list(photo_json):
    """Build the list of file types (one per photo) for uploading.

    Returns a warning string when the album contains no images.
    """
    logging.info('Forming a list of filetypes to upload')
    sizes_list = dp.values(photo_json, "//**/sizes")
    if not sizes_list:
        logging.warning('There is no images in the profile album!')
        logging.info('Execution finished')
        return 'Warning! There is no images in the profile album!'

    # Take the type marker of each photo's last size entry.
    types_list = [photo_sizes[-1]['type'] for photo_sizes in sizes_list]
    logging.info('Completed! List of filetypes for uploading formatted')
    return types_list
def create_upload_links_list(photo_json):
    """Build the list of photo URLs (one per photo) for uploading.

    Returns a warning string when the album contains no images.
    """
    logging.info('Forming a list of links to upload')
    sizes_list = dp.values(photo_json, "//**/sizes")
    if not sizes_list:
        logging.warning('There is no images in the profile album!')
        logging.info('Execution finished')
        return 'Warning! There is no images in the profile album!'

    # last elem - is the largest by size - so - we take him
    vk_photo_links = [photo_sizes[-1]['url'] for photo_sizes in sizes_list]
    logging.info('Completed! List of links for uploading formatted')
    return vk_photo_links
def parse(self):
    """Parse exported messenger data into a DataFrame with a 'text' column.

    Supports Telegram JSON exports (self.mtype == 'tg') and VK HTML
    exports (self.mtype == 'vk') found under self.path.

    Raises:
        ValueError: when self.mtype is neither 'tg' nor 'vk'.
    """
    text = []
    if self.mtype == 'tg':
        for file_path in glob(f"{self.path}/**/*.json", recursive=True):
            with open(file_path, encoding='utf-8') as obj:
                ms = json.load(obj)
            for msg in dp.values(ms, '/**/messages/*'):
                # Keep only plain-text messages; formatted ones store a
                # list in 'text'. isinstance replaces the non-idiomatic
                # type(...) == str comparison.
                if msg['type'] == 'message' and isinstance(msg['text'], str):
                    text.append(msg['text'])
    elif self.mtype == 'vk':
        for file_path in glob(f"{self.path}/**/*.html", recursive=True):
            with open(file_path, encoding='windows-1251') as obj:
                contents = obj.read()
            msg = regex.findall(
                r'(?<=<div>).+?(?=<div class="kludges"><div class="attachment">|<div '
                r'class="kludges">|</div>)', contents)
            text.extend(msg)
    else:
        raise ValueError(
            "MType value is invalid or isn't specified! It can be either 'tg' or 'vk'."
        )
    df = pd.DataFrame(data={'text': text})
    # Drop empty messages and decode HTML entities in the survivors.
    df = df[df.text.str.len() > 0]
    df.text = df.text.apply(html.unescape)
    return df
def get_markets(self) -> Dict[str, str]:
    """Return a mapping of country code -> country name."""
    payload = decode_response(URLS['markets'], **self.details)
    codes = dpath.values(payload, '/Countries/*/Code')
    names = dpath.values(payload, '/Countries/*/Name')
    return {code: name for code, name in zip(codes, names)}
def search(self, query: str) -> Dict[str, Any]:
    """Return all settings values whose path matches *query*."""
    separator = DpathMixin.separator
    return dpath.values(self, query, separator)
def _map_script_versions(json):
    """Join sensor-script versions one-per-line, skipping any version
    that starts with the 'ffffffff' placeholder prefix."""
    all_versions = dpath.values(json, 'meta/versions/sensor-script/*/version')
    real_versions = [v for v in all_versions if not v.startswith('ffffffff')]
    return '\n'.join(real_versions)
def wrapper(*args, **kwargs):
    # Call the wrapped function, then project its result down to the
    # values matching the dpath glob. `func` and `path` come from the
    # enclosing decorator's scope (not visible in this chunk).
    res = func(*args, **kwargs)
    return dpath.values(res, path)