def _napalm_ipv4_converter(hostname: str, plateform: str, cmd_output: dict,
                           *, filters=dict()) -> ListIPV4Interface:
    ipv4_addresses_lst = ListIPV4Interface(
        hostname=hostname,
        ipv4_addresses_lst=list()
    )
    for interface_name in cmd_output.get('get_interfaces_ip'):
        if _generic_interface_filter(
                interface_name=_mapping_interface_name(interface_name),
                plateform=plateform,
                filters=filters):
            for ip_addr in cmd_output.get('get_interfaces_ip').get(
                    interface_name).get('ipv4'):
                ipv4_addresses_lst.ipv4_addresses_lst.append(
                    IPV4Interface(
                        interface_name=_mapping_interface_name(interface_name),
                        ip_address_with_mask=ip_addr,
                        netmask=cmd_output.get('get_interfaces_ip').get(
                            interface_name).get('ipv4').get(ip_addr).get(
                            'prefix_length')))
    return ipv4_addresses_lst
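# A minimal usage sketch for _napalm_ipv4_converter, following the shape of
# NAPALM's get_interfaces_ip getter output; hostname, platform and filter
# values below are hypothetical.
_example_napalm_output = {
    'get_interfaces_ip': {
        'Ethernet1': {
            'ipv4': {
                '10.0.0.1': {'prefix_length': 24},
            },
        },
    },
}
# result = _napalm_ipv4_converter(hostname='leaf01', plateform='eos',
#                                 cmd_output=_example_napalm_output,
#                                 filters={})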
def update_testcase(self, updated_values: dict):
    """Update testcase from self parameters."""
    url = f'{self._baseurl}/testcase/{self.testcase["key"]}'
    folder = updated_values['folder'] if updated_values['folder'][0] == '/' \
        else f"/{updated_values['folder']}"
    testcase = {
        'name': updated_values['name'],
        'objective': updated_values.get('objective'),
        'precondition': updated_values.get('precondition'),
        'status': updated_values['status'],
        'priority': updated_values.get('priority'),
        'owner': updated_values.get('owner'),
        'estimatedTime': updated_values.get('estimatedTime'),
        'component': updated_values.get('component'),
        'labels': updated_values.get('labels'),
        'folder': folder,
        'parameters': updated_values.get('parameters'),
        'issueLinks': updated_values.get('issueLinks'),
        'customFields': updated_values.get('customFields'),
        'testScript': {
            'type': 'STEP_BY_STEP',
            # default to an empty step list so a missing 'testScript' key
            # does not crash on []['steps']
            'steps': [{k: v for k, v in d.items() if k != 'index'}
                      for d in updated_values.get('testScript',
                                                  {'steps': []})['steps']]
        }
    }
    try:
        self._do('put', url, payload=strip_none_values(testcase))
    except TM4JFolderNotFound:
        self._create_folder('TEST_CASE', folder.strip('/'))
        self._do('put', url, payload=strip_none_values(testcase))
    finally:
        self._put_testcase_paramtype_property()
def from_json(cls, sched_json: dict):
    timezone = tz.gettz(sched_json['tzname'])
    schedule_date = dt.datetime.strptime(sched_json["schedule_date"],
                                         date_format)
    schedule_date = schedule_date.replace(tzinfo=timezone)
    new_bs = BellSchedule(
        sched_json["name"],
        tzname=sched_json["tzname"],
        schedule_date=schedule_date,
    )
    new_bs.campus, new_bs.division = sched_json.get(
        'campus'), sched_json.get('division')
    new_bs.ts = sched_json.get("ts", dt.datetime.utcnow().timestamp())
    for period in sched_json["periods"]:
        start_time = iso8601.parse_date(period.get("start_time"))
        end_time = iso8601.parse_date(period.get("end_time"))
        new_bs.add_period(period=Period(
            period.get("name"),
            start_time,
            end_time,
            (end_time - start_time).seconds / 60,
        ))
    return new_bs
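# A hypothetical payload from_json can consume, assuming date_format is
# something like '%Y-%m-%d %H:%M:%S' and period timestamps are ISO 8601:
_example_schedule = {
    "name": "Regular Day",
    "tzname": "America/Chicago",
    "schedule_date": "2021-09-01 00:00:00",
    "campus": "Main",
    "division": "Upper School",
    "periods": [
        {"name": "Period 1",
         "start_time": "2021-09-01T08:00:00-05:00",
         "end_time": "2021-09-01T08:50:00-05:00"},
    ],
}
# schedule = BellSchedule.from_json(_example_schedule)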
def loadDataInObs(
    self,
    my_measure: dict,
    obs_meteor: ObsMeteor,
    target_key: str,
    delta_values: dict,
    my_values: dict,
    trace_flag: bool,
):
    """
    loadDataInObs

    Load the Observation data row from the json measures and fill
    delta_values from the same measures.
    """
    obs_j = obs_meteor.data.j
    my_value_avg = my_values.get(target_key + '_a')
    my_value_instant = my_values.get(target_key + '_i')
    my_value_dir = my_values.get(target_key + '_di')
    tmp_duration = my_values.get(target_key + '_du')

    if isFlagged(my_measure['special'], MeasureProcessingBitMask.MeasureIsOmm):
        tmp_duration = 60

    if my_value_avg is None and my_value_instant is None:
        # no data suitable for us
        return

    tmp_sum = my_value_avg if my_value_avg is not None else my_value_instant
    if my_value_avg is None:
        my_value_avg = my_value_instant

    # get current values from our aggregations
    tmp_sum_old = tmp_duration_old = 0
    if obs_j.get(target_key) is not None:
        tmp_sum_old = obs_j[target_key]
    if tmp_sum == tmp_sum_old:
        # no change on avg computation
        return
    tmp_duration_old = obs_j.get(target_key + '_duration')
    if tmp_duration is None:
        # fall back on the duration already stored in the observation
        # (the original assigned tmp_duration_old here, which made the
        # mismatch check below always fire when the json had no duration)
        tmp_duration = obs_meteor.data.duration
    if tmp_duration != tmp_duration_old:
        raise Exception(
            'loadDataInObs',
            'duration mismatch for ' + target_key + ': in obs: '
            + str(tmp_duration_old) + ', in json: ' + str(tmp_duration))

    delta_values[target_key + '_sum_old'] = tmp_sum_old
    delta_values[target_key + '_duration_old'] = tmp_duration_old

    # save data in dv
    delta_values[target_key + '_sum'] = tmp_sum
    delta_values[target_key + '_duration'] = tmp_duration

    # save data in obs
    if my_value_instant is not None:
        obs_j[target_key] = my_value_instant
    if my_value_dir is not None:
        obs_j[target_key + '_dir'] = my_value_dir
def _napalm_lldp_converter(hostname: str, cmd_output: dict) -> ListLLDP:
    if cmd_output is None or cmd_output == "":
        return None
    lldp_neighbors_lst = ListLLDP(lldp_neighbors_lst=list())
    if "get_lldp_neighbors_detail" in cmd_output.keys():
        for interface_name, facts in cmd_output.get(
                "get_lldp_neighbors_detail", NOT_SET).items():
            for neighbors in facts:
                lldp_neighbors_lst.lldp_neighbors_lst.append(
                    LLDP(
                        local_name=hostname,
                        local_port=_mapping_interface_name(interface_name),
                        neighbor_mgmt_ip=NOT_SET,
                        neighbor_name=neighbors.get('remote_system_name', NOT_SET),
                        neighbor_port=_mapping_interface_name(
                            neighbors.get('remote_port', NOT_SET)),
                        neighbor_os=neighbors.get('remote_system_description',
                                                  NOT_SET),
                        neighbor_type=_mapping_sys_capabilities(
                            neighbors.get('remote_system_capab', NOT_SET))))
    return lldp_neighbors_lst
def _arista_ipv6_converter(hostname: str, plateform: str, cmd_output: dict,
                           *, filters=dict()) -> ListIPV6Interface:
    if cmd_output is None or cmd_output == "":
        return None
    ipv6_addresses_lst = ListIPV6Interface(ipv6_addresses_lst=list())
    if "interfaces" in cmd_output.keys():
        for interface_name in cmd_output.get("interfaces"):
            if _generic_interface_filter(
                    plateform=plateform,
                    interface_name=_mapping_interface_name(interface_name),
                    filters=filters):
                for address in cmd_output.get("interfaces").get(
                        interface_name).get("addresses"):
                    index_slash = str(address.get("subnet", NOT_SET)).find("/")
                    ipv6_addresses_lst.ipv6_addresses_lst.append(
                        IPV6Interface(
                            interface_name=_mapping_interface_name(interface_name),
                            ip_address_with_mask=address.get("address", NOT_SET),
                            netmask=str(address.get("subnet",
                                                    NOT_SET))[index_slash + 1:]))
    return ipv6_addresses_lst
def shouldNullify(exclusion: dict, src_key: str) -> bool:
    """
    shouldNullify

    Check if the exclusion requires to nullify the measure.

    Parameters:
        exclusion: json coming from the exclusion table
        src_key: key to check in the exclusion field
    """
    try:
        if exclusion is not None:
            # nullify when the key is absent, or present with the value "null"
            if src_key not in exclusion or exclusion[src_key] == "null":
                return True
        return False
    except Exception as e:
        if e.__dict__.__len__() == 0 or "done" not in e.__dict__:
            exception_type, exception_object, exception_traceback = sys.exc_info()
            exception_info = e.__repr__()
            filename = exception_traceback.tb_frame.f_code.co_filename
            funcname = exception_traceback.tb_frame.f_code.co_name
            line_number = exception_traceback.tb_lineno
            e.info = {
                "i": str(exception_info),
                "n": funcname,
                "f": filename,
                "l": line_number,
            }
            e.done = True
        raise e
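# Quick sanity checks of the three exclusion cases handled above:
assert shouldNullify({"temp": "null"}, "temp") is True   # explicitly nullified
assert shouldNullify({"temp": "keep"}, "temp") is False  # present, not "null"
assert shouldNullify({"hum": "keep"}, "temp") is True    # key absent entirely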
def _nexus_lldp_converter(hostname: str, cmd_output: dict) -> ListLLDP:
    if cmd_output is None or cmd_output == "":
        return None
    lldp_neighbors_lst = ListLLDP(lldp_neighbors_lst=list())
    if "TABLE_nbor_detail" in cmd_output.keys():
        for lldp_neighbor in cmd_output.get('TABLE_nbor_detail', NOT_SET).get(
                "ROW_nbor_detail", NOT_SET):
            neighbor_type_lst = list()
            for sys_capability in lldp_neighbor.get("system_capability", NOT_SET):
                neighbor_type_lst.append(_mapping_sys_capabilities(sys_capability))
            lldp_neighbors_lst.lldp_neighbors_lst.append(
                LLDP(
                    local_name=hostname,
                    local_port=_mapping_interface_name(
                        lldp_neighbor.get("l_port_id", NOT_SET)),
                    neighbor_mgmt_ip=lldp_neighbor.get("mgmt_addr", NOT_SET),
                    neighbor_name=lldp_neighbor.get("sys_name", NOT_SET),
                    neighbor_port=_mapping_interface_name(
                        lldp_neighbor.get("port_id", NOT_SET)),
                    neighbor_os=lldp_neighbor.get("sys_desc", NOT_SET),
                    neighbor_type=neighbor_type_lst))
    return lldp_neighbors_lst
def parse_config(config: dict, logger: AirbyteLogger) -> Dict[str, Any]:
    """
    Convert dict of config values to firebolt.db.Connection arguments.

    :param config: json-compatible dict of settings
    :param logger: AirbyteLogger instance to print logs
    :return: dictionary of firebolt.db.Connection-compatible kwargs
    """
    connection_args = {
        "database": config["database"],
        "auth": UsernamePassword(config["username"], config["password"]),
        "api_endpoint": config.get("host", DEFAULT_API_URL),
        "account_name": config.get("account"),
    }
    # engine can be a name or a full URL of a cluster
    engine = config.get("engine")
    if engine:
        if "." in engine:
            connection_args["engine_url"] = engine
        else:
            connection_args["engine_name"] = engine
    else:
        logger.info(
            "Engine parameter was not provided. Connecting to the default engine.")
    return connection_args
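# A sketch of the config dict parse_config expects; "host" and "engine" are
# optional, and an engine value containing a dot is treated as a full cluster
# URL (engine_url) rather than an engine name. Values here are hypothetical.
_example_firebolt_config = {
    "database": "analytics",
    "username": "user@example.com",
    "password": "********",
    "account": "my-account",
    "engine": "my-engine",  # no dot -> passed through as engine_name
}
# connection_args = parse_config(_example_firebolt_config, logger)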
def _compare_bond(host_keys, hostname, bond_host_data: ListBOND,
                  bond_yaml_data: dict) -> bool:
    if bond_yaml_data is None:
        return False
    verity_bonds_lst = ListBOND(bonds_lst=list())
    if BOND_DATA_HOST_KEY in host_keys and hostname in bond_yaml_data.keys():
        for bond in bond_yaml_data.get(hostname):
            ports_members = list()
            for port in bond.get("ports_members", NOT_SET):
                ports_members.append(_mapping_interface_name(port))
            verity_bonds_lst.bonds_lst.append(
                BOND(
                    bond_name=bond.get("bond_name", NOT_SET),
                    ports_members=ports_members,
                    vlans_members=bond.get("vlans_members", list()),
                    native_vlan=bond.get("native_vlan", NOT_SET),
                    mode=bond.get("mode", NOT_SET),
                )
            )
        return verity_bonds_lst == bond_host_data
    else:
        print(f"{HEADER_GET} {hostname} is not present in "
              f"{PATH_TO_VERITY_FILES}/{TEST_TO_EXC_BOND_KEY}.")
        return False
def _nexus_cdp_converter(hostname: str, cmd_output: dict) -> ListCDP:
    cdp_neighbors_lst = ListCDP(list())
    if "TABLE_cdp_neighbor_detail_info" in cmd_output.keys():
        for cdp_neighbor in cmd_output.get(
                'TABLE_cdp_neighbor_detail_info', NOT_SET).get(
                "ROW_cdp_neighbor_detail_info", NOT_SET):
            neighbor_type_lst = list()
            for sys_capability in cdp_neighbor.get("capability", NOT_SET):
                neighbor_type_lst.append(_mapping_sys_capabilities(sys_capability))
            cdp_obj = CDP(
                local_name=hostname,
                local_port=_mapping_interface_name(
                    cdp_neighbor.get("intf_id", NOT_SET)),
                neighbor_mgmt_ip=cdp_neighbor.get("v4addr", NOT_SET),
                neighbor_name=cdp_neighbor.get("device_id", NOT_SET),
                neighbor_port=_mapping_interface_name(
                    cdp_neighbor.get("port_id", NOT_SET)),
                neighbor_os=cdp_neighbor.get("version", NOT_SET),
                neighbor_type=neighbor_type_lst
            )
            cdp_neighbors_lst.cdp_neighbors_lst.append(cdp_obj)
    return cdp_neighbors_lst
def extractappfields(appjson: dict):
    """Parse Apple Lookup API result and return a dictionary of fields."""
    result = {}
    if 'results' in appjson and len(appjson['results']) >= 1:
        appjson = appjson['results'][0]
    # get type of 'adam' (PHG terminology for iTunes product)
    if 'kind' not in appjson:
        if 'wrapperType' not in appjson:
            adamtype = 'NO APPLE RESULT'
        else:
            adamtype = appjson['wrapperType']
    else:
        adamtype = appjson['kind']
    result['adamtype'] = adamtype
    result['title'] = appjson.get('trackName', 'NULL')
    result['description'] = appjson.get('description', 'NULL')
    result['price'] = appjson.get('price', 'NULL')
    result['primarygenre'] = appjson.get('primaryGenreName', 'NULL')
    result['version'] = appjson.get('version', 'NULL')
    try:
        result['releasedate'] = datetime.strptime(appjson['releaseDate'],
                                                  '%Y-%m-%dT%H:%M:%SZ')
    except (KeyError, ValueError, TypeError):
        # missing or unparseable release date
        result['releasedate'] = 'NULL'
    result['rating'] = appjson.get('averageUserRating', 'NULL')
    result['ratings'] = appjson.get('userRatingCount', 'NULL')
    result['currrating'] = appjson.get('averageUserRatingForCurrentVersion', 'NULL')
    result['currratings'] = appjson.get('userRatingCountForCurrentVersion', 'NULL')
    result['currentVersionReleaseDate'] = appjson.get('currentVersionReleaseDate',
                                                      'NULL')
    return result
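# A trimmed, hypothetical Apple Lookup API response illustrating the shape
# extractappfields walks; real responses carry many more fields.
_example_lookup = {
    "results": [{
        "kind": "software",
        "trackName": "Example App",
        "price": 0.0,
        "releaseDate": "2020-01-15T08:00:00Z",
        "averageUserRating": 4.5,
    }],
}
# fields = extractappfields(_example_lookup)
# fields['adamtype'] == 'software'; absent keys fall back to 'NULL'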
def __init__(self, data: dict, score_calculator: ScoreCalculator):
    general_data = data.get('general_data')
    geo_data = data.get('geo_data')

    # GENERAL DATA
    self._id = int(general_data.get('obj_scoutId'))
    self._region_1 = general_data.get('obj_regio1')
    self._region_2 = general_data.get('obj_regio2')
    self._region_3 = general_data.get('obj_regio3')
    self._street = general_data.get('obj_street')
    self._heating_type = general_data.get('central_heating')
    self._total_rent = Decimal(general_data.get('obj_totalRent') or '0.0')
    self._base_rent = Decimal(general_data.get('obj_baseRent'))
    self._year_construction = int(general_data.get('obj_yearConstructed') or 1920)
    self._living_space = float(general_data.get('obj_livingSpace'))
    self._zipcode = general_data.get('obj_zipCode')
    self._condition = general_data.get('obj_condition')
    self._pets_allowed = general_data.get('obj_petsAllowed')
    self._internet_down_speed = general_data.get('obj_telekomDownloadSpeed')
    self._has_kitchen = general_data.get('obj_hasKitchen')
    self._has_garden = general_data.get('obj_hasGarden')
    self._no_rooms = float(general_data.get('obj_noRooms') or -1)
    self._no_subways = 0
    self._no_trains = 0
    self._no_supermarkets = 0

    # SCORE
    self._score = 0
    self._score_calculator = score_calculator

    # GEO DATA
    self._lat = geo_data.lat
    self._lng = geo_data.lng
def build_query(self, q: dict) -> str:
    """
    Constructs a SQL query from a JSON input.

    Args:
        q ({}): JSON query input
    Returns (str): the SQL query string
    """
    try:
        # table name required
        table = q.get('table')
        if table is None:
            raise RequestParseException("No table named in request")
        elif table not in self.db_tables:
            raise RequestParseException(invalid_table_msg(table))
        if table == 'intake':
            from .models import IntakeRow
            self.col_names = [x.name.lower() for x in IntakeRow.ColNames]
        elif table == 'archive':
            from .models import ArchiveRow
            self.col_names = [x.name.lower() for x in ArchiveRow.ColNames]
        elif table == 'metadata':
            from .models import MetadataRow
            self.col_names = [x.name.lower() for x in MetadataRow.ColNames]
        elif table == 'txn_history':
            from .models import TxnHistoryRow
            self.col_names = [x.name.lower() for x in TxnHistoryRow.ColNames]
        elif table == 'reports':
            from .models import ReportsRow
            self.col_names = [x.name.lower() for x in ReportsRow.ColNames]
        elif table == 'violations':
            from .models import ViolationsRow
            self.col_names = [x.name.lower() for x in ViolationsRow.ColNames]
        else:
            raise RequestParseException("Requested table not found")
        # if columns not listed, assume all
        columns = q.get('columns')
        if columns is None:
            columns = '*'
        elif not isinstance(columns, list):
            raise RequestParseException(
                "Columns must be present as list in request body")
        if columns != '*':
            for col in columns:
                self.validate_column(col)
        query = f"SELECT {', '.join(columns)} FROM {table} "
        # get extended filtering
        where = q.get('where')
        if where is not None:
            query += 'WHERE '
            # iterate over operators
            query += self.parse_op(where)
        return query + ';'
    except RequestParseException as e:
        e.msg = invalid_request_msg(e.msg)
        raise e
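# A hypothetical request body build_query would accept, assuming 'intake'
# appears in self.db_tables and the column names pass validate_column;
# the optional "where" subtree is delegated to parse_op.
_example_query = {
    "table": "intake",
    "columns": ["row", "receipt_no"],
    # "where": {...}  # operator tree consumed by parse_op
}
# builder.build_query(_example_query)
# -> roughly "SELECT row, receipt_no FROM intake ;"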
def try_connect(self, logger: AirbyteLogger, config: dict):
    google_client = GoogleClient(
        credentials_json=config.get("credentials_json"),
        email=config.get("email"))
    site_urls = config.get("site_urls").replace(" ", "").split(",")
    for site in site_urls:
        google_client.get(method_name="get", resource_name="sites",
                          params={"siteUrl": site})
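# try_connect expects site_urls as one comma-separated string (spaces are
# stripped before splitting); a hypothetical config it would accept:
# {
#     "credentials_json": "<service-account JSON string>",
#     "email": "owner@example.com",
#     "site_urls": "https://example.com/, sc-domain:example.org",
# }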
def __init__(self, league_name: str, data: dict):
    self.league = league_name
    self._data = data
    self.sport = data.get('sport')
    self.baseUrl = data.get('base_url')
    self._teams = []
    self._conferences = []
    self._rankings = {}
    self._currentGames = []
def __init__(self, api: GrocyApiClient, endpoint: str, parsed_json: dict):
    self.__api = api
    self.__parsed_json = parsed_json
    self.__id = parse_int(parsed_json.get('id'))
    self.__endpoint = f"{endpoint}/{self.__id}"
    self.__userfields_enpoint = self.__endpoint.replace('objects', 'userfields')
    self.__row_created_timestamp = parse_date(
        parsed_json.get('row_created_timestamp'))
def sort_output_json(self, output_json: dict, movie_names: List) -> List:
    """
    Used by parse_output_json to sort movie names by the configured column.

    :return: list of movie names sorted on self.column
    """
    if self.imdb_col:
        return sorted(movie_names,
                      key=lambda x: output_json.get(x).get("imdb_info").get(
                          self.column))
    return sorted(movie_names, key=lambda x: output_json.get(x).get(self.column))
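# A sketch of the nested dict sort_output_json sorts over, assuming
# self.column is 'rating'; with imdb_col set, the key is read from the
# nested imdb_info block instead of the top level.
_example_movies = {
    "Movie A": {"rating": 7.1, "imdb_info": {"rating": 7.4}},
    "Movie B": {"rating": 8.3, "imdb_info": {"rating": 8.0}},
}
# sorter.sort_output_json(_example_movies, ["Movie A", "Movie B"])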
def loadInObs(self, poste_metier, my_measure: dict, json_file_data: dict,
              measure_idx: int, m_agg_j: dict, obs_meteor: ObsMeteor,
              delta_values: dict, trace_flag: bool = False):
    """
    loadInObs

    Load json data in the Observation table, load max/min and return the
    delta_values to be added in all aggregations.
    Some methods are implemented here, some in the inherited class.
    """
    # load field if defined in json
    src_key = my_measure['src_key']
    target_key = my_measure['target_key']

    # get exclusion, and return if value is nullified
    exclusion = poste_metier.exclusion(my_measure['type_i'])
    # to check later...
    # if shouldNullify(exclusion, src_key) is True:
    #     return

    my_values = {}
    self.loadValuesFromCurrent(my_measure, json_file_data, measure_idx,
                               src_key, target_key, exclusion, my_values,
                               obs_meteor.data.stop_dat, trace_flag)
    if len(my_values) == 0 and len(m_agg_j) == 0:
        return
    if isFlagged(my_measure['special'],
                 MeasureProcessingBitMask.NotAllowedInCurrent) is True:
        return

    # load Json data in dv
    # update duration & agg_start_dat in obs if needed
    if obs_meteor.data.duration == 0 and len(my_values) > 1:
        tmp_duration = delta_values.get(target_key + '_du')
        obs_meteor.data.duration = tmp_duration
        # compute our agg_h.start_dat for faster retrieval of observation
        # for a given agg_h.start_dat
        obs_meteor.data.agg_start_dat = calcAggDate(
            'H', obs_meteor.data.stop_dat, tmp_duration, True)

        # double check that the durations are compatible
        if obs_meteor.data.duration != tmp_duration:
            raise Exception(
                'loadObsDatarow',
                'incompatible durations -> in table obs: '
                + str(obs_meteor.data.duration)
                + ', in json: ' + str(tmp_duration))

    # load data from dv to obs
    self.loadDataInObs(my_measure, obs_meteor, target_key, delta_values,
                       my_values, trace_flag)

    # check max/min that need to be regenerated later
    self.checkMaxMinToRegenerate(my_measure, obs_meteor, target_key,
                                 delta_values, my_values, trace_flag)

    # load Max/Min in obs, and in dv
    self.loadMaxMinInObs(my_measure, obs_meteor, target_key, delta_values,
                         my_values, trace_flag)
    return
def _extract_github_user_info(email_resp, user_resp) -> Tuple[str, str, str]:
    # email_resp / user_resp are HTTP response objects whose .json() payloads
    # are parsed below (not pre-parsed JSON, despite the original annotations)
    verified_email = next(
        email.get('email') for email in email_resp.json()
        if email.get('primary') and email.get('verified'))
    verified_name = user_resp.json().get('name')
    verified_user_id = user_resp.json().get('id')
    return verified_email, verified_user_id, verified_name
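# Shapes of the two parsed payloads this helper walks, per GitHub's
# /user/emails and /user endpoints (sketched values, not live responses):
# email_resp.json() -> [
#     {"email": "dev@example.com", "primary": True, "verified": True},
#     {"email": "alt@example.com", "primary": False, "verified": True},
# ]
# user_resp.json() -> {"name": "Dev Example", "id": 42}
# The generator picks the first primary+verified email; note the return
# order is (email, user_id, name).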
def __init__(self, json_parameter: dict):
    self.name = json_parameter['name']
    self.description = json_parameter.get('description')
    self.types = json_parameter.get('types')
    self.optional = json_parameter.get('optional', False)
    self.depth = json_parameter.get('depth', 0)
    self.type = None
    if 'types' in json_parameter:
        self.type = TsType.parse(json_parameter['types'])
    self.sub_parameters = []
def process_data(self, json_data) -> None:
    # json_data is parsed JSON: either a list of nodes or a single dict node
    if isinstance(json_data, list):
        for el in json_data:
            self.process_data(el)
    elif 'children' in json_data:
        self.process_data(json_data['children'])
    elif 'symbol' in json_data:
        c = Currency.parse_json(json_data, self.NAME)
        NameIndexes.get_instance().add_to_index(c)
    return None
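# process_data recurses through lists and 'children' wrappers until it
# reaches leaves carrying a 'symbol'; a hypothetical tree it could walk:
_example_tree = [
    {"children": [
        {"symbol": "BTC", "name": "Bitcoin"},
        {"symbol": "ETH", "name": "Ethereum"},
    ]},
]
# handler.process_data(_example_tree)  # indexes BTC and ETH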
def __init__(self, player_id: str, data: dict):
    self.playerID = player_id
    self._data = data
    self.league = data.get('league')
    self.sport = data.get('sport')
    self.baseUrl = data.get('base_url')
    self.firstName = None
    self.lastName = None
    self.position = None
    self.jerseyNumber = None
    self._team = None
    self.update_player_info()
def _compare_ospf(host_keys, hostname, ospf_host_data: OSPF,
                  ospf_yaml_data: dict, level_test: int):
    ospf_sessions_vrf_lst = ListOSPFSessionsVRF(list())
    if OSPF_SESSIONS_HOST_KEY in host_keys and hostname in ospf_yaml_data.keys():
        for vrf_name, ospf_vrf_facts in ospf_yaml_data.get(hostname,
                                                           NOT_SET).items():
            ospf_sessions_vrf = OSPFSessionsVRF(
                router_id=ospf_vrf_facts.get('router_id', NOT_SET),
                vrf_name=vrf_name,
                ospf_sessions_area_lst=ListOSPFSessionsArea(list())
            )
            for area_id, session_in_area in ospf_vrf_facts.get(
                    'area_id', NOT_SET).items():
                ospf_session_area = OSPFSessionsArea(
                    area_number=area_id,
                    ospf_sessions=ListOSPFSessions(list())
                )
                for neighbor in session_in_area:
                    if isinstance(neighbor, dict):
                        ospf = OSPFSession(
                            hostname=hostname,
                            peer_rid=neighbor.get('peer_rid', NOT_SET),
                            peer_hostname=neighbor.get('peer_name', NOT_SET),
                            session_state=neighbor.get('state', NOT_SET),
                            local_interface=_mapping_interface_name(
                                neighbor.get('local_interface', NOT_SET)),
                            peer_ip=neighbor.get('peer_ip', NOT_SET),
                        )
                        ospf_session_area.ospf_sessions.ospf_sessions_lst.append(ospf)
                ospf_sessions_vrf.ospf_sessions_area_lst.ospf_sessions_area_lst.append(
                    ospf_session_area)
            ospf_sessions_vrf_lst.ospf_sessions_vrf_lst.append(ospf_sessions_vrf)
        verity_ospf = OSPF(
            hostname=hostname,
            ospf_sessions_vrf_lst=ospf_sessions_vrf_lst
        )
        return verity_ospf == ospf_host_data
    else:
        print(f"{HEADER_GET} Key {OSPF_SESSIONS_HOST_KEY} is missing for "
              f"{hostname} or verity file is empty for this host")
        return False
def load_agg_info(agg_result: list, j: dict, keys: list):
    if len(j) == 0:
        return "...... *** NO DATA ***"
    for key, value in j.items():
        if key == 'dv':
            continue
        if keys[0] == '*':
            agg_result.append({'key': key, 'value': value})
            continue
        for onekey in keys:
            if key.startswith(onekey):
                agg_result.append({'key': key, 'value': value})
                # stop after the first matching prefix so a key is not
                # appended once per matching filter (the original continue
                # only skipped to the next filter)
                break
def _juniper_static_converter(hostname: str, cmd_outputs: dict) -> ListStatic:
    static_routes_lst = ListStatic(static_routes_lst=list())
    if "route-information" in cmd_outputs.keys():
        for instance_route in cmd_outputs.get(
                "route-information")[0].get("route-table"):
            if "rt" in instance_route.keys():
                for route in instance_route.get("rt"):
                    nexthops_lst = ListNexthop(nexthops_lst=list())
                    for route_entry in route.get("rt-entry"):
                        for nexthop in route_entry.get("nh"):
                            nexthops_lst.nexthops_lst.append(
                                Nexthop(
                                    ip_address=nexthop.get('to')[0].get(
                                        "data", NOT_SET),
                                    is_in_fib=nexthop.get(
                                        'always_true_in_juniper', True),
                                    out_interface=_mapping_interface_name(
                                        nexthop.get('via')[0].get("data", NOT_SET)),
                                    preference=route_entry.get('preference')[0].get(
                                        "data", NOT_SET),
                                    metric=NOT_SET,
                                    active=nexthop.get('always_true_in_juniper',
                                                       True)
                                )
                            )
                    # Example of default table route => "data" : "inet.0"
                    if instance_route.get("table-name")[0].get("data") == "inet.0":
                        vrf_name = "default"
                    else:
                        index_dot = instance_route.get(
                            "table-name")[0].get("data").find(".")
                        vrf_name = instance_route.get(
                            "table-name")[0].get("data")[:index_dot]
                    # Output is => "data" : "10.255.255.103/32"
                    index_slash = str(route.get("rt-destination")[0].get(
                        "data", NOT_SET)).find("/")
                    static_routes_lst.static_routes_lst.append(
                        Static(
                            vrf_name=vrf_name,
                            prefix=str(route.get("rt-destination")[0].get(
                                "data", NOT_SET))[:index_slash],
                            netmask=str(route.get("rt-destination")[0].get(
                                "data", NOT_SET))[index_slash + 1:],
                            nexthop=nexthops_lst
                        )
                    )
    return static_routes_lst
def _juniper_ospf_converter(hostname: str, cmd_outputs: dict) -> OSPF:
    if cmd_outputs is None:
        return None
    ospf_vrf_lst = ListOSPFSessionsVRF(list())
    for vrf in cmd_outputs:
        if OSPF_RIB_KEY in cmd_outputs.get(vrf).keys() and \
                OSPF_NEI_KEY in cmd_outputs.get(vrf).keys():
            area_in_vrf_dict = dict()
            ospf_area_lst = ListOSPFSessionsArea(ospf_sessions_area_lst=list())
            for nei in cmd_outputs.get(vrf).get(OSPF_NEI_KEY).get(
                    "ospf-neighbor-information")[0].get("ospf-neighbor"):
                if nei.get("ospf-area")[0].get("data") not in area_in_vrf_dict:
                    area_in_vrf_dict[nei.get("ospf-area")[0].get("data")] = list()
                area_in_vrf_dict.get(nei.get("ospf-area")[0].get("data")).append(
                    OSPFSession(
                        hostname=hostname,
                        peer_rid=nei.get("neighbor-id")[0].get("data"),
                        peer_hostname=NOT_SET,
                        session_state=nei.get("ospf-neighbor-state")[0].get("data"),
                        local_interface=nei.get("interface-name")[0].get("data"),
                        peer_ip=nei.get("neighbor-address")[0].get("data")))
            for area in area_in_vrf_dict:
                ospf_area_lst.ospf_sessions_area_lst.append(
                    OSPFSessionsArea(
                        area_number=area,
                        ospf_sessions=ListOSPFSessions(
                            ospf_sessions_lst=area_in_vrf_dict.get(area))))
            ospf_vrf_lst.ospf_sessions_vrf_lst.append(
                OSPFSessionsVRF(
                    vrf_name=vrf,
                    router_id=cmd_outputs.get(vrf).get(OSPF_RIB_KEY).get(
                        "ospf-overview-information")[0].get("ospf-overview")[0]
                        .get("ospf-router-id")[0].get("data", NOT_SET),
                    ospf_sessions_area_lst=ospf_area_lst))
    return OSPF(hostname=hostname, ospf_sessions_vrf_lst=ospf_vrf_lst)
def get_company_details(company: dict, link=None, proxy=None):
    '''Extracts the company data from the JSON response'''
    id = company.get('id')
    company_name = company.get('name')
    company_logo = company.get('logo')
    slogan = company.get('headline')
    description = company.get('description')
    if locations := get_by_path(company, ('products', 0, 'available_countries'),
                                default=None):
        # the walrus already guarantees locations is truthy, so the original
        # per-item "if locations" filter was a no-op and is dropped
        locations = '; '.join(location['name'].strip() for location in locations)
def _get_contact(self, contact_data: dict) -> iter:
    contact_all = contact_data.get('linkmanList')
    gname_map: dict = contact_data.get('uuidGuuidGnameMap')
    if contact_all is None or len(contact_all) == 0:
        self._logger.error("No contact in this account.")
        return
    for line_one in contact_all:
        contact_id = line_one.get('uuid')
        data_one = CONTACT_ONE(self._userid, contact_id, self.task,
                               self.task.apptype)
        data_one.phone = line_one.get('mobile')
        data_one.email = line_one.get('email')
        data_one.nickname = line_one.get('name')
        data_one.group = gname_map[contact_id][0][0]['groupName']
        yield data_one
def get_body(body: dict) -> APIBodyModel:
    """
    Extracts the contents from the body json. Either raw or urlencoded.

    :param body: the current APIModel object being explored
    :return: instance of APIBodyModel
    """
    api_body = APIBodyModel()
    api_body.mode = body.get(MODE, '')
    if body.get(RAW) is not None:
        api_body.raw = body.get(RAW)
    elif body.get(api_body.mode) is not None:
        api_body.key_values = DocumentGenerator.get_key_values(
            body.get(api_body.mode))
    return api_body
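# A sketch of a Postman-style body object get_body can unpack, assuming the
# MODE and RAW constants are the usual 'mode' and 'raw' keys:
_example_api_body = {
    "mode": "raw",
    "raw": '{"username": "demo"}',
}
# api_body = get_body(_example_api_body)  # api_body.mode == 'raw'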
def _cumulus_lldp_converter(hostname: str, cmd_output: dict) -> ListLLDP:
    if cmd_output is None or cmd_output == "":
        return None
    lldp_neighbors_lst = ListLLDP(lldp_neighbors_lst=list())
    if "lldp" in cmd_output.keys():
        if "interface" in cmd_output.get('lldp')[0].keys():
            for lldp_neighbor in cmd_output.get('lldp')[0].get("interface"):
                if lldp_neighbor.get("via", NOT_SET) == "LLDP":
                    neighbor_type_lst = list()
                    if lldp_neighbor.get("chassis", NOT_SET)[0].get(
                            "descr", NOT_SET) == NOT_SET:
                        neighbor_os = NOT_SET
                    else:
                        neighbor_os = lldp_neighbor.get("chassis", NOT_SET)[0].get(
                            "descr", NOT_SET)[0].get("value", NOT_SET)
                    for capability in lldp_neighbor.get(
                            "chassis", NOT_SET)[0].get("capability", NOT_SET):
                        neighbor_type_lst.append(capability.get("type", NOT_SET))
                    lldp_neighbors_lst.lldp_neighbors_lst.append(
                        LLDP(
                            local_name=hostname,
                            local_port=_mapping_interface_name(
                                lldp_neighbor.get("name", NOT_SET)),
                            neighbor_mgmt_ip=lldp_neighbor.get(
                                "chassis", NOT_SET)[0].get(
                                "mgmt-ip", NOT_SET)[0].get("value", NOT_SET),
                            neighbor_name=lldp_neighbor.get(
                                "chassis", NOT_SET)[0].get(
                                "name", NOT_SET)[0].get("value", NOT_SET),
                            neighbor_port=_mapping_interface_name(
                                lldp_neighbor.get("port", NOT_SET)[0].get(
                                    "id", NOT_SET)[0].get("value", NOT_SET)),
                            neighbor_os=neighbor_os,
                            neighbor_type=neighbor_type_lst))
    return lldp_neighbors_lst
def get_chat_id(parsed_json: dict) -> Optional[int]:
    if not parsed_json:
        return None
    try:
        chat_id: int = parsed_json.get("message").get("chat").get("id")
    except AttributeError as e:
        # a missing "message" or "chat" level surfaces as an AttributeError
        # on the intermediate None
        print('TelegramBot: Failed to get chat id from: {}: {}'.format(
            parsed_json, e))  # Log Error
        return None
    return chat_id
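# A minimal Telegram update payload for get_chat_id; only the nested
# message.chat.id path matters here.
_example_update = {"message": {"chat": {"id": 123456789}}}
# get_chat_id(_example_update)  -> 123456789
# get_chat_id({})               -> None (empty payload short-circuits)
# get_chat_id({"message": {}})  -> None (missing "chat" is caught and logged)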