def get_data(self):
    """Build a per-site weight mapping from the fetched JSON feed.

    Walks every NGI entry and its sites, keyed by site id, taking the
    site's 'ComputationPower' value (or '0' with a warning when the field
    is absent), and returns the result of self._reformat(weights).

    Raises:
        ConnectorParseError: on malformed feed structure.
        Exception: any other error is logged and re-raised.
    """
    try:
        weights = dict()
        for ngi in self.parse_json(self.data):
            for site in ngi['site']:
                key = site['id']
                if 'ComputationPower' in site:
                    val = site['ComputationPower']
                else:
                    self.logger.warn(
                        module_class_name(self) +
                        ': No ComputationPower value for NGI:%s Site:%s'
                        % (ngi['ngi'], site['id']))
                    # weights are kept as strings; '0' marks a missing value
                    val = '0'
                weights[key] = val
        return self._reformat(weights)
    except (KeyError, IndexError, ValueError) as exc:
        # fix: log the parse failure before raising, consistent with the
        # other feed parsers in this file (previously `exc` was dropped
        # silently and an empty ConnectorParseError raised)
        self.logger.error(module_class_name(self) +
                          ': Error parsing feed - %s'
                          % (repr(exc).replace('\'', '')))
        raise ConnectorParseError()
    except Exception as exc:
        if getattr(self.logger, 'job', False):
            self.logger.error('{} Customer:{} Job:{} : Error - {}'.format(
                module_class_name(self), self.logger.customer,
                self.logger.job, repr(exc)))
        else:
            self.logger.error('{} Customer:{} : Error - {}'.format(
                module_class_name(self), self.logger.customer, repr(exc)))
        raise exc
async def search(self, host, port, base, filter, attributes):
    """Run an LDAP subtree search with bounded retries.

    Connects to ldap://host:port/ and performs a SUB-scope search; on any
    per-attempt failure it logs, sleeps for the configured per-try delay
    and retries up to self.n_try times.

    NOTE(review): `port` is concatenated into the URL, so it is assumed to
    be a string — confirm against callers.

    Raises:
        ConnectorHttpError: when the search ultimately fails.
    """
    raised_exc = None
    n = 1
    try:
        client = bonsai.LDAPClient('ldap://' + host + ':' + port + '/')
        while n <= self.n_try:
            try:
                conn = await client.connect(True,
                                            timeout=float(self.timeout))
                res = await conn.search(base, bonsai.LDAPSearchScope.SUB,
                                        filter, attributes,
                                        timeout=float(self.timeout))
                return res
            except Exception as exc:
                self.logger.error('from {}.search() - {}'.format(
                    module_class_name(self), repr(exc)))
                # per-attempt sleep comes from the configured retry list
                await asyncio.sleep(float(self.retry_sleep_list[n - 1]))
                raised_exc = exc
                self.logger.info(f'LDAP Connection try - {n}')
                n += 1
        else:
            self.logger.error('LDAP Connection retry exhausted')
            # fix: previously `raised_exc` was captured but never raised,
            # so an exhausted retry loop silently returned None; re-raise
            # the last error so the outer handler converts it to
            # ConnectorHttpError (same pattern as SessionWithRetry).
            raise raised_exc
    except Exception as exc:
        self.logger.error('from {}.search() - {}'.format(
            module_class_name(self), repr(exc)))
        raise ConnectorHttpError()
def get_data(self):
    """Flatten the fetched metric-profile feed into a list of dicts.

    Keeps only profiles whose name is in self.target_profiles and emits
    one {'profile', 'metric', 'service'} entry per (service, metric)
    pair, optionally prefixing the profile name with self.namespace.

    Raises:
        SystemExit: when none of the target profiles is present.
        ConnectorParseError: on malformed feed structure.
    """
    try:
        fetched = self.parse_json(self.data)['data']
        wanted = [p for p in fetched if p['name'] in self.target_profiles]
        if not wanted:
            self.logger.error('Customer:' + self.logger.customer +
                              ' Job:' + self.logger.job +
                              ': No profiles {0} were found!'.format(
                                  ', '.join(self.target_profiles)))
            raise SystemExit(1)
        flattened = list()
        for profile in wanted:
            # namespace prefix depends only on the profile, hoist it
            if self.namespace:
                qualified = '{0}.{1}'.format(self.namespace, profile['name'])
            else:
                qualified = profile['name']
            for service in profile['services']:
                for metric in service['metrics']:
                    flattened.append({
                        'profile': qualified,
                        'metric': metric,
                        'service': service['service']
                    })
        return flattened
    except (KeyError, IndexError, ValueError) as exc:
        self.logger.error(module_class_name(self) +
                          ': Error parsing feed - %s'
                          % (repr(exc).replace('\'', '')))
        raise ConnectorParseError()
    except Exception as exc:
        if getattr(self.logger, 'job', False):
            self.logger.error('{} Customer:{} Job:{} : Error - {}'.format(
                module_class_name(self), self.logger.customer,
                self.logger.job, repr(exc)))
        else:
            self.logger.error('{} Customer:{} : Error - {}'.format(
                module_class_name(self), self.logger.customer, repr(exc)))
        raise exc
async def _send(self, api, data_send, connector): content, headers, status = await self.session.http_post( api, data=json.dumps(data_send), headers=self.headers) if status != 201: if connector.startswith('topology') or connector.startswith( 'downtimes'): jsonret = json.loads(content) msg = None statusmsg = jsonret.get('status', False) if statusmsg: msg = jsonret['status']['message'] else: msg = jsonret['message'] self.logger.error('%s %s() Customer:%s - HTTP POST %s' % (module_class_name(self), '_send', self.logger.customer, msg)) else: errormsg = json.loads(content) if 'errors' in errormsg: errormsg = errormsg['errors'][0]['details'] elif 'status' in errormsg: errormsg = errormsg['status']['message'] self.logger.error( '%s %s() Customer:%s Job:%s - HTTP POST %s' % (module_class_name(self), '_send', self.logger.customer, self.logger.job, errormsg)) return status
def parse_xml(self, data):
    """Parse an XML feed string into a minidom Document.

    Args:
        data: raw XML text, or None when the fetch produced nothing.

    Raises:
        ConnectorParseError: on empty data, XML syntax errors, or any
            other failure; the message identifies customer (and job).
    """
    try:
        if data is None:
            if getattr(self.logger, 'job', False):
                raise ConnectorParseError(
                    "{} Customer:{} Job:{} : No XML data fetched".format(
                        module_class_name(self), self.logger.customer,
                        self.logger.job))
            else:
                raise ConnectorParseError(
                    "{} Customer:{} : No XML data fetched".format(
                        module_class_name(self), self.logger.customer))
        return xml.dom.minidom.parseString(data)
    # fix: let our own ConnectorParseError (raised just above for empty
    # data) propagate unchanged; previously it fell into the generic
    # `except Exception` below and was re-wrapped, replacing its crafted
    # "No XML data fetched" message
    except ConnectorParseError:
        raise
    except ExpatError as exc:
        msg = '{} Customer:{} : Error parsing XML feed - {}'.format(
            module_class_name(self), self.logger.customer, repr(exc))
        raise ConnectorParseError(msg)
    except Exception as exc:
        msg = '{} Customer:{} : Error - {}'.format(module_class_name(self),
                                                   self.logger.customer,
                                                   repr(exc))
        raise ConnectorParseError(msg)
def parse_json(self, data):
    """Parse a JSON feed string into Python objects.

    Args:
        data: raw JSON text, or None when the fetch produced nothing.

    Raises:
        ConnectorParseError: on empty data, JSON syntax errors, or any
            other failure; the message identifies customer (and job).
    """
    try:
        if data is None:
            if getattr(self.logger, 'job', False):
                raise ConnectorParseError(
                    "{} Customer:{} Job:{} : No JSON data fetched".format(
                        module_class_name(self), self.logger.customer,
                        self.logger.job))
            else:
                raise ConnectorParseError(
                    "{} Customer:{} : No JSON data fetched".format(
                        module_class_name(self), self.logger.customer))
        return json.loads(data)
    # fix: let our own ConnectorParseError (raised just above for empty
    # data) propagate unchanged; previously it fell into the generic
    # `except Exception` below and was re-wrapped, replacing its crafted
    # "No JSON data fetched" message
    except ConnectorParseError:
        raise
    except ValueError as exc:
        msg = '{} Customer:{} : Error parsing JSON feed - {}'.format(
            module_class_name(self), self.logger.customer, repr(exc))
        raise ConnectorParseError(msg)
    except Exception as exc:
        msg = '{} Customer:{} : Error - {}'.format(module_class_name(self),
                                                   self.logger.customer,
                                                   repr(exc))
        raise ConnectorParseError(msg)
def parse_servicegroup_contacts(self, data):
    """Extract {name, contacts} entries from SERVICE_GROUP XML elements.

    Only groups that have both a NAME and a CONTACT_EMAIL child with text
    content are included; each entry carries a single-element contact
    list.
    """
    try:
        parsed = list()
        dom = self.parse_xml(data)
        for group in dom.getElementsByTagName('SERVICE_GROUP'):
            name = None
            contact = None
            for node in group.childNodes:
                # skip empty elements and text nodes
                if not node.childNodes:
                    continue
                if node.nodeName == 'NAME':
                    name = node.childNodes[0].nodeValue
                elif node.nodeName == 'CONTACT_EMAIL':
                    contact = node.childNodes[0].nodeValue
            if name and contact:
                parsed.append({'name': name, 'contacts': [contact]})
        return parsed
    except (KeyError, IndexError, TypeError, AttributeError,
            AssertionError) as exc:
        self.logger.error(
            module_class_name(self) + ' Customer:%s : Error parsing - %s'
            % (self.logger.customer,
               repr(exc).replace('\'', '').replace('\"', '')))
        raise exc
def parse_sites_with_contacts(self, data):
    """Extract {name, contacts} entries from SITE XML elements.

    A site's CONTACT_EMAIL may hold several addresses separated by ';';
    they are returned as a list. Sites without a contact are skipped.
    """
    try:
        parsed = list()
        dom = self.parse_xml(data)
        for site in dom.getElementsByTagName('SITE'):
            sitename = None
            contact = None
            for node in site.childNodes:
                # skip empty elements and text nodes
                if not node.childNodes:
                    continue
                if node.nodeName == 'CONTACT_EMAIL':
                    contact = node.childNodes[0].nodeValue
                elif node.nodeName == 'SHORT_NAME':
                    sitename = node.childNodes[0].nodeValue
            if contact:
                # split(';') yields [contact] unchanged when there is no
                # separator, so both cases collapse into one expression
                parsed.append({
                    'name': sitename,
                    'contacts': contact.split(';')
                })
        return parsed
    except (KeyError, IndexError, TypeError, AttributeError,
            AssertionError) as exc:
        self.logger.error(
            module_class_name(self) + ' Customer:%s : Error parsing - %s'
            % (self.logger.customer,
               repr(exc).replace('\'', '').replace('\"', '')))
        raise exc
def parse_contacts(self, data, root_node, child_node, topo_node):
    """Collect contact records grouped by topology entity from XML.

    For each `root_node` element, every `child_node` child is passed to
    self._parse_contact() to extract the interested fields, and when the
    `topo_node` child is seen the accumulated contacts are emitted under
    that entity's name.

    NOTE(review): this method parses self.data, not the `data` argument —
    the parameter is effectively ignored; confirm against callers before
    changing. (Kept as-is to preserve behavior.)

    Returns:
        list of {'name': ..., 'contacts': [{'email', 'forename',
        'surname', 'certdn', 'role'}, ...]}
    """
    interested = ('EMAIL', 'FORENAME', 'SURNAME', 'CERTDN', 'ROLE_NAME')
    try:
        # fix: the accumulator was previously named `data`, shadowing the
        # parameter of the same name
        contacts_data = list()
        xml_data = self.parse_xml(self.data)
        entities = xml_data.getElementsByTagName(root_node)
        for entity in entities:
            if entity.nodeName == root_node:
                emails = list()
                for entity_node in entity.childNodes:
                    if entity_node.nodeName == child_node:
                        contact = entity_node
                        email, name, surname, certdn, role = \
                            self._parse_contact(contact, *interested)
                        emails.append({
                            'email': email,
                            'forename': name,
                            'surname': surname,
                            'certdn': certdn,
                            'role': role
                        })
                    if entity_node.nodeName == topo_node:
                        entity_name = entity_node.childNodes[0].nodeValue
                        contacts_data.append({'name': entity_name,
                                              'contacts': emails})
        return contacts_data
    except (KeyError, IndexError, TypeError, AttributeError,
            AssertionError) as exc:
        self.logger.error(
            module_class_name(self) + ' Customer:%s : Error parsing - %s'
            % (self.logger.customer,
               repr(exc).replace('\'', '').replace('\"', '')))
        raise exc
def __init__(self, connector, host, token, logger, retry, timeout=180,
             sleepretry=60, report=None, endpoints_group=None, date=None):
    """Set up WEB-API client state and a retrying HTTP session.

    The connector basename selects the WEB-API method from self.methods;
    retry/timeout/sleep settings are mirrored into the option dict that
    SessionWithRetry expects (lowercase keys). `date` defaults to the
    value of self._construct_datenow() when not given.
    """
    self.connector = os.path.basename(connector)
    self.webapi_method = self.methods[self.connector]
    self.host = host
    self.token = token
    # every request authenticates via the x-api-key header
    self.headers = {
        'x-api-key': self.token,
        'Accept': 'application/json'
    }
    self.report = report
    self.logger = logger
    self.retry = retry
    self.timeout = timeout
    self.sleepretry = sleepretry
    # lowercase keys match the option names SessionWithRetry reads
    self.retry_options = {
        'connectionretry': retry,
        'connectiontimeout': timeout,
        'connectionsleepretry': sleepretry
    }
    self.endpoints_group = endpoints_group
    if date:
        self.date = date
    else:
        self.date = self._construct_datenow()
    self.session = SessionWithRetry(self.logger, module_class_name(self),
                                    self.retry_options, verbose_ret=True,
                                    handle_session_close=True)
async def _update(self, api, data_send): content = await self._get(api) target = list( filter(lambda w: w['name'] == data_send['name'], content['data'])) if len(target) > 1: self.logger.error('%s %s() Customer:%s Job:%s - HTTP PUT %s' % ( module_class_name(self), '_update', self.logger.customer, self.logger.job, 'Name of resource not unique on WEB-API, cannot proceed with update' )) else: id = target[0]['id'] content, status = await self._put(api, data_send, id) if status == 200: self.logger.info('Succesfully updated (HTTP PUT) resource') else: self.logger.error( '%s %s() Customer:%s Job:%s - HTTP PUT %s' % (module_class_name(self), '_update', self.logger.customer, self.logger.job, content))
def _parse_data(self):
    """Populate self._sites from the fetched GOCDB sites XML (self.data).

    For every SITE element, records site name, optional production
    infrastructure and certification status, owning NGI (ROC), joined
    scopes and, when enabled, extensions.

    Raises:
        ConnectorParseError: on malformed feed structure.
    """
    try:
        xml_data = self.parse_xml(self.data)
        sites = xml_data.getElementsByTagName('SITE')
        for site in sites:
            site_name = site.getAttribute('NAME')
            # first time this site is seen: seed its record
            if site_name not in self._sites:
                self._sites[site_name] = {'site': site_name}
            # both of these child elements are optional in the feed
            production_infra = site.getElementsByTagName(
                'PRODUCTION_INFRASTRUCTURE')
            if production_infra:
                self._sites[site_name][
                    'infrastructure'] = self.parse_xmltext(
                        production_infra[0].childNodes)
            certification_status = site.getElementsByTagName(
                'CERTIFICATION_STATUS')
            if certification_status:
                self._sites[site_name][
                    'certification'] = self.parse_xmltext(
                        certification_status[0].childNodes)
            # NGI comes either as a ROC child element or, failing that,
            # as a ROC attribute on the SITE element
            try:
                self._sites[site_name]['ngi'] = self.parse_xmltext(
                    site.getElementsByTagName('ROC')[0].childNodes)
            except IndexError:
                self._sites[site_name]['ngi'] = site.getAttribute('ROC')
            self._sites[site_name]['scope'] = ', '.join(
                self.parse_scopes(site))
            # biomed feed does not have extensions
            if self.pass_extensions:
                try:
                    extensions = self.parse_extensions(
                        site.getElementsByTagName('EXTENSIONS')
                        [0].childNodes)
                    self._sites[site_name]['extensions'] = extensions
                except IndexError:
                    # site simply has no EXTENSIONS element
                    pass
    except (KeyError, IndexError, TypeError, AttributeError,
            AssertionError) as exc:
        msg = module_class_name(
            self) + ' Customer:%s : Error parsing sites feed - %s' % (
                self.logger.customer, repr(exc).replace('\'', '').replace(
                    '\"', ''))
        raise ConnectorParseError(msg)
    # ConnectorParseError from parse_xml is re-raised untouched so its
    # original message is preserved
    except ConnectorParseError as exc:
        raise exc
def parse_serviceendpoint_contacts(self, data):
    """Extract {name, contacts} entries from SERVICE_ENDPOINT elements.

    The entry name is '<hostname>+<service_type>'; CONTACT_EMAIL may hold
    several addresses separated by ';'. Endpoints without a contact are
    skipped.
    """
    try:
        parsed = list()
        dom = self.parse_xml(data)
        for endpoint in dom.getElementsByTagName('SERVICE_ENDPOINT'):
            fqdn = None
            contact = None
            servtype = None
            for node in endpoint.childNodes:
                # skip empty elements and text nodes
                if not node.childNodes:
                    continue
                if node.nodeName == 'HOSTNAME':
                    fqdn = node.childNodes[0].nodeValue
                elif node.nodeName == 'CONTACT_EMAIL':
                    contact = node.childNodes[0].nodeValue
                elif node.nodeName == 'SERVICE_TYPE':
                    servtype = node.childNodes[0].nodeValue
            if contact:
                # split(';') yields [contact] unchanged when there is no
                # separator, so both cases collapse into one expression
                parsed.append({
                    'name': '{}+{}'.format(fqdn, servtype),
                    'contacts': contact.split(';')
                })
        return parsed
    except (KeyError, IndexError, TypeError, AttributeError,
            AssertionError) as exc:
        self.logger.error(
            module_class_name(self) + ' Customer:%s : Error parsing - %s'
            % (self.logger.customer,
               repr(exc).replace('\'', '').replace('\"', '')))
        raise exc
def csv_to_json(self, data):
    """Convert CSV text to a list of dicts keyed by the header row.

    The first row supplies the keys; each following row becomes one dict.

    Raises:
        ConnectorParseError: when the feed contains no data rows.
    """
    reader = csv.reader(StringIO(data), delimiter=',')
    # first row is the header; None when the input is empty
    header = next(reader, None)
    results = []
    for row in reader:
        results.append(
            {column: row[i] for i, column in enumerate(header)})
    if not results:
        msg = '{} Customer:{} : Error parsing CSV feed - empty data'.format(
            module_class_name(self), self.logger.customer)
        raise ConnectorParseError(msg)
    return results
def get_data(self):
    """Extract SCHEDULED OUTAGE downtimes from the fetched XML feed.

    Each matching DOWNTIME element yields a dict with 'hostname'
    (optionally suffixed with the service id when self.uid is set),
    'service', and start/end times clamped to [self.start, self.end] and
    formatted as '%Y-%m-%dT%H:%M:00Z'.

    Raises:
        ConnectorParseError: on malformed feed structure.
    """
    filtered_downtimes = list()
    try:
        downtimes = self.parse_xml(
            self.data).getElementsByTagName('DOWNTIME')
        for downtime in downtimes:
            classification = downtime.getAttributeNode(
                'CLASSIFICATION').nodeValue
            hostname = self.parse_xmltext(
                downtime.getElementsByTagName('HOSTNAME')[0].childNodes)
            service_type = self.parse_xmltext(
                downtime.getElementsByTagName('SERVICE_TYPE')
                [0].childNodes)
            start_str = self.parse_xmltext(
                downtime.getElementsByTagName('FORMATED_START_DATE')
                [0].childNodes)
            end_str = self.parse_xmltext(
                downtime.getElementsByTagName('FORMATED_END_DATE')
                [0].childNodes)
            severity = self.parse_xmltext(
                downtime.getElementsByTagName('SEVERITY')[0].childNodes)
            # PRIMARY_KEY comes either as a child element or, failing
            # that, as an attribute on the DOWNTIME element
            try:
                service_id = self.parse_xmltext(
                    downtime.getElementsByTagName('PRIMARY_KEY')
                    [0].childNodes)
            except IndexError:
                service_id = downtime.getAttribute('PRIMARY_KEY')
            start_time = datetime.datetime.strptime(
                start_str, "%Y-%m-%d %H:%M")
            end_time = datetime.datetime.strptime(end_str,
                                                  "%Y-%m-%d %H:%M")
            # clamp the downtime window to the requested date range
            if start_time < self.start:
                start_time = self.start
            if end_time > self.end:
                end_time = self.end
            if classification == 'SCHEDULED' and severity == 'OUTAGE':
                # NOTE: loop variable `downtime` is reused here as the
                # output dict; the DOM element is no longer needed
                downtime = dict()
                if self.uid:
                    # unique-id mode: disambiguate hosts by service id
                    downtime['hostname'] = '{0}_{1}'.format(
                        hostname, service_id)
                else:
                    downtime['hostname'] = hostname
                downtime['service'] = service_type
                downtime['start_time'] = start_time.strftime(
                    '%Y-%m-%dT%H:%M:00Z')
                downtime['end_time'] = end_time.strftime(
                    '%Y-%m-%dT%H:%M:00Z')
                filtered_downtimes.append(downtime)
        return filtered_downtimes
    except (KeyError, IndexError, AttributeError, TypeError,
            AssertionError) as exc:
        msg = '{} Customer:{} : Error parsing downtimes feed - {}'.format(
            module_class_name(self), self.logger.customer, repr(exc))
        raise ConnectorParseError(msg)
    # ConnectorParseError from parse_xml is re-raised untouched so its
    # original message is preserved
    except ConnectorParseError as exc:
        raise exc
def _parse_data(self):
    """Populate self._service_groups from the fetched GOCDB XML.

    For every SERVICE_GROUP element (keyed by its PRIMARY_KEY attribute)
    records name, monitored flag, joined scopes, and a list of its
    SERVICE_ENDPOINT children with their per-service fields.

    Raises:
        ConnectorParseError: on malformed feed structure.
    """
    try:
        xml_data = self.parse_xml(self.data)
        groups = xml_data.getElementsByTagName('SERVICE_GROUP')
        for group in groups:
            group_id = group.getAttribute('PRIMARY_KEY')
            # first time this group is seen: seed its record
            if group_id not in self._service_groups:
                self._service_groups[group_id] = {}
            self._service_groups[group_id]['name'] = self.parse_xmltext(
                group.getElementsByTagName('NAME')[0].childNodes)
            self._service_groups[group_id][
                'monitored'] = self.parse_xmltext(
                    group.getElementsByTagName('MONITORED')[0].childNodes)
            self._service_groups[group_id]['services'] = []
            services = group.getElementsByTagName('SERVICE_ENDPOINT')
            self._service_groups[group_id]['scope'] = ', '.join(
                self.parse_scopes(group))
            for service in services:
                tmps = dict()
                tmps['hostname'] = self.parse_xmltext(
                    service.getElementsByTagName('HOSTNAME')[0].childNodes)
                # PRIMARY_KEY comes either as a child element or, failing
                # that, as an attribute on the SERVICE_ENDPOINT element
                try:
                    tmps['service_id'] = self.parse_xmltext(
                        service.getElementsByTagName('PRIMARY_KEY')
                        [0].childNodes)
                except IndexError:
                    tmps['service_id'] = service.getAttribute(
                        'PRIMARY_KEY')
                tmps['type'] = self.parse_xmltext(
                    service.getElementsByTagName('SERVICE_TYPE')
                    [0].childNodes)
                tmps['monitored'] = self.parse_xmltext(
                    service.getElementsByTagName('NODE_MONITORED')
                    [0].childNodes)
                tmps['production'] = self.parse_xmltext(
                    service.getElementsByTagName('IN_PRODUCTION')
                    [0].childNodes)
                tmps['scope'] = ', '.join(self.parse_scopes(service))
                tmps['endpoint_urls'] = self.parse_url_endpoints(
                    service.getElementsByTagName('ENDPOINTS')
                    [0].childNodes)
                # extensions are optional and skipped for feeds that do
                # not carry them
                if self.pass_extensions:
                    extensions = self.parse_extensions(
                        service.getElementsByTagName('EXTENSIONS')
                        [0].childNodes)
                    tmps['extensions'] = extensions
                self._service_groups[group_id]['services'].append(tmps)
    except (KeyError, IndexError, TypeError, AttributeError,
            AssertionError) as exc:
        msg = module_class_name(
            self
        ) + ' Customer:%s : Error parsing service groups feed - %s' % (
            self.logger.customer, repr(exc).replace('\'', '').replace(
                '\"', ''))
        raise ConnectorParseError(msg)
    # ConnectorParseError from parse_xml is re-raised untouched so its
    # original message is preserved
    except ConnectorParseError as exc:
        raise exc
def _parse_data(self):
    """Populate self._service_endpoints from the fetched GOCDB XML.

    For every SERVICE_ENDPOINT element (keyed by its PRIMARY_KEY
    attribute value, or '' when absent) records hostname, type, optional
    hostdn, monitored/production flags, site, roc, joined scopes, a
    'sortId' of 'hostname-type-site', url, optional extensions, and
    endpoint URLs.

    Raises:
        ConnectorParseError: on malformed feed structure.
    """
    try:
        xml_data = self.parse_xml(self.data)
        services = xml_data.getElementsByTagName('SERVICE_ENDPOINT')
        for service in services:
            # endpoints without a PRIMARY_KEY all share the '' key
            service_id = ''
            if service.getAttributeNode('PRIMARY_KEY'):
                service_id = str(service.attributes['PRIMARY_KEY'].value)
            if service_id not in self._service_endpoints:
                self._service_endpoints[service_id] = {}
            self._service_endpoints[service_id][
                'hostname'] = self.parse_xmltext(
                    service.getElementsByTagName('HOSTNAME')[0].childNodes)
            self._service_endpoints[service_id][
                'type'] = self.parse_xmltext(
                    service.getElementsByTagName('SERVICE_TYPE')
                    [0].childNodes)
            # HOSTDN is optional
            hostdn = service.getElementsByTagName('HOSTDN')
            if hostdn:
                self._service_endpoints[service_id][
                    'hostdn'] = self.parse_xmltext(hostdn[0].childNodes)
            self._service_endpoints[service_id][
                'monitored'] = self.parse_xmltext(
                    service.getElementsByTagName('NODE_MONITORED')
                    [0].childNodes)
            self._service_endpoints[service_id][
                'production'] = self.parse_xmltext(
                    service.getElementsByTagName('IN_PRODUCTION')
                    [0].childNodes)
            self._service_endpoints[service_id][
                'site'] = self.parse_xmltext(
                    service.getElementsByTagName('SITENAME')[0].childNodes)
            self._service_endpoints[service_id][
                'roc'] = self.parse_xmltext(
                    service.getElementsByTagName('ROC_NAME')[0].childNodes)
            self._service_endpoints[service_id]['service_id'] = service_id
            self._service_endpoints[service_id]['scope'] = ', '.join(
                self.parse_scopes(service))
            # composite key used downstream for stable ordering
            self._service_endpoints[service_id][
                'sortId'] = self._service_endpoints[service_id][
                    'hostname'] + '-' + self._service_endpoints[
                        service_id]['type'] + '-' + self._service_endpoints[
                            service_id]['site']
            self._service_endpoints[service_id][
                'url'] = self.parse_xmltext(
                    service.getElementsByTagName('URL')[0].childNodes)
            # extensions are optional and skipped for feeds that do not
            # carry them
            if self.pass_extensions:
                extensions = self.parse_extensions(
                    service.getElementsByTagName('EXTENSIONS')
                    [0].childNodes)
                self._service_endpoints[service_id][
                    'extensions'] = extensions
            self._service_endpoints[service_id][
                'endpoint_urls'] = self.parse_url_endpoints(
                    service.getElementsByTagName('ENDPOINTS')
                    [0].childNodes)
    except (KeyError, IndexError, TypeError, AttributeError,
            AssertionError) as exc:
        msg = module_class_name(
            self
        ) + ' Customer:%s : Error parsing topology service endpoint feed - %s' % (
            self.logger.customer, repr(exc).replace('\'', '').replace(
                '\"', ''))
        raise ConnectorParseError(msg)
    # ConnectorParseError from parse_xml is re-raised untouched so its
    # original message is preserved
    except ConnectorParseError as exc:
        raise exc
async def _http_method(self, method, url, data=None, headers=None):
    """Perform an HTTP request via aiohttp with bounded retries.

    Looks up the aiohttp session method by name, adds the token headers
    when configured, and retries up to self.n_try times (sleeping
    'ConnectionSleepRetry' seconds between attempts) on client/timeout
    errors or empty responses. Returns the response text, or a
    (content, headers, status) tuple when self.verbose_ret is set.

    Statuses listed in self.erroneous_statuses, SSL errors, and HTTP
    protocol errors abort immediately without retrying.

    NOTE(review): if every attempt yields an empty response (so no client
    error was caught), `raised_exc` is still None when retries are
    exhausted and `raise raised_exc` would raise a TypeError — confirm
    whether that path can occur in practice.
    """
    method_obj = getattr(self.session, method)
    raised_exc = None
    n = 1
    if self.token:
        # token auth: ensure the API-key headers are always present
        headers = headers or {}
        headers.update({
            'x-api-key': self.token,
            'Accept': 'application/json'
        })
    try:
        sleepsecs = float(self.globopts['ConnectionSleepRetry'.lower()])
        while n <= self.n_try:
            if n > 1:
                # log every retry (but not the first attempt)
                if getattr(self.logger, 'job', False):
                    self.logger.info(
                        f"{module_class_name(self)} Customer:{self.logger.customer} Job:{self.logger.job} : HTTP Connection try - {n} after sleep {sleepsecs} seconds"
                    )
                else:
                    self.logger.info(
                        f"{module_class_name(self)} Customer:{self.logger.customer} : HTTP Connection try - {n} after sleep {sleepsecs} seconds"
                    )
            try:
                async with method_obj(url, data=data, headers=headers,
                                      ssl=self.ssl_context,
                                      auth=self.custauth) as response:
                    # configured erroneous statuses abort the retry loop
                    # (break skips the while-else, so None is returned)
                    if response.status in self.erroneous_statuses:
                        if getattr(self.logger, 'job', False):
                            self.logger.error('{}.http_{}({}) Customer:{} Job:{} - Erroneous HTTP status: {} {}'.\
                                format(module_class_name(self), method,
                                       url, self.logger.customer,
                                       self.logger.job, response.status,
                                       response.reason))
                        else:
                            self.logger.error('{}.http_{}({}) Customer:{} - Erroneus HTTP status: {} {}'.\
                                format(module_class_name(self), method,
                                       url, self.logger.customer,
                                       response.status, response.reason))
                        break
                    content = await response.text()
                    if content:
                        if self.verbose_ret:
                            return (content, response.headers,
                                    response.status)
                        return content
                    # empty body: warn and fall through to a retry
                    if getattr(self.logger, 'job', False):
                        self.logger.warn(
                            "{} Customer:{} Job:{} : HTTP Empty response".
                            format(module_class_name(self),
                                   self.logger.customer, self.logger.job))
                    else:
                        self.logger.warn(
                            "{} Customer:{} : HTTP Empty response".format(
                                module_class_name(self),
                                self.logger.customer))
            # do not retry on SSL errors
            # raise exc that will be handled in outer try/except clause
            except ssl.SSLError as exc:
                raise exc
            # retry on client errors
            except (client_exceptions.ClientError,
                    client_exceptions.ServerTimeoutError,
                    asyncio.TimeoutError) as exc:
                if getattr(self.logger, 'job', False):
                    self.logger.error(
                        '{}.http_{}({}) Customer:{} Job:{} - {}'.format(
                            module_class_name(self), method, url,
                            self.logger.customer, self.logger.job,
                            repr(exc)))
                else:
                    self.logger.error(
                        '{}.http_{}({}) Customer:{} - {}'.format(
                            module_class_name(self), method, url,
                            self.logger.customer, repr(exc)))
                # remember the last failure so it can be re-raised when
                # the retries are exhausted
                raised_exc = exc
            # do not retry on HTTP protocol errors
            # raise exc that will be handled in outer try/except clause
            except (http_exceptions.HttpProcessingError) as exc:
                if getattr(self.logger, 'job', False):
                    self.logger.error(
                        '{}.http_{}({}) Customer:{} Job:{} - {}'.format(
                            module_class_name(self), method, url,
                            self.logger.customer, self.logger.job,
                            repr(exc)))
                else:
                    self.logger.error(
                        '{}.http_{}({}) Customer:{} - {}'.format(
                            module_class_name(self), method, url,
                            self.logger.customer, repr(exc)))
                raise exc
            await asyncio.sleep(sleepsecs)
            n += 1
        # while-else: loop ran out of tries without returning
        else:
            if getattr(self.logger, 'job', False):
                self.logger.info(
                    "{} Customer:{} Job:{} : HTTP Connection retry exhausted"
                    .format(module_class_name(self), self.logger.customer,
                            self.logger.job))
            else:
                self.logger.info(
                    "{} Customer:{} : HTTP Connection retry exhausted".
                    format(module_class_name(self), self.logger.customer))
            raise raised_exc
    except Exception as exc:
        if getattr(self.logger, 'job', False):
            self.logger.error(
                '{}.http_{}({}) Customer:{} Job:{} - {}'.format(
                    module_class_name(self), method, url,
                    self.logger.customer, self.logger.job, repr(exc)))
        else:
            self.logger.error('{}.http_{}({}) Customer:{} - {}'.format(
                module_class_name(self), method, url,
                self.logger.customer, repr(exc)))
        raise exc
    finally:
        # sessions created per-call are closed here; long-lived sessions
        # are left open for the owner to close
        if not self.handle_session_close:
            await self.session.close()