def getWeights(self):
    """Fetch per-site computation-power weights from the configured JSON feed.

    Returns a dict mapping site id -> ComputationPower value (string), or []
    on any fetch/parse failure, in which case self.state is set to False.
    """
    feed_url = self._o.scheme + '://' + self._o.netloc + self._o.path
    try:
        res = input.connection(logger, module_class_name(self), globopts,
                               self._o.scheme, self._o.netloc, self._o.path)
        if not res:
            raise input.ConnectorError()
        json_data = input.parse_json(logger, module_class_name(self), globopts,
                                     res, feed_url)
        if not json_data:
            raise input.ConnectorError()
    except input.ConnectorError:
        self.state = False
        return []
    else:
        try:
            weights = dict()
            for ngi in json_data:
                for site in ngi['site']:
                    key = site['id']
                    if 'ComputationPower' in site:
                        val = site['ComputationPower']
                    else:
                        # default weight when the feed omits the value
                        logger.warn(module_class_name(self) +
                                    ': No ComputationPower value for NGI:%s Site:%s'
                                    % (ngi['ngi'], site['id']))
                        val = '0'
                    weights[key] = val
            return weights
        except (KeyError, IndexError) as e:
            self.state = False
            logger.error(module_class_name(self) + ': Error parsing feed %s - %s'
                         % (feed_url, repr(e).replace('\'', '')))
            # BUG FIX: this path previously fell through, implicitly returning
            # None, while the connection-failure path returned [] — return []
            # consistently so callers can always iterate the result.
            return []
def loadProfilesFromServer(self, server, vo, namespace, Profiles):
    """Fetch profiles for the given VOs from a POEM server.

    Builds the POEM MIP URL from vo and Profiles, fetches and parses it, and
    returns a dict of namespaced profile name -> profile, filtered to
    Profiles when a non-empty filter list was given. On connection failure
    sets self.state = False and implicitly returns None (preserved behavior;
    callers inspect self.state). Exits with SystemExit(1) on a malformed URL.
    """
    validProfiles = dict()
    doFilterProfiles = len(Profiles) > 0
    if not server.startswith('http'):
        server = 'https://' + server
    self._urlfeed = server + MIPAPI
    for i in vo:
        self._urlfeed = self._urlfeed + 'vo_name=' + i + '&'
    for j in Profiles:
        self._urlfeed = self._urlfeed + 'profile=' + j + '&'
    # NOTE(review): assumes at least one vo or profile appended a trailing
    # '&'; with both lists empty this strips the last character of MIPAPI —
    # confirm against callers.
    self._urlfeed = self._urlfeed[:-1]
    # IDIOM: direct comprehension instead of range(len(...)) indexing
    Profiles = [namespace.upper() + '.' + p for p in Profiles]
    o = urlparse.urlparse(self._urlfeed, allow_fragments=True)
    try:
        assert o.scheme != '' and o.netloc != '' and o.path != ''
    except AssertionError:
        logger.error('Customer:%s Invalid POEM PI URL: %s' % (logger.customer, self._urlfeed))
        raise SystemExit(1)
    logger.info('Customer:%s Server:%s VO:%s' %
                (logger.customer, o.netloc,
                 vo[0] if len(vo) == 1 else '{0}'.format(','.join(vo))))
    try:
        res = input.connection(logger, module_class_name(self), globopts,
                               o.scheme, o.netloc, o.path + '?' + o.query)
        if not res:
            raise input.ConnectorError()
        json_data = input.parse_json(logger, module_class_name(self), globopts,
                                     res, self._urlfeed)
        if not json_data:
            raise input.ConnectorError()
    except input.ConnectorError:
        self.state = False
    else:
        try:
            for profile in json_data[0]['profiles']:
                key = profile['namespace'].upper() + '.' + profile['name']
                if not doFilterProfiles or key in Profiles:
                    validProfiles[key] = profile
        except Exception:
            # BUG FIX: was `except Exception as e: raise e`, which rebuilds
            # the traceback (and loses the original one on Python 2); bare
            # raise re-raises the active exception intact.
            raise
        else:
            return validProfiles
def _get_xmldata(self, scope, pi):
    """Fetch the given PI endpoint with the scope suffix and return parsed XML.

    Raises input.ConnectorError when the connection yields no response.
    """
    feed = self._o.scheme + '://' + self._o.netloc + pi
    res = input.connection(logger, module_class_name(self), globopts,
                           self._o.scheme, self._o.netloc, pi + scope,
                           custauth=self.custauth)
    if not res:
        raise input.ConnectorError()
    return input.parse_xml(logger, module_class_name(self), globopts, res, feed)
def getDowntimes(self, start, end):
    """Fetch GOCDB downtimes overlapping the [start, end] window.

    Returns a list of dicts (hostname, service, start_time, end_time in
    'YYYY-MM-DDTHH:MM:00Z' form) for SCHEDULED downtimes of OUTAGE severity,
    clamped to the requested window. Returns [] on any fetch or parse
    failure, setting self.state = False.
    """
    filteredDowntimes = list()
    try:
        res = input.connection(logger, module_class_name(self), globopts,
                               self._o.scheme, self._o.netloc,
                               DOWNTIMEPI + '&windowstart=%s&windowend=%s' %
                               (start.strftime(self.argDateFormat),
                                end.strftime(self.argDateFormat)),
                               custauth=self.custauth)
        if not res:
            raise input.ConnectorError()
        doc = input.parse_xml(logger, module_class_name(self), globopts, res,
                              self._o.scheme + '://' + self._o.netloc + DOWNTIMEPI)
        if not doc:
            raise input.ConnectorError()
    except input.ConnectorError:
        self.state = False
        return []
    else:
        downtimes = doc.getElementsByTagName('DOWNTIME')
        try:
            for downtime in downtimes:
                classification = downtime.getAttributeNode('CLASSIFICATION').nodeValue
                hostname = getText(downtime.getElementsByTagName('HOSTNAME')[0].childNodes)
                serviceType = getText(downtime.getElementsByTagName('SERVICE_TYPE')[0].childNodes)
                startStr = getText(downtime.getElementsByTagName('FORMATED_START_DATE')[0].childNodes)
                endStr = getText(downtime.getElementsByTagName('FORMATED_END_DATE')[0].childNodes)
                severity = getText(downtime.getElementsByTagName('SEVERITY')[0].childNodes)
                startTime = datetime.datetime.strptime(startStr, self.WSDateFormat)
                endTime = datetime.datetime.strptime(endStr, self.WSDateFormat)
                # clamp the downtime to the requested window
                if startTime < start:
                    startTime = start
                if endTime > end:
                    endTime = end
                if classification == 'SCHEDULED' and severity == 'OUTAGE':
                    dt = dict()
                    dt['hostname'] = hostname
                    dt['service'] = serviceType
                    # SIMPLIFIED: the original built the same string via
                    # strftime('%Y-%m-%d %H:%M').replace(' ', 'T', 1).replace(' ', ':'),
                    # where the second replace was a no-op (no spaces remain
                    # after the first); emit the ISO-like form directly.
                    dt['start_time'] = startTime.strftime('%Y-%m-%dT%H:%M') + ':00Z'
                    dt['end_time'] = endTime.strftime('%Y-%m-%dT%H:%M') + ':00Z'
                    filteredDowntimes.append(dt)
        except (KeyError, IndexError, AttributeError, TypeError, AssertionError) as e:
            self.state = False
            logger.error(module_class_name(self) + 'Customer:%s Job:%s : Error parsing feed %s - %s' %
                         (logger.customer, logger.job,
                          self._o.scheme + '://' + self._o.netloc + DOWNTIMEPI,
                          repr(e).replace('\'', '')))
            return []
        else:
            return filteredDowntimes
def loadProfilesFromServer(self, server, vo, filterProfiles):
    """Fetch profiles for a single VO from a POEM server.

    Returns a dict of namespaced profile name -> profile, filtered to
    filterProfiles when that list is non-empty. On connection failure sets
    self.state = False and implicitly returns None (preserved behavior;
    callers inspect self.state). Exits with SystemExit(1) on a malformed URL.
    """
    validProfiles = dict()
    doFilterProfiles = len(filterProfiles) > 0
    if not server.startswith('http'):
        server = 'https://' + server
    self._urlfeed = server + MIPAPI + vo
    o = urlparse.urlparse(self._urlfeed, allow_fragments=True)
    try:
        assert o.scheme != '' and o.netloc != '' and o.path != ''
    except AssertionError:
        logger.error('Customer:%s Invalid POEM PI URL: %s' % (logger.customer, self._urlfeed))
        raise SystemExit(1)
    logger.info('Customer:%s Server:%s VO:%s' % (logger.customer, o.netloc, vo))
    try:
        res = input.connection(logger, module_class_name(self), globopts,
                               o.scheme, o.netloc, o.path + '?' + o.query)
        if not res:
            raise input.ConnectorError()
        json_data = input.parse_json(logger, module_class_name(self), globopts,
                                     res, self._urlfeed)
        if not json_data:
            raise input.ConnectorError()
    except input.ConnectorError:
        self.state = False
    else:
        try:
            for profile in json_data[0]['profiles']:
                key = profile['namespace'].upper() + '.' + profile['name']
                if not doFilterProfiles or key in filterProfiles:
                    validProfiles[key] = profile
        except Exception:
            # BUG FIX: bare raise preserves the original traceback
            # (the previous `raise e` rebuilt it, losing it on Python 2).
            raise
        else:
            return validProfiles
def _get_service_endpoints(self, serviceList, scope, doc):
    """Populate serviceList with SERVICE_ENDPOINT entries parsed from doc.

    Each entry is keyed by the endpoint's PRIMARY_KEY ('' when absent) and
    carries hostname/type/monitored/production/site/roc/service_id, optional
    parsed extensions, the scope value, and a hostname-type-site sortId.
    Logs and re-raises any parse error.
    """
    try:
        for service in doc.getElementsByTagName('SERVICE_ENDPOINT'):
            serviceId = ''
            if service.getAttributeNode('PRIMARY_KEY'):
                serviceId = str(service.attributes['PRIMARY_KEY'].value)
            if serviceId not in serviceList:
                serviceList[serviceId] = {}
            entry = serviceList[serviceId]

            def tag_text(tag):
                # text content of the first child element with the given tag
                return getText(service.getElementsByTagName(tag)[0].childNodes)

            entry['hostname'] = tag_text('HOSTNAME')
            entry['type'] = tag_text('SERVICE_TYPE')
            entry['monitored'] = tag_text('NODE_MONITORED')
            entry['production'] = tag_text('IN_PRODUCTION')
            entry['site'] = tag_text('SITENAME')
            entry['roc'] = tag_text('ROC_NAME')
            entry['service_id'] = serviceId
            if self.extensions:
                ext_nodes = service.getElementsByTagName('EXTENSIONS')[0].childNodes
                entry['extensions'] = self._parse_extensions(ext_nodes)
            entry['scope'] = scope.split('=')[1]
            entry['sortId'] = entry['hostname'] + '-' + entry['type'] + '-' + entry['site']
    except (KeyError, IndexError, TypeError, AttributeError, AssertionError) as e:
        logger.error(module_class_name(self) + 'Customer:%s Job:%s : Error parsing feed %s - %s' %
                     (logger.customer, logger.job,
                      self._o.scheme + '://' + self._o.netloc + SERVENDPI,
                      repr(e).replace('\'', '').replace('\"', '')))
        # BUG FIX: bare raise preserves the original traceback (was `raise e`)
        raise
def _get_sites_internal(self, siteList, scope, doc):
    """Populate siteList with SITE entries (infrastructure, certification,
    ngi, scope) parsed from doc; logs and re-raises on parse errors."""
    try:
        for site in doc.getElementsByTagName('SITE'):
            siteName = site.getAttribute('NAME')
            if siteName not in siteList:
                siteList[siteName] = {'site': siteName}
            entry = siteList[siteName]
            entry['infrastructure'] = getText(
                site.getElementsByTagName('PRODUCTION_INFRASTRUCTURE')[0].childNodes)
            entry['certification'] = getText(
                site.getElementsByTagName('CERTIFICATION_STATUS')[0].childNodes)
            entry['ngi'] = getText(site.getElementsByTagName('ROC')[0].childNodes)
            entry['scope'] = scope.split('=')[1]
    except (KeyError, IndexError, TypeError, AttributeError, AssertionError) as e:
        logger.error(module_class_name(self) + 'Customer:%s Job:%s : Error parsing feed %s - %s' %
                     (logger.customer, logger.job,
                      self._o.scheme + '://' + self._o.netloc + SITESPI,
                      repr(e).replace('\'', '').replace('\"', '')))
        # BUG FIX: bare raise preserves the original traceback (was `raise e`)
        raise
def _get_service_groups(self, groupList, scope, doc):
    """Populate groupList with SERVICE_GROUP entries and their member
    endpoints; logs and re-raises on parse errors.

    NOTE(review): the passed-in `doc` argument is immediately shadowed by a
    fresh fetch via _get_xmldata — sibling _get_* parsers use the given doc;
    confirm whether the parameter is meant to be honored here (behavior
    preserved).
    """
    try:
        doc = self._get_xmldata(scope, SERVGROUPPI)
        for group in doc.getElementsByTagName('SERVICE_GROUP'):
            groupId = group.getAttribute('PRIMARY_KEY')
            if groupId not in groupList:
                groupList[groupId] = {}
            entry = groupList[groupId]
            entry['name'] = getText(group.getElementsByTagName('NAME')[0].childNodes)
            entry['monitored'] = getText(group.getElementsByTagName('MONITORED')[0].childNodes)
            entry['scope'] = scope.split('=')[1]
            entry['services'] = []
            for service in group.getElementsByTagName('SERVICE_ENDPOINT'):
                serviceDict = {}
                serviceDict['hostname'] = getText(service.getElementsByTagName('HOSTNAME')[0].childNodes)
                serviceDict['type'] = getText(service.getElementsByTagName('SERVICE_TYPE')[0].childNodes)
                serviceDict['monitored'] = getText(service.getElementsByTagName('NODE_MONITORED')[0].childNodes)
                serviceDict['production'] = getText(service.getElementsByTagName('IN_PRODUCTION')[0].childNodes)
                entry['services'].append(serviceDict)
    except (KeyError, IndexError, TypeError, AttributeError, AssertionError) as e:
        logger.error(module_class_name(self) + 'Customer:%s Job:%s : Error parsing feed %s - %s' %
                     (logger.customer, logger.job,
                      self._o.scheme + '://' + self._o.netloc + SERVGROUPPI,
                      repr(e).replace('\'', '').replace('\"', '')))
        # BUG FIX: bare raise preserves the original traceback (was `raise e`)
        raise
def getProfiles(self, Profiles, namespace, PoemServer):
    """Fetch valid profiles from the configured POEM server and flatten them
    into avro-ready entries (profile, metric, service, vo, fqan).

    Exits with SystemExit(1) when nothing was fetched; returns [] on parse
    errors (self.state set to False).
    """
    try:
        # NOTE(review): .keys()[0]/.values()[0] rely on Python 2 dict
        # semantics (views are not indexable on Python 3) — confirm the
        # target interpreter.
        validProfiles = self.loadProfilesFromServer(PoemServer.keys()[0],
                                                    PoemServer.values()[0],
                                                    namespace, Profiles)
        name = [item.split('.')[-1] for item in validProfiles.keys()]
        if not name:
            self.state = False
            logger.error('Customer:' + self.customer + ' Job:' + self.job +
                         ': no profiles were fetched!')
            raise SystemExit(1)
        elif len(name) < len(Profiles):
            self.state = False
            logger.warn('Customer:' + self.customer + ' Job:' + self.job +
                        ': profile(s) %s were not fetched.' % ','.join(set(Profiles) - set(name)))
        profileListAvro = []
        for profile in validProfiles.values():
            for metric in profile['metrics']:
                profileListAvro.append({'profile': namespace + '.' + profile['name'],
                                        'metric': metric['name'],
                                        'service': metric['service_flavour'],
                                        'vo': profile['vo'],
                                        'fqan': metric['fqan']})
    except (KeyError, IndexError, AttributeError, TypeError) as e:
        self.state = False
        logger.error(module_class_name(self) + ' Customer:%s : Error parsing feed %s - %s' %
                     (logger.customer, self._urlfeed,
                      repr(e).replace('\'', '').replace('\"', '')))
        return []
    else:
        return profileListAvro
def _get_service_groups(self, groupList, scope, doc):
    """Populate groupList with SERVICE_GROUP entries, including per-endpoint
    service_id and optional extensions; logs and re-raises on parse errors.

    NOTE(review): the passed-in `doc` argument is immediately shadowed by a
    fresh fetch via _get_xmldata — sibling _get_* parsers use the given doc;
    confirm whether the parameter is meant to be honored here (behavior
    preserved).
    """
    try:
        doc = self._get_xmldata(scope, SERVGROUPPI)
        for group in doc.getElementsByTagName('SERVICE_GROUP'):
            groupId = group.getAttribute('PRIMARY_KEY')
            if groupId not in groupList:
                groupList[groupId] = {}
            entry = groupList[groupId]
            entry['name'] = getText(group.getElementsByTagName('NAME')[0].childNodes)
            entry['monitored'] = getText(group.getElementsByTagName('MONITORED')[0].childNodes)
            entry['scope'] = scope.split('=')[1]
            entry['services'] = []
            for service in group.getElementsByTagName('SERVICE_ENDPOINT'):
                serviceDict = dict()
                serviceDict['hostname'] = getText(service.getElementsByTagName('HOSTNAME')[0].childNodes)
                try:
                    # PRIMARY_KEY may appear as a child element or an attribute
                    serviceDict['service_id'] = getText(service.getElementsByTagName('PRIMARY_KEY')[0].childNodes)
                except IndexError:
                    serviceDict['service_id'] = service.getAttribute('PRIMARY_KEY')
                serviceDict['type'] = getText(service.getElementsByTagName('SERVICE_TYPE')[0].childNodes)
                serviceDict['monitored'] = getText(service.getElementsByTagName('NODE_MONITORED')[0].childNodes)
                serviceDict['production'] = getText(service.getElementsByTagName('IN_PRODUCTION')[0].childNodes)
                if self.extensions:
                    extensions = service.getElementsByTagName('EXTENSIONS')[0].childNodes
                    serviceDict['extensions'] = self._parse_extensions(extensions)
                entry['services'].append(serviceDict)
    except (KeyError, IndexError, TypeError, AttributeError, AssertionError) as e:
        logger.error(module_class_name(self) + 'Customer:%s Job:%s : Error parsing feed %s - %s' %
                     (logger.customer, logger.job,
                      self._o.scheme + '://' + self._o.netloc + SERVGROUPPI,
                      repr(e).replace('\'', '').replace('\"', '')))
        # BUG FIX: bare raise preserves the original traceback (was `raise e`)
        raise
def get_profiles(self):
    """Fetch metric profiles from the WEB-API and flatten them into
    (profile, metric, service) entries, formatted via self._format.

    Exits with SystemExit(1) when none of self.profiles were found; returns
    [] on parse errors (self.state set to False).
    """
    try:
        fetched_profiles = self._fetch()
        # FIX: list comprehension instead of filter() so len() and repeated
        # iteration behave the same on Python 3, where filter() returns a
        # one-shot iterator; identical result on Python 2.
        target_profiles = [profile for profile in fetched_profiles
                           if profile['name'] in self.profiles]
        profile_list = list()
        if len(target_profiles) == 0:
            self.state = False
            logger.error('Customer:' + self.customer + ' Job:' + self.job +
                         ': No profiles {0} were found!'.format(', '.join(self.profiles)))
            raise SystemExit(1)
        for profile in target_profiles:
            for service in profile['services']:
                for metric in service['metrics']:
                    if self.namespace:
                        profile_name = '{0}.{1}'.format(self.namespace, profile['name'])
                    else:
                        profile_name = profile['name']
                    profile_list.append({
                        'profile': profile_name,
                        'metric': metric,
                        'service': service['service']
                    })
    except (KeyError, IndexError, AttributeError, TypeError) as e:
        self.state = False
        logger.error(module_class_name(self) + ' Customer:%s : Error parsing feed %s - %s' %
                     (logger.customer, self.host + API_PATH,
                      repr(e).replace('\'', '').replace('\"', '')))
        return []
    else:
        return self._format(profile_list)
def _fetch(self):
    """GET the WEB-API profiles endpoint and return its 'data' payload.

    On any connection/parse failure (or missing 'data' key) sets
    self.state = False and falls through, implicitly returning None —
    callers inspect self.state.
    """
    try:
        response = input.connection(logger, module_class_name(self), globopts,
                                    'https', self.host, API_PATH,
                                    custauth={'WebAPIToken'.lower(): self.token})
        if not response:
            raise input.ConnectorError()
        payload = input.parse_json(logger, module_class_name(self), globopts,
                                   response, self.host + API_PATH)
        if not payload or not payload.get('data', False):
            raise input.ConnectorError()
        return payload['data']
    except input.ConnectorError:
        self.state = False
def getProfiles(self):
    """Build (profileList, profileListAvro) from the configured POEM feeds.

    profileListAvro flattens every valid profile's metrics into avro-ready
    dicts; profileList is additionally expanded per NGI/server via
    createProfileEntries unless self._nopf is set. Returns ([], []) when the
    feed cannot be parsed (self.state set to False).
    """
    filteredProfiles = re.split('\s*,\s*', poemopts['FetchProfilesList'.lower()])
    availableVOs = [vo for k, v in cpoem.get_servers().items() for vo in v]
    try:
        validProfiles = self.loadValidProfiles(filteredProfiles)
        ngiall = cpoem.get_allngi()
        profileList = []
        profileListAvro = []
        for profile in validProfiles.values():
            for metric in profile['metrics']:
                profileListAvro.append({'profile': profile['namespace'] + '.' + profile['name'],
                                        'metric': metric['name'],
                                        'service': metric['service_flavour'],
                                        'vo': profile['vo'],
                                        'fqan': metric['fqan']})
        if not self._nopf:
            # CLEANUP: dropped the unused local `nummoninst = 0`
            for server, profiles in ngiall.items():
                ngis = ['ALL']
                servers = [server]
                defaultProfiles = profiles
                for vo in availableVOs:
                    # fall back to a per-VO server query when no default
                    # profiles are configured for this NGI
                    if len(defaultProfiles) > 0:
                        serverProfiles = defaultProfiles
                    else:
                        serverProfiles = self.loadProfilesFromServer(
                            servers[0], vo, filteredProfiles).keys()
                    for profile in serverProfiles:
                        if profile.upper() in validProfiles.keys():
                            for ngi in ngis:
                                # CLEANUP: renamed the inner loop variable so
                                # it no longer shadows the outer `server`
                                for srv in servers:
                                    profileList.extend(
                                        self.createProfileEntries(
                                            srv, ngi, validProfiles[profile.upper()]))
    except (KeyError, IndexError, AttributeError, TypeError) as e:
        self.state = False
        logger.error(module_class_name(self) + ' Customer:%s : Error parsing feed %s - %s' %
                     (logger.customer, self._urlfeed,
                      repr(e).replace('\'', '').replace('\"', '')))
        return [], []
    else:
        # CLEANUP: `profileList if profileList else []` was equivalent to
        # profileList itself — it is always a list here
        return profileList, profileListAvro
def getWeights(self):
    """Fetch per-site ComputationPower weights from the configured JSON feed.

    Returns {site_id: computation_power} on success and [] on any failure,
    setting self.state = False on both failure paths.
    """
    url = self._o.scheme + '://' + self._o.netloc + self._o.path
    try:
        res = input.connection(logger, module_class_name(self), globopts,
                               self._o.scheme, self._o.netloc, self._o.path)
        if not res:
            raise input.ConnectorError()
        json_data = input.parse_json(logger, module_class_name(self), globopts,
                                     res, url)
        if not json_data:
            raise input.ConnectorError()
    except input.ConnectorError:
        self.state = False
        return []
    try:
        weights = dict()
        for ngi in json_data:
            for site in ngi['site']:
                if 'ComputationPower' in site:
                    val = site['ComputationPower']
                else:
                    # default weight when the feed omits the value
                    logger.warn(module_class_name(self) +
                                ': No ComputationPower value for NGI:%s Site:%s' %
                                (ngi['ngi'], site['id']))
                    val = '0'
                weights[site['id']] = val
        return weights
    except (KeyError, IndexError) as e:
        self.state = False
        logger.error(module_class_name(self) + ': Error parsing feed %s - %s' %
                     (url, repr(e).replace('\'', '')))
        # BUG FIX: this path previously returned None implicitly while the
        # connection-failure path returned [] — return [] consistently.
        return []
def _get_sites_internal(self, siteList, scope, doc):
    """Populate siteList with SITE entries (infrastructure, certification,
    ngi, scope) parsed from doc; logs and re-raises on parse errors."""
    try:
        for site in doc.getElementsByTagName('SITE'):
            siteName = site.getAttribute('NAME')
            if siteName not in siteList:
                siteList[siteName] = {'site': siteName}

            def tag_text(tag):
                # text content of the first child element with the given tag
                return getText(site.getElementsByTagName(tag)[0].childNodes)

            siteList[siteName]['infrastructure'] = tag_text('PRODUCTION_INFRASTRUCTURE')
            siteList[siteName]['certification'] = tag_text('CERTIFICATION_STATUS')
            siteList[siteName]['ngi'] = tag_text('ROC')
            siteList[siteName]['scope'] = scope.split('=')[1]
    except (KeyError, IndexError, TypeError, AttributeError, AssertionError) as e:
        logger.error(module_class_name(self) + 'Customer:%s Job:%s : Error parsing feed %s - %s' %
                     (logger.customer, logger.job,
                      self._o.scheme + '://' + self._o.netloc + SITESPI,
                      repr(e).replace('\'', '').replace('\"', '')))
        # BUG FIX: bare raise preserves the original traceback (was `raise e`)
        raise
def send(self, schema, msgtype, date, msglist):
    """Avro-serialize msglist and publish it via the AMS sender.

    When self.packsingle is set, all messages are packed into one AMS
    message (bulk forced to 1); otherwise each element becomes its own
    message. Returns True when the underlying _send reports success.
    """
    def _serialize(payload):
        # one writer per call; accepts a single record or a list of records
        writer = DatumWriter(load_schema(schema))
        buf = BytesIO()
        enc = BinaryEncoder(buf)
        records = payload if isinstance(payload, list) else [payload]
        for record in records:
            writer.write(record, enc)
        return buf.getvalue()

    def _wrap(data):
        return AmsMessage(attributes={'partition_date': date,
                                      'report': self.report,
                                      'type': msgtype},
                          data=data)

    if self.packsingle:
        self.bulk = 1
        msgs = [_wrap(_serialize(msglist))]
    else:
        msgs = map(lambda m: _wrap(_serialize(m)), msglist)

    connopts = {'ConnectionRetry'.lower(): self.retry,
                'ConnectionTimeout'.lower(): self.timeout,
                'ConnectionSleepRetry'.lower(): self.sleepretry}
    if self._send(self.logger, module_class_name(self), connopts, msgs,
                  self.bulk, self):
        return True
def _get_service_endpoints(self, serviceList, scope, doc):
    """Populate serviceList with SERVICE_ENDPOINT entries parsed from doc.

    Each entry is keyed by the endpoint's PRIMARY_KEY ('' when absent) and
    carries hostname/type/monitored/production/site/roc, the scope value,
    and a hostname-type-site sortId. Logs and re-raises any parse error.
    """
    try:
        for service in doc.getElementsByTagName('SERVICE_ENDPOINT'):
            serviceId = ''
            if service.getAttributeNode('PRIMARY_KEY'):
                serviceId = str(service.attributes['PRIMARY_KEY'].value)
            if serviceId not in serviceList:
                serviceList[serviceId] = {}
            entry = serviceList[serviceId]
            entry['hostname'] = getText(service.getElementsByTagName('HOSTNAME')[0].childNodes)
            entry['type'] = getText(service.getElementsByTagName('SERVICE_TYPE')[0].childNodes)
            entry['monitored'] = getText(service.getElementsByTagName('NODE_MONITORED')[0].childNodes)
            entry['production'] = getText(service.getElementsByTagName('IN_PRODUCTION')[0].childNodes)
            entry['site'] = getText(service.getElementsByTagName('SITENAME')[0].childNodes)
            entry['roc'] = getText(service.getElementsByTagName('ROC_NAME')[0].childNodes)
            entry['scope'] = scope.split('=')[1]
            entry['sortId'] = entry['hostname'] + '-' + entry['type'] + '-' + entry['site']
    except (KeyError, IndexError, TypeError, AttributeError, AssertionError) as e:
        logger.error(module_class_name(self) + 'Customer:%s Job:%s : Error parsing feed %s - %s' %
                     (logger.customer, logger.job,
                      self._o.scheme + '://' + self._o.netloc + SERVENDPI,
                      repr(e).replace('\'', '').replace('\"', '')))
        # BUG FIX: bare raise preserves the original traceback (was `raise e`)
        raise