    def getWeights(self):
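        """Fetch the weights feed and map site ids to computation power.

        Returns a dict of {site_id: weight}; sites without a
        ComputationPower value are logged and weighted '0'. On a failed
        fetch, self.state is set to False and an empty list is returned.
        """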
        try:
            res = input.connection(logger, module_class_name(self), globopts,
                                   self._o.scheme, self._o.netloc,
                                   self._o.path)
            if not res:
                raise input.ConnectorError()

            json_data = input.parse_json(logger, module_class_name(self), globopts, res,
                                         self._o.scheme + '://' + self._o.netloc + self._o.path)

            if not json_data:
                raise input.ConnectorError()

        except input.ConnectorError:
            self.state = False
            return []

        else:
            try:
                weights = dict()
                for ngi in json_data:
                    for site in ngi['site']:
                        key = site['id']
                        if 'ComputationPower' in site:
                            val = site['ComputationPower']
                        else:
                            logger.warn(module_class_name(self) + ': No ComputationPower value for NGI:%s Site:%s' % (ngi['ngi'], site['id']))
                            val = '0'
                        weights[key] = val
                return weights
            except (KeyError, IndexError) as e:
                self.state = False
                logger.error(module_class_name(self) + ': Error parsing feed %s - %s' % (self._o.scheme + '://' + self._o.netloc + self._o.path,
                                                                                         repr(e).replace('\'', '')))
                return []
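
    # A minimal feed document that getWeights() accepts, inferred from the
    # keys it reads above (values are illustrative, not real data):
    #
    #   [{'ngi': 'NGI_EXAMPLE',
    #     'site': [{'id': 'SITE-A', 'ComputationPower': '9.5'},
    #              {'id': 'SITE-B'}]}]   # no ComputationPower -> weighted '0'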
    def loadProfilesFromServer(self, server, vo, namespace, Profiles):
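        """Fetch metric profiles for the given VOs from the POEM server.

        Profile names listed in Profiles (prefixed with the uppercased
        namespace) act as a filter; an empty list keeps all profiles.
        """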
        validProfiles = dict()

        doFilterProfiles = len(Profiles) > 0

        if not server.startswith('http'):
            server = 'https://' + server

        self._urlfeed = server + MIPAPI
        for i in vo:
            self._urlfeed = self._urlfeed + 'vo_name=' + i + '&'

        for j in Profiles:
            self._urlfeed = self._urlfeed + 'profile=' + j + '&'

        self._urlfeed = self._urlfeed[:-1]  # strip the trailing separator

        Profiles = [namespace.upper() + '.' + p for p in Profiles]

        o = urlparse.urlparse(self._urlfeed, allow_fragments=True)

        try:
            assert o.scheme != '' and o.netloc != '' and o.path != ''
        except AssertionError:
            logger.error('Customer:%s Invalid POEM PI URL: %s' % (logger.customer, self._urlfeed))
            raise SystemExit(1)

        logger.info('Customer:%s Server:%s VO:%s' % (logger.customer, o.netloc, ','.join(vo)))

        try:
            res = input.connection(logger, module_class_name(self), globopts,
                                   o.scheme, o.netloc, o.path + '?' + o.query)
            if not res:
                raise input.ConnectorError()

            json_data = input.parse_json(logger, module_class_name(self),
                                         globopts, res, self._urlfeed)

            if not json_data:
                raise input.ConnectorError()

        except input.ConnectorError:
            self.state = False

        else:
            for profile in json_data[0]['profiles']:
                pname = profile['namespace'].upper() + '.' + profile['name']
                if not doFilterProfiles or pname in Profiles:
                    validProfiles[pname] = profile

            return validProfiles
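
    # A minimal POEM response that loadProfilesFromServer() accepts, inferred
    # from the keys it reads above (names are illustrative):
    #
    #   [{'profiles': [{'namespace': 'ch.example', 'name': 'PROFILE_CRITICAL',
    #                   ...}]}]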
    def _get_xmldata(self, scope, pi):
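        """Fetch the feed method pi with the given scope suffix and parse it as XML.

        Raises input.ConnectorError if the connection returns no data.
        """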
        res = input.connection(logger, module_class_name(self), globopts,
                               self._o.scheme, self._o.netloc, pi + scope, custauth=self.custauth)
        if not res:
            raise input.ConnectorError()

        doc = input.parse_xml(logger, module_class_name(self), globopts, res,
                              self._o.scheme + '://' + self._o.netloc + pi)
        return doc
Example #5
    def getDowntimes(self, start, end):
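        """Fetch downtimes that overlap the [start, end] window.

        Only SCHEDULED downtimes with OUTAGE severity are kept; their
        boundaries are clamped to the window and serialized as UTC
        timestamps.
        """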
        filteredDowntimes = list()

        try:
            res = input.connection(logger, module_class_name(self), globopts, self._o.scheme, self._o.netloc,
                                   DOWNTIMEPI + '&windowstart=%s&windowend=%s' % (start.strftime(self.argDateFormat),
                                                                                  end.strftime(self.argDateFormat)),
                                   custauth=self.custauth)
            if not res:
                raise input.ConnectorError()

            doc = input.parse_xml(logger, module_class_name(self), globopts,
                                  res, self._o.scheme + '://' + self._o.netloc + DOWNTIMEPI)

            if not doc:
                raise input.ConnectorError()

        except input.ConnectorError:
            self.state = False
            return []

        else:
            downtimes = doc.getElementsByTagName('DOWNTIME')
            try:
                for downtime in downtimes:
                    classification = downtime.getAttributeNode('CLASSIFICATION').nodeValue
                    hostname = getText(downtime.getElementsByTagName('HOSTNAME')[0].childNodes)
                    serviceType = getText(downtime.getElementsByTagName('SERVICE_TYPE')[0].childNodes)
                    startStr = getText(downtime.getElementsByTagName('FORMATED_START_DATE')[0].childNodes)
                    endStr = getText(downtime.getElementsByTagName('FORMATED_END_DATE')[0].childNodes)
                    severity = getText(downtime.getElementsByTagName('SEVERITY')[0].childNodes)

                    startTime = datetime.datetime.strptime(startStr, self.WSDateFormat)
                    endTime = datetime.datetime.strptime(endStr, self.WSDateFormat)

                    # Clamp downtime boundaries to the queried window.
                    if startTime < start:
                        startTime = start
                    if endTime > end:
                        endTime = end

                    if classification == 'SCHEDULED' and severity == 'OUTAGE':
                        dt = dict()
                        dt['hostname'] = hostname
                        dt['service'] = serviceType
                        dt['start_time'] = startTime.strftime('%Y-%m-%dT%H:%M') + ':00Z'
                        dt['end_time'] = endTime.strftime('%Y-%m-%dT%H:%M') + ':00Z'
                        filteredDowntimes.append(dt)

            except (KeyError, IndexError, AttributeError, TypeError, AssertionError) as e:
                self.state = False
                logger.error(module_class_name(self) + ' Customer:%s Job:%s : Error parsing feed %s - %s' % (logger.customer, logger.job,
                                                                                                             self._o.scheme + '://' + self._o.netloc + DOWNTIMEPI,
                                                                                                             repr(e).replace('\'', '')))
                return []
            else:
                return filteredDowntimes
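
    # The timestamp construction above can be verified in isolation
    # (hypothetical datetime value):
    #
    #   >>> datetime.datetime(2021, 3, 14, 9, 26).strftime('%Y-%m-%dT%H:%M') + ':00Z'
    #   '2021-03-14T09:26:00Z'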
    def loadProfilesFromServer(self, server, vo, filterProfiles):
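        """Fetch metric profiles for a single VO from the POEM server.

        Variant of the method above that takes one VO string and matches
        filterProfiles against namespace-qualified profile names.
        """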
        validProfiles = dict()

        doFilterProfiles = len(filterProfiles) > 0

        if not server.startswith('http'):
            server = 'https://' + server

        self._urlfeed = server + MIPAPI + vo
        o = urlparse.urlparse(self._urlfeed, allow_fragments=True)

        try:
            assert o.scheme != '' and o.netloc != '' and o.path != ''
        except AssertionError:
            logger.error('Customer:%s Invalid POEM PI URL: %s' %
                         (logger.customer, self._urlfeed))
            raise SystemExit(1)

        logger.info('Customer:%s Server:%s VO:%s' %
                    (logger.customer, o.netloc, vo))

        try:
            res = input.connection(logger, module_class_name(self), globopts,
                                   o.scheme, o.netloc, o.path + '?' + o.query)
            if not res:
                raise input.ConnectorError()

            json_data = input.parse_json(logger, module_class_name(self),
                                         globopts, res, self._urlfeed)

            if not json_data:
                raise input.ConnectorError()

        except input.ConnectorError:
            self.state = False

        else:
            for profile in json_data[0]['profiles']:
                pname = profile['namespace'].upper() + '.' + profile['name']
                if not doFilterProfiles or pname in filterProfiles:
                    validProfiles[pname] = profile

            return validProfiles
Example #7
    def _fetch(self):
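        """Fetch topology data from the WebAPI over HTTPS.

        The API token is passed via custauth; an empty response or a
        missing 'data' key counts as a failed fetch and sets
        self.state to False.
        """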
        try:
            res = input.connection(logger, module_class_name(self), globopts,
                                   'https', self.host, API_PATH,
                                   custauth={'WebAPIToken'.lower(): self.token})
            if not res:
                raise input.ConnectorError()

            json_data = input.parse_json(logger, module_class_name(self),
                                         globopts, res, self.host + API_PATH)

            if not json_data or not json_data.get('data', False):
                raise input.ConnectorError()

            return json_data['data']

        except input.ConnectorError:
            self.state = False
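
The fetch-and-validate pattern in _fetch() can be reproduced with the standard
library alone. The sketch below is a hypothetical, self-contained rendition:
the default path and the 'x-api-key' header name are assumptions, not taken
from the connector.

import json
import urllib.request

class ConnectorError(Exception):
    """Raised when the feed cannot be fetched or parsed."""

def fetch_data(host, token, path='/api/v2/topology'):
    # The token goes into a request header, mirroring the custauth dict
    # above; the actual header name used by the WebAPI is an assumption.
    req = urllib.request.Request('https://' + host + path,
                                 headers={'x-api-key': token})
    try:
        with urllib.request.urlopen(req, timeout=60) as res:
            payload = json.loads(res.read().decode('utf-8'))
    except (OSError, ValueError) as exc:
        raise ConnectorError(repr(exc))
    # As in _fetch(), an empty document or a missing 'data' key is an error.
    if not payload or not payload.get('data'):
        raise ConnectorError('empty data')
    return payload['data']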
Example #8
def main():
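    """Parse arguments, read configuration and fetch the NEANIAS topology.

    For every customer job the topology is read either from a remote JSON
    feed or a local file, connector state is written, and the resulting
    group and endpoint entities are published to AMS and/or written as
    Avro files.
    """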
    parser = argparse.ArgumentParser(
        description="""Fetch and construct entities from NEANIAS feed""")
    parser.add_argument('-c',
                        dest='custconf',
                        nargs=1,
                        metavar='customer.conf',
                        help='path to customer configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-g',
                        dest='gloconf',
                        nargs=1,
                        metavar='global.conf',
                        help='path to global configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-d',
                        dest='date',
                        metavar='YEAR-MONTH-DAY',
                        help='write data for this date',
                        type=str,
                        required=False)
    args = parser.parse_args()
    group_endpoints, group_groups = [], []
    logger = Logger(os.path.basename(sys.argv[0]))

    fixed_date = None
    if args.date and date_check(args.date):
        fixed_date = args.date

    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(sys.argv[0], confpath)
    globopts = cglob.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    confcust.make_dirstruct(globopts['InputStateSaveDir'.lower()])

    for cust in confcust.get_customers():
        custname = confcust.get_custname(cust)

        for job in confcust.get_jobs(cust):
            jobdir = confcust.get_fulldir(cust, job)
            logger.customer = confcust.get_custname(cust)
            jobstatedir = confcust.get_fullstatedir(
                globopts['InputStateSaveDir'.lower()], cust, job)
            fetchtype = confcust.get_fetchtype(job)

            state = None
            logger.job = job
            logger.customer = custname

            uidservtype = confcust.pass_uidserviceendpoints(job)
            ams_custopts = confcust.get_amsopts(cust)
            ams_opts = cglob.merge_opts(ams_custopts, 'ams')
            ams_complete, missopt = cglob.is_complete(ams_opts, 'ams')

            feeds = confcust.get_mapfeedjobs(sys.argv[0])
            # dict.keys() is not subscriptable in Python 3; take the first
            # (and only expected) feed source explicitly.
            feed_source = next(iter(feeds))
            if is_feed(feed_source):
                remote_topo = urlparse(feed_source)
                res = input.connection(logger, 'NEANIAS', globopts,
                                       remote_topo.scheme, remote_topo.netloc,
                                       remote_topo.path)
                if not res:
                    raise input.ConnectorError()
                doc = input.parse_json(
                    logger, 'NEANIAS', globopts, res, remote_topo.scheme +
                    '://' + remote_topo.netloc + remote_topo.path)
                eosc = EOSCReader(doc, uidservtype, fetchtype)
                group_groups = eosc.get_groupgroups()
                group_endpoints = eosc.get_groupendpoints()
                state = True
            else:
                try:
                    with open(feed_source) as fp:
                        js = json.load(fp)
                        eosc = EOSCReader(js, uidservtype, fetchtype)
                        group_groups = eosc.get_groupgroups()
                        group_endpoints = eosc.get_groupendpoints()
                        state = True
                except IOError as exc:
                    logger.error(
                        'Customer:%s Job:%s : Problem opening %s - %s' %
                        (logger.customer, logger.job, feed_source,
                         repr(exc)))
                    state = False

            if fixed_date:
                output.write_state(sys.argv[0], jobstatedir, state,
                                   globopts['InputStateDays'.lower()],
                                   fixed_date.replace('-', '_'))
            else:
                output.write_state(sys.argv[0], jobstatedir, state,
                                   globopts['InputStateDays'.lower()])

            if not state:
                continue

            numge = len(group_endpoints)
            numgg = len(group_groups)

            if eval(globopts['GeneralPublishAms'.lower()]):
                if fixed_date:
                    partdate = fixed_date
                else:
                    partdate = datestamp(1).replace('_', '-')

                ams = output.AmsPublish(
                    ams_opts['amshost'], ams_opts['amsproject'],
                    ams_opts['amstoken'], ams_opts['amstopic'],
                    confcust.get_jobdir(job), ams_opts['amsbulk'],
                    ams_opts['amspacksinglemsg'], logger,
                    int(globopts['ConnectionRetry'.lower()]),
                    int(globopts['ConnectionTimeout'.lower()]))

                ams.send(globopts['AvroSchemasTopologyGroupOfGroups'.lower()],
                         'group_groups', partdate, group_groups)

                ams.send(
                    globopts['AvroSchemasTopologyGroupOfEndpoints'.lower()],
                    'group_endpoints', partdate, group_endpoints)

            if eval(globopts['GeneralWriteAvro'.lower()]):
                if fixed_date:
                    filename = filename_date(
                        logger,
                        globopts['OutputTopologyGroupOfGroups'.lower()],
                        jobdir, fixed_date.replace('-', '_'))
                else:
                    filename = filename_date(
                        logger,
                        globopts['OutputTopologyGroupOfGroups'.lower()],
                        jobdir)
                avro = output.AvroWriter(
                    globopts['AvroSchemasTopologyGroupOfGroups'.lower()],
                    filename)
                ret, excep = avro.write(group_groups)
                if not ret:
                    logger.error('Customer:%s Job:%s : %s' %
                                 (logger.customer, logger.job, repr(excep)))
                    raise SystemExit(1)

                if fixed_date:
                    filename = filename_date(
                        logger,
                        globopts['OutputTopologyGroupOfEndpoints'.lower()],
                        jobdir, fixed_date.replace('-', '_'))
                else:
                    filename = filename_date(
                        logger,
                        globopts['OutputTopologyGroupOfEndpoints'.lower()],
                        jobdir)
                avro = output.AvroWriter(
                    globopts['AvroSchemasTopologyGroupOfEndpoints'.lower()],
                    filename)
                ret, excep = avro.write(group_endpoints)
                if not ret:
                    logger.error('Customer:%s Job:%s : %s' %
                                 (logger.customer, logger.job, repr(excep)))
                    raise SystemExit(1)

            logger.info('Customer:' + custname + ' Job:' + job +
                        ' Fetched Endpoints:%d' % (numge) + ' Groups(%s):%d' %
                        (fetchtype, numgg))