Example #1
def main():
    global logger, globopts
    parser = argparse.ArgumentParser(description='Fetch downtimes from GOCDB for given date')
    parser.add_argument('-d', dest='date', nargs=1, metavar='YEAR-MONTH-DAY', required=True)
    parser.add_argument('-c', dest='custconf', nargs=1, metavar='customer.conf', help='path to customer configuration file', type=str, required=False)
    parser.add_argument('-g', dest='gloconf', nargs=1, metavar='global.conf', help='path to global configuration file', type=str, required=False)
    args = parser.parse_args()

    logger = Logger(os.path.basename(sys.argv[0]))
    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(sys.argv[0], confpath)
    globopts = cglob.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    confcust.make_dirstruct(globopts['InputStateSaveDir'.lower()])
    feeds = confcust.get_mapfeedjobs(sys.argv[0], deffeed='https://goc.egi.eu/gocdbpi/')


    if len(args.date) == 0:
        parser.print_help()
        raise SystemExit(1)

    # calculate start and end times
    try:
        start = datetime.datetime.strptime(args.date[0], '%Y-%m-%d')
        end = datetime.datetime.strptime(args.date[0], '%Y-%m-%d')
        timestamp = start.strftime('%Y_%m_%d')
        start = start.replace(hour=0, minute=0, second=0)
        end = end.replace(hour=23, minute=59, second=59)
    except ValueError as e:
        logger.error(e)
        raise SystemExit(1)


    for feed, jobcust in feeds.items():
        customers = set(map(lambda jc: confcust.get_custname(jc[1]), jobcust))
        customers = customers.pop() if len(customers) == 1 else '({0})'.format(','.join(customers))
        jobs = set(map(lambda jc: jc[0], jobcust))
        jobs = jobs.pop() if len(jobs) == 1 else '({0})'.format(','.join(jobs))
        logger.job = jobs
        logger.customer = customers

        auth_custopts = confcust.get_authopts(feed, jobcust)
        auth_opts = cglob.merge_opts(auth_custopts, 'authentication')
        auth_complete, missing = cglob.is_complete(auth_opts, 'authentication')
        if auth_complete:
            gocdb = GOCDBReader(feed, auth=auth_opts)
            dts = gocdb.getDowntimes(start, end)
        else:
            logger.error('Customer:%s Jobs:%s %s options incomplete, missing %s'
                         % (logger.customer, logger.job, 'authentication',
                            ' '.join(missing)))
            continue

        for job, cust in jobcust:
            jobdir = confcust.get_fulldir(cust, job)
            jobstatedir = confcust.get_fullstatedir(globopts['InputStateSaveDir'.lower()], cust, job)

            logger.customer = confcust.get_custname(cust)
            logger.job = job

            ams_custopts = confcust.get_amsopts(cust)
            ams_opts = cglob.merge_opts(ams_custopts, 'ams')
            ams_complete, missopt = cglob.is_complete(ams_opts, 'ams')
            if not ams_complete:
                logger.error('Customer:%s Job:%s %s options incomplete, missing %s' % (logger.customer, job, 'ams', ' '.join(missopt)))
                continue

            output.write_state(sys.argv[0], jobstatedir, gocdb.state, globopts['InputStateDays'.lower()], timestamp)

            if not gocdb.state:
                continue

            if eval(globopts['GeneralPublishAms'.lower()]):
                ams = output.AmsPublish(ams_opts['amshost'],
                                        ams_opts['amsproject'],
                                        ams_opts['amstoken'],
                                        ams_opts['amstopic'],
                                        confcust.get_jobdir(job),
                                        ams_opts['amsbulk'],
                                        ams_opts['amspacksinglemsg'],
                                        logger,
                                        int(globopts['ConnectionRetry'.lower()]),
                                        int(globopts['ConnectionTimeout'.lower()]))

                ams.send(globopts['AvroSchemasDowntimes'.lower()], 'downtimes',
                         timestamp.replace('_', '-'), dts)

            if eval(globopts['GeneralWriteAvro'.lower()]):
                filename = filename_date(logger, globopts['OutputDowntimes'.lower()], jobdir, stamp=timestamp)
                avro = output.AvroWriter(globopts['AvroSchemasDowntimes'.lower()], filename)
                ret, excep = avro.write(dts)
                if not ret:
                    logger.error('Customer:%s Job:%s %s' % (logger.customer, logger.job, repr(excep)))
                    raise SystemExit(1)

        if gocdb.state:
            custs = set([cust for job, cust in jobcust])
            for cust in custs:
                jobs = [job for job, lcust in jobcust if cust == lcust]
                logger.info('Customer:%s Jobs:%s Fetched Date:%s Endpoints:%d' % (confcust.get_custname(cust),
                                                                                  jobs[0] if len(jobs) == 1 else '({0})'.format(','.join(jobs)),
                                                                                  args.date[0], len(dts)))
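
A recurring idiom in all of these examples is the globopts['OptionName'.lower()] lookup: Global.parse() evidently flattens the global configuration into a single dict keyed by lowercased section and option names. A minimal sketch of such a parser (an assumption for illustration, not the package's actual code):

import configparser

def parse_global_conf(path):
    # assumed reimplementation of the flat, lowercase-keyed options
    # dict that Global.parse() appears to return, explaining lookups
    # such as globopts['InputStateSaveDir'.lower()]
    config = configparser.ConfigParser()
    config.read(path)
    opts = {}
    for section in config.sections():
        for option, value in config.items(section):
            opts[(section + option).lower()] = value
    return opts
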
Example #2
def main():
    parser = argparse.ArgumentParser(
        description="""Fetch and construct entities from NEANIAS feed""")
    parser.add_argument('-c',
                        dest='custconf',
                        nargs=1,
                        metavar='customer.conf',
                        help='path to customer configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-g',
                        dest='gloconf',
                        nargs=1,
                        metavar='global.conf',
                        help='path to global configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-d',
                        dest='date',
                        metavar='YEAR-MONTH-DAY',
                        help='write data for this date',
                        type=str,
                        required=False)
    args = parser.parse_args()
    group_endpoints, group_groups = [], []
    logger = Logger(os.path.basename(sys.argv[0]))

    fixed_date = None
    if args.date and date_check(args.date):
        fixed_date = args.date

    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(sys.argv[0], confpath)
    globopts = cglob.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    confcust.make_dirstruct(globopts['InputStateSaveDir'.lower()])

    for cust in confcust.get_customers():
        custname = confcust.get_custname(cust)

        for job in confcust.get_jobs(cust):
            jobdir = confcust.get_fulldir(cust, job)
            logger.customer = confcust.get_custname(cust)
            jobstatedir = confcust.get_fullstatedir(
                globopts['InputStateSaveDir'.lower()], cust, job)
            fetchtype = confcust.get_fetchtype(job)

            state = None
            logger.job = job
            logger.customer = custname

            uidservtype = confcust.pass_uidserviceendpoints(job)
            ams_custopts = confcust.get_amsopts(cust)
            ams_opts = cglob.merge_opts(ams_custopts, 'ams')
            ams_complete, missopt = cglob.is_complete(ams_opts, 'ams')

            feeds = confcust.get_mapfeedjobs(sys.argv[0])
            # dict views are not indexable in Python 3; take the single
            # configured feed source explicitly
            feed_source = next(iter(feeds))
            if is_feed(feed_source):
                remote_topo = urlparse(feed_source)
                res = input.connection(logger, 'NEANIAS', globopts,
                                       remote_topo.scheme, remote_topo.netloc,
                                       remote_topo.path)
                if not res:
                    raise input.ConnectorError()
                doc = input.parse_json(
                    logger, 'NEANIAS', globopts, res, remote_topo.scheme +
                    '://' + remote_topo.netloc + remote_topo.path)
                eosc = EOSCReader(doc, uidservtype, fetchtype)
                group_groups = eosc.get_groupgroups()
                group_endpoints = eosc.get_groupendpoints()
                state = True
            else:
                try:
                    with open(feed_source) as fp:
                        js = json.load(fp)
                        eosc = EOSCReader(js, uidservtype, fetchtype)
                        group_groups = eosc.get_groupgroups()
                        group_endpoints = eosc.get_groupendpoints()
                        state = True
                except IOError as exc:
                    logger.error(
                        'Customer:%s Job:%s : Problem opening %s - %s' %
                        (logger.customer, logger.job, feed_source,
                         repr(exc)))
                    state = False

            if fixed_date:
                output.write_state(sys.argv[0], jobstatedir, state,
                                   globopts['InputStateDays'.lower()],
                                   fixed_date.replace('-', '_'))
            else:
                output.write_state(sys.argv[0], jobstatedir, state,
                                   globopts['InputStateDays'.lower()])

            if not state:
                continue

            numge = len(group_endpoints)
            numgg = len(group_groups)

            if eval(globopts['GeneralPublishAms'.lower()]):
                if fixed_date:
                    partdate = fixed_date
                else:
                    partdate = datestamp(1).replace('_', '-')

                ams = output.AmsPublish(
                    ams_opts['amshost'], ams_opts['amsproject'],
                    ams_opts['amstoken'], ams_opts['amstopic'],
                    confcust.get_jobdir(job), ams_opts['amsbulk'],
                    ams_opts['amspacksinglemsg'], logger,
                    int(globopts['ConnectionRetry'.lower()]),
                    int(globopts['ConnectionTimeout'.lower()]))

                ams.send(globopts['AvroSchemasTopologyGroupOfGroups'.lower()],
                         'group_groups', partdate, group_groups)

                ams.send(
                    globopts['AvroSchemasTopologyGroupOfEndpoints'.lower()],
                    'group_endpoints', partdate, group_endpoints)

            if eval(globopts['GeneralWriteAvro'.lower()]):
                if fixed_date:
                    filename = filename_date(
                        logger,
                        globopts['OutputTopologyGroupOfGroups'.lower()],
                        jobdir, fixed_date.replace('-', '_'))
                else:
                    filename = filename_date(
                        logger,
                        globopts['OutputTopologyGroupOfGroups'.lower()],
                        jobdir)
                avro = output.AvroWriter(
                    globopts['AvroSchemasTopologyGroupOfGroups'.lower()],
                    filename)
                ret, excep = avro.write(group_groups)
                if not ret:
                    logger.error('Customer:%s Job:%s : %s' %
                                 (logger.customer, logger.job, repr(excep)))
                    raise SystemExit(1)

                if fixed_date:
                    filename = filename_date(
                        logger,
                        globopts['OutputTopologyGroupOfEndpoints'.lower()],
                        jobdir, fixed_date.replace('-', '_'))
                else:
                    filename = filename_date(
                        logger,
                        globopts['OutputTopologyGroupOfEndpoints'.lower()],
                        jobdir)
                avro = output.AvroWriter(
                    globopts['AvroSchemasTopologyGroupOfEndpoints'.lower()],
                    filename)
                ret, excep = avro.write(group_endpoints)
                if not ret:
                    logger.error('Customer:%s Job:%s : %s' %
                                 (logger.customer, logger.job, repr(excep)))
                    raise SystemExit(1)

            logger.info('Customer:' + custname + ' Job:' + job +
                        ' Fetched Endpoints:%d' % (numge) + ' Groups(%s):%d' %
                        (fetchtype, numgg))
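
Example #2 branches on is_feed() to choose between fetching a remote topology feed and reading a local file. The helper itself is not shown here; a plausible sketch (an assumption, not the connector's actual code) is a simple URL-scheme check:

from urllib.parse import urlparse

def is_feed(source):
    # assumed behaviour: http(s) sources are remote feeds,
    # anything else is treated as a local file path
    return urlparse(source).scheme in ('http', 'https')
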
Example #3
import unittest
import mock
import asyncio

from aiohttp import client_exceptions
from aiohttp import http_exceptions

from argo_egi_connectors.io.http import SessionWithRetry
from argo_egi_connectors.log import Logger
from argo_egi_connectors.exceptions import ConnectorHttpError

logger = Logger('test_topofeed.py')
CUSTOMER_NAME = 'CUSTOMERFOO'


class async_test(object):
    """
    Decorator to create asyncio context for asyncio methods or functions.
    """
    def __init__(self, test_method):
        self.test_method = test_method

    def __call__(self, *args, **kwargs):
        test_obj = args[0]
        test_obj.loop.run_until_complete(self.test_method(*args, **kwargs))


class mockHttpGetEmpty(mock.AsyncMock):
    async def __aenter__(self, *args, **kwargs):
        mock_obj = mock.AsyncMock()
        mock_obj.text.return_value = ''
        # assumed continuation of the truncated snippet: an async
        # context manager must hand back the mock and define __aexit__
        return mock_obj

    async def __aexit__(self, *args, **kwargs):
        pass
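
The async_test decorator above expects the TestCase instance to expose a loop attribute and drives the wrapped coroutine to completion on it. Hand-unrolled, a test relying on the same mechanism would look roughly like this (illustrative sketch with made-up names):

import asyncio
import unittest

class ExampleTest(unittest.TestCase):
    def setUp(self):
        # async_test reads this attribute via test_obj.loop
        self.loop = asyncio.new_event_loop()

    async def _coro_check(self):
        await asyncio.sleep(0)
        self.assertTrue(True)

    def test_coro_check(self):
        # this is what async_test.__call__ effectively does
        self.loop.run_until_complete(self._coro_check())
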
Example #4
def main():
    global logger, globopts
    parser = argparse.ArgumentParser(
        description="""Fetch weights information from Gstat provider
                                                    for every job listed in customer.conf"""
    )
    parser.add_argument('-c',
                        dest='custconf',
                        nargs=1,
                        metavar='customer.conf',
                        help='path to customer configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-g',
                        dest='gloconf',
                        nargs=1,
                        metavar='global.conf',
                        help='path to global configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-d',
                        dest='date',
                        metavar='YEAR-MONTH-DAY',
                        help='write data for this date',
                        type=str,
                        required=False)
    args = parser.parse_args()

    logger = Logger(os.path.basename(sys.argv[0]))

    fixed_date = None
    if args.date and date_check(args.date):
        fixed_date = args.date

    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(sys.argv[0], confpath)
    globopts = cglob.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    confcust.make_dirstruct(globopts['InputStateSaveDir'.lower()])
    feeds = confcust.get_mapfeedjobs(sys.argv[0], deffeed=VAPORPI)

    j = 0
    for feed, jobcust in feeds.items():
        weights = Vapor(feed)
        datawr = None

        customers = set(map(lambda jc: confcust.get_custname(jc[1]), jobcust))
        customers = customers.pop() if len(customers) == 1 else '({0})'.format(
            ','.join(customers))
        sjobs = set(map(lambda jc: jc[0], jobcust))
        jobs = list(sjobs)[0] if len(sjobs) == 1 else '({0})'.format(
            ','.join(sjobs))
        logger.job = jobs
        logger.customer = customers

        for job, cust in jobcust:
            logger.customer = confcust.get_custname(cust)
            logger.job = job

            write_empty = confcust.send_empty(sys.argv[0], cust)

            if not write_empty:
                w = weights.getWeights()
            else:
                w = []
                weights.state = True

            jobdir = confcust.get_fulldir(cust, job)
            jobstatedir = confcust.get_fullstatedir(
                globopts['InputStateSaveDir'.lower()], cust, job)

            custname = confcust.get_custname(cust)
            ams_custopts = confcust.get_amsopts(cust)
            ams_opts = cglob.merge_opts(ams_custopts, 'ams')
            ams_complete, missopt = cglob.is_complete(ams_opts, 'ams')
            if not ams_complete:
                logger.error('Customer:%s %s options incomplete, missing %s' %
                             (custname, 'ams', ' '.join(missopt)))
                continue

            if fixed_date:
                output.write_state(sys.argv[0], jobstatedir, weights.state,
                                   globopts['InputStateDays'.lower()],
                                   fixed_date.replace('-', '_'))
            else:
                output.write_state(sys.argv[0], jobstatedir, weights.state,
                                   globopts['InputStateDays'.lower()])

            if not weights.state:
                continue

            datawr = data_out(w)
            if eval(globopts['GeneralPublishAms'.lower()]):
                if fixed_date:
                    partdate = fixed_date
                else:
                    partdate = datestamp(1).replace('_', '-')

                ams = output.AmsPublish(
                    ams_opts['amshost'], ams_opts['amsproject'],
                    ams_opts['amstoken'], ams_opts['amstopic'],
                    confcust.get_jobdir(job), ams_opts['amsbulk'],
                    ams_opts['amspacksinglemsg'], logger,
                    int(globopts['ConnectionRetry'.lower()]),
                    int(globopts['ConnectionTimeout'.lower()]))

                ams.send(globopts['AvroSchemasWeights'.lower()], 'weights',
                         partdate, datawr)

            if eval(globopts['GeneralWriteAvro'.lower()]):
                if fixed_date:
                    filename = filename_date(logger,
                                             globopts['OutputWeights'.lower()],
                                             jobdir,
                                             fixed_date.replace('-', '_'))
                else:
                    filename = filename_date(logger,
                                             globopts['OutputWeights'.lower()],
                                             jobdir)
                avro = output.AvroWriter(
                    globopts['AvroSchemasWeights'.lower()], filename)
                ret, excep = avro.write(datawr)
                if not ret:
                    logger.error('Customer:%s Job:%s %s' %
                                 (logger.customer, logger.job, repr(excep)))
                    raise SystemExit(1)

            j += 1

        if datawr or write_empty:
            custs = set([cust for job, cust in jobcust])
            for cust in custs:
                jobs = [job for job, lcust in jobcust if cust == lcust]
                logger.info(
                    'Customer:%s Jobs:%s Sites:%d' %
                    (confcust.get_custname(cust), jobs[0] if len(jobs) == 1
                     else '({0})'.format(','.join(jobs)), len(datawr)))
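
Several examples call datestamp(1).replace('_', '-') when no fixed date is given. Combined with the '%Y_%m_%d' stamps produced in Example #1, datestamp presumably returns an underscore-separated stamp for a date N days in the past; a hedged sketch:

import datetime

def datestamp(days_back=0):
    # assumed helper: 'YYYY_MM_DD' stamp for a date `days_back` days
    # ago, matching the datestamp(1).replace('_', '-') calls above
    when = datetime.datetime.today() - datetime.timedelta(days=days_back)
    return when.strftime('%Y_%m_%d')
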
Example #5
import unittest
import json

from argo_egi_connectors.log import Logger
from argo_egi_connectors.mesh.storage_element_path import attach_sepath_topodata

from bonsai import LDAPEntry

logger = Logger('test_contactfeed.py')
CUSTOMER_NAME = 'CUSTOMERFOO'


class MeshSePathAndTopodata(unittest.TestCase):
    def setUp(self):
        logger.customer = CUSTOMER_NAME
        with open('tests/sample-bdii_sepaths.json') as fh:
            content = fh.read()
            self.sample_ldap = json.loads(content)
        self.maxDiff = None
        self.bdiiopts = {
            'bdii':
            'True',
            'bdiihost':
            'bdii.egi.cro-ngi.hr',
            'bdiiport':
            '2170',
            'bdiiqueryattributessepath':
            'GlueVOInfoAccessControlBaseRule GlueVOInfoPath',
            'bdiiqueryattributessrm':
            'GlueServiceEndpoint',
            'bdiiquerybase':
Example #6
def main():
    global logger, globopts
    parser = argparse.ArgumentParser(
        description='Fetch metric profile for every job of the customer')
    parser.add_argument('-c',
                        dest='custconf',
                        nargs=1,
                        metavar='customer.conf',
                        help='path to customer configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-g',
                        dest='gloconf',
                        nargs=1,
                        metavar='global.conf',
                        help='path to global configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-d',
                        dest='date',
                        metavar='YEAR-MONTH-DAY',
                        help='write data for this date',
                        type=str,
                        required=False)
    args = parser.parse_args()

    logger = Logger(os.path.basename(sys.argv[0]))

    fixed_date = None
    if args.date and date_check(args.date):
        fixed_date = args.date

    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(sys.argv[0], confpath)
    globopts = cglob.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    confcust.make_dirstruct(globopts['InputStateSaveDir'.lower()])

    loop = uvloop.new_event_loop()
    asyncio.set_event_loop(loop)

    for cust in confcust.get_customers():
        custname = confcust.get_custname(cust)

        for job in confcust.get_jobs(cust):
            logger.customer = confcust.get_custname(cust)
            logger.job = job

            profiles = confcust.get_profiles(job)
            webapi_custopts = confcust.get_webapiopts(cust)
            webapi_opts = cglob.merge_opts(webapi_custopts, 'webapi')
            webapi_complete, missopt = cglob.is_complete(webapi_opts, 'webapi')

            if not webapi_complete:
                logger.error(
                    'Customer:%s Job:%s %s options incomplete, missing %s' %
                    (custname, logger.job, 'webapi', ' '.join(missopt)))
                continue

            try:
                res = loop.run_until_complete(
                    fetch_data(webapi_opts['webapihost'],
                               webapi_opts['webapitoken']))

                fetched_profiles = parse_source(res, profiles,
                                                confcust.get_namespace(job))

                loop.run_until_complete(
                    write_state(cust, job, confcust, fixed_date, True))

                if eval(globopts['GeneralWriteAvro'.lower()]):
                    write_avro(cust, job, confcust, fixed_date,
                               fetched_profiles)

                logger.info('Customer:' + custname + ' Job:' + job +
                            ' Profiles:%s Tuples:%d' %
                            (', '.join(profiles), len(fetched_profiles)))

            except (ConnectorHttpError, KeyboardInterrupt,
                    ConnectorParseError) as exc:
                logger.error(repr(exc))
                loop.run_until_complete(
                    write_state(cust, job, confcust, fixed_date, False))
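
Example #6 delegates the HTTP work to a fetch_data coroutine that is not shown. Given the SessionWithRetry import in Example #3, it is presumably built on aiohttp; a minimal stand-in (endpoint path and header name are assumptions, not the connector's actual code):

import aiohttp

async def fetch_data(host, token):
    # assumed sketch: GET the metric profiles from the WEB-API host,
    # authenticating with the per-tenant token
    url = 'https://%s/api/v2/metric_profiles' % host
    headers = {'x-api-key': token, 'Accept': 'application/json'}
    async with aiohttp.ClientSession(headers=headers) as session:
        async with session.get(url) as resp:
            return await resp.text()
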
Example #7
def main():
    global logger, globopts
    parser = argparse.ArgumentParser(
        description="""Fetch weights information from Gstat provider
                                                    for every job listed in customer.conf"""
    )
    parser.add_argument('-c',
                        dest='custconf',
                        nargs=1,
                        metavar='customer.conf',
                        help='path to customer configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-g',
                        dest='gloconf',
                        nargs=1,
                        metavar='global.conf',
                        help='path to global configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-d',
                        dest='date',
                        metavar='YEAR-MONTH-DAY',
                        help='write data for this date',
                        type=str,
                        required=False)
    args = parser.parse_args()

    logger = Logger(os.path.basename(sys.argv[0]))

    fixed_date = None
    if args.date and date_check(args.date):
        fixed_date = args.date

    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(sys.argv[0], confpath)
    globopts = cglob.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    confcust.make_dirstruct(globopts['InputStateSaveDir'.lower()])
    feeds = confcust.get_mapfeedjobs(sys.argv[0], deffeed=VAPORPI)

    loop = uvloop.new_event_loop()
    asyncio.set_event_loop(loop)

    for feed, jobcust in feeds.items():
        customers = set(map(lambda jc: confcust.get_custname(jc[1]), jobcust))
        customers = customers.pop() if len(customers) == 1 else '({0})'.format(
            ','.join(customers))
        sjobs = set(map(lambda jc: jc[0], jobcust))
        jobs = list(sjobs)[0] if len(sjobs) == 1 else '({0})'.format(
            ','.join(sjobs))
        logger.job = jobs
        logger.customer = customers

        try:
            res = loop.run_until_complete(fetch_data(feed))
            weights = parse_source(res)

            for job, cust in jobcust:
                logger.customer = confcust.get_custname(cust)
                logger.job = job

                write_empty = confcust.send_empty(sys.argv[0], cust)

                if write_empty:
                    weights = []

                webapi_opts = get_webapi_opts(cust, job, cglob, confcust)

                if eval(globopts['GeneralPublishWebAPI'.lower()]):
                    loop.run_until_complete(
                        send_webapi(job, confcust, webapi_opts, fixed_date,
                                    weights))

                if eval(globopts['GeneralWriteAvro'.lower()]):
                    write_avro(cust, job, confcust, fixed_date, weights)

                loop.run_until_complete(
                    write_state(cust, job, confcust, fixed_date, True))

            # log one summary per customer once all jobs for this feed
            # have been processed (cf. the identical block in Example #4)
            if weights or write_empty:
                custs = set([cust for job, cust in jobcust])
                for cust in custs:
                    jobs = [job for job, lcust in jobcust if cust == lcust]
                    logger.info(
                        'Customer:%s Jobs:%s Sites:%d' %
                        (confcust.get_custname(cust), jobs[0] if len(jobs)
                         == 1 else '({0})'.format(','.join(jobs)),
                         len(weights)))

        except (ConnectorHttpError, ConnectorParseError,
                KeyboardInterrupt) as exc:
            logger.error(repr(exc))
            for job, cust in jobcust:
                loop.run_until_complete(
                    write_state(cust, job, confcust, fixed_date, False))
Example #8
def main():
    parser = optparse.OptionParser(
        description="""Filters consumer messages based on various criteria
                                                    (allowed NGIs, service flavours, metrics...)"""
    )
    parser.add_option('-g',
                      dest='gloconf',
                      nargs=1,
                      metavar='global.conf',
                      help='path to global configuration file',
                      type=str)

    group = optparse.OptionGroup(parser, 'Compute Engine usage')
    group.add_option('-d', dest='date', nargs=1, metavar='YEAR-MONTH-DAY')
    parser.add_option_group(group)
    group = optparse.OptionGroup(parser, 'Debugging usage')
    group.add_option('-c',
                     dest='cfile',
                     default=None,
                     metavar='consumer_log_YEAR-MONTH-DAY.avro')
    group.add_option('-p',
                     dest='pfile',
                     default=None,
                     metavar='poem_sync_YEAR-MONTH-DAY.out')
    group.add_option('-o',
                     dest='ofile',
                     default=None,
                     metavar='output_YEAR-MONTH-DAY.avro')
    parser.add_option_group(group)

    (options, args) = parser.parse_args()

    global logger
    logger = Logger(os.path.basename(sys.argv[0]))

    confpath = options.gloconf if options.gloconf else None
    cglob = Global(sys.argv[0], confpath)
    global globopts
    globopts = cglob.parse()

    if options.cfile and options.date:
        parser.print_help()
        raise SystemExit(1)
    elif options.cfile:
        fname = options.cfile
        date = options.cfile.split('_')[-1]
        date = date.split('.')[0]
        date = date.split('-')
    elif options.date:
        date = options.date.split('-')
    else:
        parser.print_help()
        raise SystemExit(1)

    if len(date) == 0 or len(date) != 3:
        logger.error(
            'Consumer file does not end with correctly formatted date')
        parser.print_help()
        raise SystemExit(1)

    year, month, day = date

    # avro files
    if options.cfile:
        inputFile = options.cfile
    else:
        inputFile = filename_date(
            logger,
            globopts['PrefilterConsumerFilePath'.lower()],
            '',
            stamp=year + '-' + month + '-' + day)
    if options.ofile:
        fname = options.ofile + '_DATE.avro'
        outputFile = filename_date(logger,
                                   fname,
                                   '',
                                   stamp=year + '_' + month + '_' + day)
    else:
        outputFile = filename_date(logger,
                                   globopts['OutputPrefilter'.lower()],
                                   '',
                                   stamp=year + '_' + month + '_' + day)

    try:
        schema = avro.schema.parse(
            open(globopts['AvroSchemasPrefilter'.lower()]).read())
        writer = DataFileWriter(open(outputFile, "w"), DatumWriter(), schema)
        reader = DataFileReader(open(inputFile, "r"), DatumReader())
    except IOError as e:
        logger.error(str(e))
        raise SystemExit(1)

    # load poem data
    if options.pfile:
        ngis = loadNGIs(poemfile=options.pfile)
        profiles = loadFilteredProfiles(poemfile=options.pfile)
    else:
        ngis = loadNGIs(year, month, day)
        profiles = loadFilteredProfiles(year, month, day)
    nameMapping = loadNameMapping(year, month, day)

    s = time.time()
    msgs, msgswrit, msgsfilt, falsemonhost, falseroc, falseprofile = prefilterit(
        reader, writer, ngis, profiles, nameMapping)
    e = time.time()

    logger.info(
        'ExecTime:%.2fs ConsumerDate:%s Read:%d Written:%d Filtered:%d(Monitoring_Host:%d,ROC:%d,ServiceTypes_Metrics:%d)'
        % (round(e - s, 2), year + '-' + month + '-' + day, msgs, msgswrit,
           msgsfilt, falsemonhost, falseroc, falseprofile))

    reader.close()
    writer.close()
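
filename_date() appears throughout these examples; the options.ofile + '_DATE.avro' line above suggests it substitutes a DATE placeholder in the configured filename template. A sketch under that assumption (the logger parameter is kept only for signature parity):

import datetime
import os

def filename_date(logger, template, directory, stamp=None):
    # assumed helper: replace the DATE placeholder in `template` with
    # the given stamp, defaulting to today's 'YYYY_MM_DD'
    stamp = stamp or datetime.datetime.today().strftime('%Y_%m_%d')
    return os.path.join(directory, template.replace('DATE', stamp))
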
Example #9
def main():
    global logger, globopts
    parser = argparse.ArgumentParser(
        description='Fetch downtimes from GOCDB for given date')
    parser.add_argument('-d',
                        dest='date',
                        nargs=1,
                        metavar='YEAR-MONTH-DAY',
                        required=True)
    parser.add_argument('-c',
                        dest='custconf',
                        nargs=1,
                        metavar='customer.conf',
                        help='path to customer configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-g',
                        dest='gloconf',
                        nargs=1,
                        metavar='global.conf',
                        help='path to global configuration file',
                        type=str,
                        required=False)
    args = parser.parse_args()

    logger = Logger(os.path.basename(sys.argv[0]))
    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(sys.argv[0], confpath)
    globopts = cglob.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    confcust.make_dirstruct(globopts['InputStateSaveDir'.lower()])
    feed = confcust.get_topofeed()
    logger.customer = confcust.get_custname()

    if len(args.date) == 0:
        parser.print_help()
        raise SystemExit(1)

    # calculate start and end times
    try:
        start = datetime.datetime.strptime(args.date[0], '%Y-%m-%d')
        end = datetime.datetime.strptime(args.date[0], '%Y-%m-%d')
        timestamp = start.strftime('%Y_%m_%d')
        start = start.replace(hour=0, minute=0, second=0)
        end = end.replace(hour=23, minute=59, second=59)
    except ValueError as exc:
        logger.error(exc)
        raise SystemExit(1)

    uidservtype = confcust.get_uidserviceendpoints()

    auth_custopts = confcust.get_authopts()
    auth_opts = cglob.merge_opts(auth_custopts, 'authentication')
    auth_complete, missing = cglob.is_complete(auth_opts, 'authentication')
    if not auth_complete:
        missing_err = ' '.join(missing)
        logger.error(
            'Customer:{} authentication options incomplete, missing {}'.format(
                logger.customer, missing_err))
        raise SystemExit(1)

    loop = uvloop.new_event_loop()
    asyncio.set_event_loop(loop)

    try:
        # we don't have multiple tenant definitions in one
        # customer file so we can safely assume one tenant/customer
        write_empty = confcust.send_empty(sys.argv[0])
        if not write_empty:
            res = loop.run_until_complete(
                fetch_data(feed, auth_opts, start, end))
            dts = parse_source(res, start, end, uidservtype)
        else:
            dts = []

        loop.run_until_complete(write_state(confcust, timestamp, True))

        webapi_opts = get_webapi_opts(cglob, confcust)

        if eval(globopts['GeneralPublishWebAPI'.lower()]):
            loop.run_until_complete(send_webapi(webapi_opts, args.date[0],
                                                dts))

        if dts or write_empty:
            cust = list(confcust.get_customers())[0]
            logger.info('Customer:%s Fetched Date:%s Endpoints:%d' %
                        (confcust.get_custname(cust), args.date[0], len(dts)))

        if eval(globopts['GeneralWriteAvro'.lower()]):
            write_avro(confcust, dts, timestamp)

    except (ConnectorHttpError, ConnectorParseError, KeyboardInterrupt) as exc:
        logger.error(repr(exc))
        loop.run_until_complete(write_state(confcust, timestamp, False))

    loop.close()
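
The async examples call a module-level write_state coroutine instead of output.write_state directly. Judging from the synchronous examples, it presumably fans the state flag out to every job state directory; a sketch for the signature used in Example #9 (an assumption, reusing the example's globopts and output globals):

import sys

async def write_state(confcust, timestamp, state):
    # assumed helper: persist the fetch state flag for each customer
    # job, mirroring the output.write_state calls in Example #1
    for cust in confcust.get_customers():
        for job in confcust.get_jobs(cust):
            statedir = confcust.get_fullstatedir(
                globopts['InputStateSaveDir'.lower()], cust, job)
            output.write_state(sys.argv[0], statedir, state,
                               globopts['InputStateDays'.lower()], timestamp)
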
Example #10
def main():
    global logger, globopts, confcust
    parser = argparse.ArgumentParser(description="""Fetch entities (ServiceGroups, Sites, Endpoints)
                                                    from GOCDB for every customer and job listed in customer.conf and write them
                                                    in an appropriate place""")
    parser.add_argument('-c', dest='custconf', nargs=1, metavar='customer.conf', help='path to customer configuration file', type=str, required=False)
    parser.add_argument('-g', dest='gloconf', nargs=1, metavar='global.conf', help='path to global configuration file', type=str, required=False)
    parser.add_argument('-d', dest='date', metavar='YEAR-MONTH-DAY', help='write data for this date', type=str, required=False)
    args = parser.parse_args()
    group_endpoints, group_groups = [], []
    logger = Logger(os.path.basename(sys.argv[0]))

    fixed_date = None
    if args.date and date_check(args.date):
        fixed_date = args.date

    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(sys.argv[0], confpath)
    globopts = cglob.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    confcust.make_dirstruct(globopts['InputStateSaveDir'.lower()])
    feeds = confcust.get_mapfeedjobs(sys.argv[0], 'GOCDB', deffeed='https://goc.egi.eu/gocdbpi/')

    for feed, jobcust in feeds.items():
        scopes = confcust.get_feedscopes(feed, jobcust)
        paging = confcust.is_paginated(feed, jobcust)
        auth_custopts = confcust.get_authopts(feed, jobcust)
        auth_opts = cglob.merge_opts(auth_custopts, 'authentication')
        auth_complete, missing = cglob.is_complete(auth_opts, 'authentication')
        if auth_complete:
            gocdb = GOCDBReader(feed, scopes, paging, auth=auth_opts)
        else:
            logger.error('%s options incomplete, missing %s' % ('authentication', ' '.join(missing)))
            continue

        for job, cust in jobcust:
            jobdir = confcust.get_fulldir(cust, job)
            jobstatedir = confcust.get_fullstatedir(globopts['InputStateSaveDir'.lower()], cust, job)

            global fetchtype, custname
            fetchtype = confcust.get_gocdb_fetchtype(job)
            custname = confcust.get_custname(cust)

            logger.customer = custname
            logger.job = job

            ams_custopts = confcust.get_amsopts(cust)
            ams_opts = cglob.merge_opts(ams_custopts, 'ams')
            ams_complete, missopt = cglob.is_complete(ams_opts, 'ams')
            if not ams_complete:
                logger.error('Customer:%s Job:%s %s options incomplete, missing %s' % (custname, logger.job, 'ams', ' '.join(missopt)))
                continue

            if fetchtype == 'ServiceGroups':
                group_endpoints = gocdb.getGroupOfServices()
            else:
                group_endpoints = gocdb.getGroupOfEndpoints()
            group_groups = gocdb.getGroupOfGroups()

            if fixed_date:
                output.write_state(sys.argv[0], jobstatedir, gocdb.state,
                                   globopts['InputStateDays'.lower()],
                                   fixed_date.replace('-', '_'))
            else:
                output.write_state(sys.argv[0], jobstatedir, gocdb.state,
                                   globopts['InputStateDays'.lower()])

            if not gocdb.state:
                continue

            numge = len(group_endpoints)
            numgg = len(group_groups)

            ggtags = confcust.get_gocdb_ggtags(job)
            getags = confcust.get_gocdb_getags(job)
            tf = TopoFilter(group_groups, group_endpoints, ggtags, getags)
            group_groups = tf.gg
            group_endpoints = tf.ge

            if eval(globopts['GeneralPublishAms'.lower()]):
                if fixed_date:
                    partdate = fixed_date
                else:
                    partdate = datestamp(1).replace('_', '-')

                ams = output.AmsPublish(ams_opts['amshost'],
                                        ams_opts['amsproject'],
                                        ams_opts['amstoken'],
                                        ams_opts['amstopic'],
                                        confcust.get_jobdir(job),
                                        ams_opts['amsbulk'],
                                        ams_opts['amspacksinglemsg'],
                                        logger,
                                        int(globopts['ConnectionRetry'.lower()]),
                                        int(globopts['ConnectionTimeout'.lower()]))

                ams.send(globopts['AvroSchemasTopologyGroupOfGroups'.lower()],
                         'group_groups', partdate, group_groups)

                ams.send(globopts['AvroSchemasTopologyGroupOfEndpoints'.lower()],
                         'group_endpoints', partdate, group_endpoints)

            if eval(globopts['GeneralWriteAvro'.lower()]):
                if fixed_date:
                    filename = filename_date(logger, globopts['OutputTopologyGroupOfGroups'.lower()], jobdir, fixed_date.replace('-', '_'))
                else:
                    filename = filename_date(logger, globopts['OutputTopologyGroupOfGroups'.lower()], jobdir)
                avro = output.AvroWriter(globopts['AvroSchemasTopologyGroupOfGroups'.lower()], filename)
                ret, excep = avro.write(group_groups)
                if not ret:
                    logger.error('Customer:%s Job:%s : %s' % (logger.customer, logger.job, repr(excep)))
                    raise SystemExit(1)

                if fixed_date:
                    filename = filename_date(logger, globopts['OutputTopologyGroupOfEndpoints'.lower()], jobdir, fixed_date.replace('-', '_'))
                else:
                    filename = filename_date(logger, globopts['OutputTopologyGroupOfEndpoints'.lower()], jobdir)
                avro = output.AvroWriter(globopts['AvroSchemasTopologyGroupOfEndpoints'.lower()], filename)
                ret, excep = avro.write(group_endpoints)
                if not ret:
                    logger.error('Customer:%s Job:%s : %s' % (logger.customer, logger.job, repr(excep)))
                    raise SystemExit(1)

            logger.info('Customer:'+custname+' Job:'+job+' Fetched Endpoints:%d' % (numge) +' Groups(%s):%d' % (fetchtype, numgg))
            if getags or ggtags:
                selstr = 'Customer:%s Job:%s Selected ' % (custname, job)
                selge, selgg = '', ''
                if getags:
                    for key, value in getags.items():
                        if isinstance(value, list):
                            value = '['+','.join(value)+']'
                        selge += '%s:%s,' % (key, value)
                    selstr += 'Endpoints(%s):' % selge[:len(selge) - 1]
                    selstr += '%d ' % (len(group_endpoints))
                if ggtags:
                    for key, value in ggtags.items():
                        if isinstance(value, list):
                            value = '['+','.join(value)+']'
                        selgg += '%s:%s,' % (key, value)
                    selstr += 'Groups(%s):' % selgg[:len(selgg) - 1]
                    selstr += '%d' % (len(group_groups))

                logger.info(selstr)
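
Example #10 filters the fetched topology with TopoFilter(group_groups, group_endpoints, ggtags, getags); the logging code after it shows that tag values may be scalars or lists. A hedged sketch of such tag matching (not the class's actual implementation):

def filter_by_tags(entities, seltags):
    # assumed semantics: keep entities whose tags match every selected
    # key; a list value means "any of these values is acceptable"
    def matches(entity):
        for key, wanted in seltags.items():
            wanted = wanted if isinstance(wanted, list) else [wanted]
            if entity.get('tags', {}).get(key) not in wanted:
                return False
        return True
    return [entity for entity in entities if matches(entity)]
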
Example #11
def main():
    global logger, globopts
    parser = argparse.ArgumentParser(description='Fetch POEM profile for every job of the customer and write POEM expanded profiles needed for prefilter for EGI customer')
    parser.add_argument('-c', dest='custconf', nargs=1, metavar='customer.conf', help='path to customer configuration file', type=str, required=False)
    parser.add_argument('-g', dest='gloconf', nargs=1, metavar='global.conf', help='path to global configuration file', type=str, required=False)
    parser.add_argument('-d', dest='date', metavar='YEAR-MONTH-DAY', help='write data for this date', type=str, required=False)
    args = parser.parse_args()

    logger = Logger(os.path.basename(sys.argv[0]))

    fixed_date = None
    if args.date and date_check(args.date):
        fixed_date = args.date

    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(sys.argv[0], confpath)
    globopts = cglob.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    confcust.make_dirstruct(globopts['InputStateSaveDir'.lower()])

    for cust in confcust.get_customers():

        custname = confcust.get_custname(cust)

        for job in confcust.get_jobs(cust):
            logger.customer = confcust.get_custname(cust)
            logger.job = job

            poemserver = dict()
            profiles = confcust.get_profiles(job)
            namespace = confcust.get_namespace(job)
            poemserver[confcust.get_poemserver_host(job)] = confcust.get_poemserver_vo(job)

            poem = PoemReader(custname, job)
            psa = poem.getProfiles(profiles, namespace, poemserver)

            jobdir = confcust.get_fulldir(cust, job)
            jobstatedir = confcust.get_fullstatedir(globopts['InputStateSaveDir'.lower()], cust, job)

            ams_custopts = confcust.get_amsopts(cust)
            ams_opts = cglob.merge_opts(ams_custopts, 'ams')
            ams_complete, missopt = cglob.is_complete(ams_opts, 'ams')
            if not ams_complete:
                logger.error('Customer:%s %s options incomplete, missing %s' % (custname, 'ams', ' '.join(missopt)))
                continue

            if fixed_date:
                output.write_state(sys.argv[0], jobstatedir,
                                   poem.state,
                                   globopts['InputStateDays'.lower()],
                                   fixed_date.replace('-', '_'))
            else:
                output.write_state(sys.argv[0], jobstatedir,
                                   poem.state,
                                   globopts['InputStateDays'.lower()])

            if not poem.state:
                continue

            lfprofiles = gen_outprofiles(psa, profiles)

            if eval(globopts['GeneralPublishAms'.lower()]):
                if fixed_date:
                    partdate = fixed_date
                else:
                    partdate = datestamp(1).replace('_', '-')

                ams = output.AmsPublish(ams_opts['amshost'],
                                        ams_opts['amsproject'],
                                        ams_opts['amstoken'],
                                        ams_opts['amstopic'],
                                        confcust.get_jobdir(job),
                                        ams_opts['amsbulk'],
                                        ams_opts['amspacksinglemsg'],
                                        logger,
                                        int(globopts['ConnectionRetry'.lower()]),
                                        int(globopts['ConnectionTimeout'.lower()]))

                ams.send(globopts['AvroSchemasPoem'.lower()], 'metric_profile',
                         partdate, lfprofiles)

            if eval(globopts['GeneralWriteAvro'.lower()]):
                if fixed_date:
                    filename = filename_date(logger, globopts['OutputPoem'.lower()], jobdir, fixed_date.replace('-', '_'))
                else:
                    filename = filename_date(logger, globopts['OutputPoem'.lower()], jobdir)
                avro = output.AvroWriter(globopts['AvroSchemasPoem'.lower()], filename)
                ret, excep = avro.write(lfprofiles)
                if not ret:
                    logger.error('Customer:%s Job:%s %s' % (logger.customer, logger.job, repr(excep)))
                    raise SystemExit(1)

            logger.info('Customer:'+custname+' Job:'+job+' Profiles:%s Tuples:%d' % (','.join(profiles), len(lfprofiles)))
Example #12
def main():
    global logger, globopts
    parser = argparse.ArgumentParser(description='Fetch metric profile for every job of the customer')
    parser.add_argument('-c', dest='custconf', nargs=1, metavar='customer.conf', help='path to customer configuration file', type=str, required=False)
    parser.add_argument('-g', dest='gloconf', nargs=1, metavar='global.conf', help='path to global configuration file', type=str, required=False)
    parser.add_argument('-d', dest='date', metavar='YEAR-MONTH-DAY', help='write data for this date', type=str, required=False)
    args = parser.parse_args()

    logger = Logger(os.path.basename(sys.argv[0]))

    fixed_date = None
    if args.date and date_check(args.date):
        fixed_date = args.date

    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(sys.argv[0], confpath)
    globopts = cglob.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    confcust.make_dirstruct(globopts['InputStateSaveDir'.lower()])

    for cust in confcust.get_customers():
        custname = confcust.get_custname(cust)

        for job in confcust.get_jobs(cust):
            logger.customer = confcust.get_custname(cust)
            logger.job = job

            profiles = confcust.get_profiles(job)
            webapi_custopts = confcust.get_webapiopts(cust)
            webapi_opts = cglob.merge_opts(webapi_custopts, 'webapi')
            webapi_complete, missopt = cglob.is_complete(webapi_opts, 'webapi')

            if not webapi_complete:
                logger.error('Customer:%s Job:%s %s options incomplete, missing %s' % (custname, logger.job, 'webapi', ' '.join(missopt)))
                continue

            webapi = WebAPI(custname, job, profiles, confcust.get_namespace(job),
                            webapi_opts['webapihost'],
                            webapi_opts['webapitoken'])
            fetched_profiles = webapi.get_profiles()

            jobdir = confcust.get_fulldir(cust, job)
            jobstatedir = confcust.get_fullstatedir(globopts['InputStateSaveDir'.lower()], cust, job)

            ams_custopts = confcust.get_amsopts(cust)
            ams_opts = cglob.merge_opts(ams_custopts, 'ams')
            ams_complete, missopt = cglob.is_complete(ams_opts, 'ams')
            if not ams_complete:
                logger.error('Customer:%s %s options incomplete, missing %s' % (custname, 'ams', ' '.join(missopt)))
                continue

            if fixed_date:
                output.write_state(sys.argv[0], jobstatedir,
                                   webapi.state,
                                   globopts['InputStateDays'.lower()],
                                   fixed_date.replace('-', '_'))
            else:
                output.write_state(sys.argv[0], jobstatedir,
                                   webapi.state,
                                   globopts['InputStateDays'.lower()])

            if not webapi.state:
                continue

            if eval(globopts['GeneralPublishAms'.lower()]):
                if fixed_date:
                    partdate = fixed_date
                else:
                    partdate = datestamp(1).replace('_', '-')

                ams = output.AmsPublish(ams_opts['amshost'],
                                        ams_opts['amsproject'],
                                        ams_opts['amstoken'],
                                        ams_opts['amstopic'],
                                        confcust.get_jobdir(job),
                                        ams_opts['amsbulk'],
                                        ams_opts['amspacksinglemsg'],
                                        logger,
                                        int(globopts['ConnectionRetry'.lower()]),
                                        int(globopts['ConnectionTimeout'.lower()]))

                ams.send(globopts['AvroSchemasMetricProfile'.lower()], 'metric_profile',
                         partdate, fetched_profiles)

            if eval(globopts['GeneralWriteAvro'.lower()]):
                if fixed_date:
                    filename = filename_date(logger, globopts['OutputMetricProfile'.lower()], jobdir, fixed_date.replace('-', '_'))
                else:
                    filename = filename_date(logger, globopts['OutputMetricProfile'.lower()], jobdir)
                avro = output.AvroWriter(globopts['AvroSchemasMetricProfile'.lower()], filename)
                ret, excep = avro.write(fetched_profiles)
                if not ret:
                    logger.error('Customer:%s Job:%s %s' % (logger.customer, logger.job, repr(excep)))
                    raise SystemExit(1)

            logger.info('Customer:' + custname + ' Job:' + job + ' Profiles:%s Tuples:%d' % (', '.join(profiles), len(fetched_profiles)))
Example #13
def main():
    global logger, globopts, confcust
    parser = argparse.ArgumentParser(
        description="""Fetch entities (ServiceGroups, Sites, Endpoints)
                                                    from CSV topology feed for every customer and job listed in customer.conf and write them
                                                    in an appropriate place""")
    parser.add_argument('-c',
                        dest='custconf',
                        nargs=1,
                        metavar='customer.conf',
                        help='path to customer configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-g',
                        dest='gloconf',
                        nargs=1,
                        metavar='global.conf',
                        help='path to global configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-d',
                        dest='date',
                        metavar='YEAR-MONTH-DAY',
                        help='write data for this date',
                        type=str,
                        required=False)
    args = parser.parse_args()
    group_endpoints, group_groups = [], []
    logger = Logger(os.path.basename(sys.argv[0]))

    fixed_date = None
    if args.date and date_check(args.date):
        fixed_date = args.date

    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(sys.argv[0], confpath)
    globopts = cglob.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    confcust.make_dirstruct(globopts['InputStateSaveDir'.lower()])
    topofeed = confcust.get_topofeed()
    uidservtype = confcust.get_uidserviceendpoints()
    topofetchtype = confcust.get_topofetchtype()
    custname = confcust.get_custname()
    logger.customer = custname

    auth_custopts = confcust.get_authopts()
    auth_opts = cglob.merge_opts(auth_custopts, 'authentication')
    auth_complete, missing = cglob.is_complete(auth_opts, 'authentication')
    if not auth_complete:
        logger.error('%s options incomplete, missing %s' %
                     ('authentication', ' '.join(missing)))
        raise SystemExit(1)

    loop = uvloop.new_event_loop()
    asyncio.set_event_loop(loop)

    try:
        group_endpoints, group_groups = list(), list()

        # fetch topology data concurrently in coroutines
        fetched_topology = loop.run_until_complete(
            fetch_data(topofeed, auth_opts))

        group_groups, group_endpoints = parse_source_topo(
            fetched_topology, custname, uidservtype)
        contacts = ParseContacts(logger,
                                 fetched_topology,
                                 uidservtype,
                                 is_csv=True).get_contacts()
        attach_contacts_topodata(logger, contacts, group_endpoints)

        loop.run_until_complete(write_state(confcust, fixed_date, True))

        webapi_opts = get_webapi_opts(cglob, confcust)

        numgg = len(group_groups)
        numge = len(group_endpoints)

        # send concurrently to WEB-API in coroutines
        if eval(globopts['GeneralPublishWebAPI'.lower()]):
            loop.run_until_complete(
                asyncio.gather(
                    send_webapi(webapi_opts, group_groups, 'groups',
                                fixed_date),
                    send_webapi(webapi_opts, group_endpoints, 'endpoints',
                                fixed_date)))

        if eval(globopts['GeneralWriteAvro'.lower()]):
            write_avro(confcust, group_groups, group_endpoints, fixed_date)

        logger.info('Customer:' + custname + ' Type:%s ' %
                    (','.join(topofetchtype)) + 'Fetched Endpoints:%d' %
                    (numge) + ' Groups:%d' % (numgg))

    except (ConnectorHttpError, ConnectorParseError, KeyboardInterrupt) as exc:
        logger.error(repr(exc))
        loop.run_until_complete(write_state(confcust, fixed_date, False))

    finally:
        loop.close()
Example #14
def main():
    global logger, globopts, confcust
    parser = argparse.ArgumentParser(
        description="""Fetch entities (ServiceGroups, Sites, Endpoints)
                                                    from GOCDB for every customer and job listed in customer.conf and write them
                                                    in an appropriate place""")
    parser.add_argument('-c',
                        dest='custconf',
                        nargs=1,
                        metavar='customer.conf',
                        help='path to customer configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-g',
                        dest='gloconf',
                        nargs=1,
                        metavar='global.conf',
                        help='path to global configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-d',
                        dest='date',
                        metavar='YEAR-MONTH-DAY',
                        help='write data for this date',
                        type=str,
                        required=False)
    args = parser.parse_args()
    group_endpoints, group_groups = [], []
    logger = Logger(os.path.basename(sys.argv[0]))

    fixed_date = None
    if args.date and date_check(args.date):
        fixed_date = args.date

    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(sys.argv[0], confpath)
    globopts = cglob.parse()
    pass_extensions = eval(globopts['GeneralPassExtensions'.lower()])

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    confcust.make_dirstruct(globopts['InputStateSaveDir'.lower()])
    topofeed = confcust.get_topofeed()
    topofeedpaging = confcust.get_topofeedpaging()
    uidservtype = confcust.get_uidserviceendpoints()
    topofetchtype = confcust.get_topofetchtype()
    custname = confcust.get_custname()
    logger.customer = custname

    auth_custopts = confcust.get_authopts()
    auth_opts = cglob.merge_opts(auth_custopts, 'authentication')
    auth_complete, missing = cglob.is_complete(auth_opts, 'authentication')
    if not auth_complete:
        logger.error('%s options incomplete, missing %s' %
                     ('authentication', ' '.join(missing)))
        raise SystemExit(1)

    bdii_opts = get_bdii_opts(confcust)

    loop = uvloop.new_event_loop()
    asyncio.set_event_loop(loop)

    group_endpoints, group_groups = list(), list()
    parsed_site_contacts, parsed_servicegroups_contacts, parsed_serviceendpoint_contacts = None, None, None

    try:
        contact_coros = [
            fetch_data(topofeed + SITE_CONTACTS, auth_opts, False),
            fetch_data(topofeed + SERVICEGROUP_CONTACTS, auth_opts, False)
        ]
        contacts = loop.run_until_complete(
            asyncio.gather(*contact_coros, return_exceptions=True))

        exc_raised, exc = contains_exception(contacts)
        if exc_raised:
            raise ConnectorHttpError(repr(exc))

        parsed_site_contacts = parse_source_sitescontacts(
            contacts[0], custname)
        parsed_servicegroups_contacts = parse_source_servicegroupsroles(
            contacts[1], custname)

    except (ConnectorHttpError, ConnectorParseError) as exc:
        logger.warn(
            'SITE_CONTACTS and SERVICEGROUP_CONTACTS methods not implemented')

    try:
        toposcope = confcust.get_toposcope()
        topofeedendpoints = confcust.get_topofeedendpoints()
        topofeedservicegroups = confcust.get_topofeedservicegroups()
        topofeedsites = confcust.get_topofeedsites()
        global SERVICE_ENDPOINTS_PI, SERVICE_GROUPS_PI, SITES_PI
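        # compose the final PI URLs: an optional scope filter is appended to
        # the default PI paths; an explicit per-feed URL from customer.conf
        # then replaces a PI entirely, otherwise the topofeed host is
        # prepended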
        if toposcope:
            SERVICE_ENDPOINTS_PI = SERVICE_ENDPOINTS_PI + toposcope
            SERVICE_GROUPS_PI = SERVICE_GROUPS_PI + toposcope
            SITES_PI = SITES_PI + toposcope
        if topofeedendpoints:
            SERVICE_ENDPOINTS_PI = topofeedendpoints
        else:
            SERVICE_ENDPOINTS_PI = topofeed + SERVICE_ENDPOINTS_PI
        if topofeedservicegroups:
            SERVICE_GROUPS_PI = topofeedservicegroups
        else:
            SERVICE_GROUPS_PI = topofeed + SERVICE_GROUPS_PI
        if topofeedsites:
            SITES_PI = topofeedsites
        else:
            SITES_PI = topofeed + SITES_PI

        fetched_sites, fetched_servicegroups, fetched_endpoints = None, None, None
        fetched_bdii = None

        coros = [fetch_data(SERVICE_ENDPOINTS_PI, auth_opts, topofeedpaging)]
        if 'servicegroups' in topofetchtype:
            coros.append(
                fetch_data(SERVICE_GROUPS_PI, auth_opts, topofeedpaging))
        if 'sites' in topofetchtype:
            coros.append(fetch_data(SITES_PI, auth_opts, topofeedpaging))

        if bdii_opts and eval(bdii_opts['bdii']):
            host = bdii_opts['bdiihost']
            port = bdii_opts['bdiiport']
            base = bdii_opts['bdiiquerybase']

            coros.append(
                fetch_ldap_data(
                    host, port, base, bdii_opts['bdiiqueryfiltersrm'],
                    bdii_opts['bdiiqueryattributessrm'].split(' ')))

            coros.append(
                fetch_ldap_data(
                    host, port, base, bdii_opts['bdiiqueryfiltersepath'],
                    bdii_opts['bdiiqueryattributessepath'].split(' ')))

        # fetch topology data concurrently in coroutines
        fetched_topology = loop.run_until_complete(
            asyncio.gather(*coros, return_exceptions=True))

        fetched_endpoints = fetched_topology[0]
        if bdii_opts and eval(bdii_opts['bdii']):
            fetched_bdii = list()
            fetched_bdii.append(fetched_topology[-2])
            fetched_bdii.append(fetched_topology[-1])
        if 'sites' in topofetchtype and 'servicegroups' in topofetchtype:
            fetched_servicegroups, fetched_sites = (fetched_topology[1],
                                                    fetched_topology[2])
        elif 'sites' in topofetchtype:
            fetched_sites = fetched_topology[1]
        elif 'servicegroups' in topofetchtype:
            fetched_servicegroups = fetched_topology[1]

        exc_raised, exc = contains_exception(fetched_topology)
        if exc_raised:
            raise ConnectorHttpError(repr(exc))

        # process data in parallel using multiprocessing
        executor = ProcessPoolExecutor(max_workers=3)
        parse_workers = list()
        exe_parse_source_endpoints = partial(parse_source_endpoints,
                                             fetched_endpoints, custname,
                                             uidservtype, pass_extensions)
        exe_parse_source_servicegroups = partial(parse_source_servicegroups,
                                                 fetched_servicegroups,
                                                 custname, uidservtype,
                                                 pass_extensions)
        exe_parse_source_sites = partial(parse_source_sites, fetched_sites,
                                         custname, uidservtype,
                                         pass_extensions)

        # parse topology depending on the configured components to fetch; we
        # can fetch only sites, only servicegroups or both.
        if fetched_servicegroups and fetched_sites:
            parse_workers.append(
                loop.run_in_executor(executor, exe_parse_source_endpoints))
            parse_workers.append(
                loop.run_in_executor(executor, exe_parse_source_servicegroups))
            parse_workers.append(
                loop.run_in_executor(executor, exe_parse_source_sites))
        elif fetched_servicegroups and not fetched_sites:
            parse_workers.append(
                loop.run_in_executor(executor, exe_parse_source_servicegroups))
        elif fetched_sites and not fetched_servicegroups:
            parse_workers.append(
                loop.run_in_executor(executor, exe_parse_source_endpoints))
            parse_workers.append(
                loop.run_in_executor(executor, exe_parse_source_sites))

        parsed_topology = loop.run_until_complete(
            asyncio.gather(*parse_workers))

        if fetched_servicegroups and fetched_sites:
            group_endpoints = parsed_topology[0]
            group_groups, group_endpoints_sg = parsed_topology[1]
            group_endpoints += group_endpoints_sg
            group_groups += parsed_topology[2]
        elif fetched_servicegroups and not fetched_sites:
            group_groups, group_endpoints = parsed_topology[0]
        elif fetched_sites and not fetched_servicegroups:
            group_endpoints = parsed_topology[0]
            group_groups = parsed_topology[1]

        # check if we fetched SRM port info and attach it to the appropriate
        # endpoint data
        if bdii_opts and eval(bdii_opts['bdii']):
            attach_srmport_topodata(
                logger, bdii_opts['bdiiqueryattributessrm'].split(' ')[0],
                fetched_bdii[0], group_endpoints)
            attach_sepath_topodata(
                logger, bdii_opts['bdiiqueryattributessepath'].split(' ')[0],
                fetched_bdii[1], group_endpoints)

        # parse contacts from fetched service endpoints topology, if there are
        # any
        parsed_serviceendpoint_contacts = parse_source_serviceendpoints_contacts(
            fetched_endpoints, custname)

        if not parsed_site_contacts and fetched_sites:
            # GOCDB does not expose SITE_CONTACTS; try to grab contacts from
            # the fetched sites topology entities
            parsed_site_contacts = parse_source_siteswithcontacts(
                fetched_sites, custname)

        # use a fresh two-worker pool for the two contact-attach tasks; it
        # must exist before run_in_executor submits the tasks
        executor = ProcessPoolExecutor(max_workers=2)
        attach_contacts_workers = [
            loop.run_in_executor(
                executor,
                partial(attach_contacts_topodata, logger, parsed_site_contacts,
                        group_groups)),
            loop.run_in_executor(
                executor,
                partial(attach_contacts_topodata, logger,
                        parsed_serviceendpoint_contacts, group_endpoints))
        ]

        group_groups, group_endpoints = loop.run_until_complete(
            asyncio.gather(*attach_contacts_workers))

        if parsed_servicegroups_contacts:
            attach_contacts_topodata(logger, parsed_servicegroups_contacts,
                                     group_groups)
        elif fetched_servicegroups:
            # GOCDB does not expose SERVICEGROUP_CONTACTS; try to grab
            # contacts from the fetched servicegroups topology entities
            parsed_servicegroups_contacts = parse_source_servicegroupscontacts(
                fetched_servicegroups, custname)
            attach_contacts_topodata(logger, parsed_servicegroups_contacts,
                                     group_groups)

        loop.run_until_complete(write_state(confcust, fixed_date, True))

        webapi_opts = get_webapi_opts(cglob, confcust)

        numge = len(group_endpoints)
        numgg = len(group_groups)

        # send concurrently to WEB-API in coroutines
        if eval(globopts['GeneralPublishWebAPI'.lower()]):
            loop.run_until_complete(
                asyncio.gather(
                    send_webapi(webapi_opts, group_groups, 'groups',
                                fixed_date),
                    send_webapi(webapi_opts, group_endpoints, 'endpoints',
                                fixed_date)))

        if eval(globopts['GeneralWriteAvro'.lower()]):
            write_avro(confcust, group_groups, group_endpoints, fixed_date)

        logger.info('Customer:%s Type:%s Fetched Endpoints:%d Groups:%d' %
                    (custname, ','.join(topofetchtype), numge, numgg))

    except (ConnectorParseError, ConnectorHttpError, KeyboardInterrupt) as exc:
        logger.error(repr(exc))
        loop.run_until_complete(write_state(confcust, fixed_date, False))

    finally:
        loop.close()
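
contains_exception, used right after asyncio.gather(..., return_exceptions=True) above, just scans the gathered results for the first exception; a minimal sketch under that assumption:

def contains_exception(results):
    # return (True, first_exception) if any gathered result is an exception
    for res in results:
        if isinstance(res, Exception):
            return True, res
    return False, None
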
def main():
    global logger, globopts
    parser = argparse.ArgumentParser(description="""Fetch weights information from Gstat provider
                                                    for every job listed in customer.conf""")
    parser.add_argument('-c', dest='custconf', nargs=1, metavar='customer.conf', help='path to customer configuration file', type=str, required=False)
    parser.add_argument('-g', dest='gloconf', nargs=1, metavar='global.conf', help='path to global configuration file', type=str, required=False)
    parser.add_argument('-d', dest='date', metavar='YEAR-MONTH-DAY', help='write data for this date', type=str, required=False)
    args = parser.parse_args()

    logger = Logger(os.path.basename(sys.argv[0]))

    fixed_date = None
    if args.date and date_check(args.date):
        fixed_date = args.date

    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(sys.argv[0], confpath)
    globopts = cglob.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    confcust.make_dirstruct(globopts['InputStateSaveDir'.lower()])
    feeds = confcust.get_mapfeedjobs(sys.argv[0], deffeed=VAPORPI)

    for feed, jobcust in feeds.items():
        weights = Vapor(feed)
        datawr = None

        customers = set(map(lambda jc: confcust.get_custname(jc[1]), jobcust))
        customers = customers.pop() if len(customers) == 1 else '({0})'.format(','.join(customers))
        jobs = set(map(lambda jc: jc[0], jobcust))
        jobs = jobs.pop() if len(jobs) == 1 else '({0})'.format(','.join(jobs))
        logger.job = jobs
        logger.customer = customers

        w = weights.getWeights()

        for job, cust in jobcust:
            logger.customer = confcust.get_custname(cust)
            logger.job = job

            jobdir = confcust.get_fulldir(cust, job)
            jobstatedir = confcust.get_fullstatedir(globopts['InputStateSaveDir'.lower()], cust, job)

            custname = confcust.get_custname(cust)
            ams_custopts = confcust.get_amsopts(cust)
            ams_opts = cglob.merge_opts(ams_custopts, 'ams')
            ams_complete, missopt = cglob.is_complete(ams_opts, 'ams')
            if not ams_complete:
                logger.error('Customer:%s %s options incomplete, missing %s' % (custname, 'ams', ' '.join(missopt)))
                continue

            if fixed_date:
                output.write_state(sys.argv[0], jobstatedir, weights.state,
                                   globopts['InputStateDays'.lower()],
                                   fixed_date.replace('-', '_'))
            else:
                output.write_state(sys.argv[0], jobstatedir, weights.state,
                                   globopts['InputStateDays'.lower()])

            if not weights.state:
                continue

            datawr = data_out(w)
            if eval(globopts['GeneralPublishAms'.lower()]):
                if fixed_date:
                    partdate = fixed_date
                else:
                    partdate = datestamp(1).replace('_', '-')

                ams = output.AmsPublish(ams_opts['amshost'],
                                        ams_opts['amsproject'],
                                        ams_opts['amstoken'],
                                        ams_opts['amstopic'],
                                        confcust.get_jobdir(job),
                                        ams_opts['amsbulk'],
                                        ams_opts['amspacksinglemsg'],
                                        logger,
                                        int(globopts['ConnectionRetry'.lower()]),
                                        int(globopts['ConnectionTimeout'.lower()]))

                ams.send(globopts['AvroSchemasWeights'.lower()], 'weights',
                         partdate, datawr)

            if eval(globopts['GeneralWriteAvro'.lower()]):
                if fixed_date:
                    filename = filename_date(logger, globopts['OutputWeights'.lower()], jobdir, fixed_date.replace('-', '_'))
                else:
                    filename = filename_date(logger, globopts['OutputWeights'.lower()], jobdir)
                avro = output.AvroWriter(globopts['AvroSchemasWeights'.lower()], filename)
                ret, excep = avro.write(datawr)
                if not ret:
                    logger.error('Customer:%s Job:%s %s' % (logger.customer, logger.job, repr(excep)))
                    raise SystemExit(1)

        if datawr:
            custs = set([cust for job, cust in jobcust])
            for cust in custs:
                jobs = [job for job, lcust in jobcust if cust == lcust]
                logger.info('Customer:%s Jobs:%s Sites:%d' % (confcust.get_custname(cust),
                                                              jobs[0] if len(jobs) == 1 else '({0})'.format(','.join(jobs)),
                                                              len(datawr)))
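
Every example guards the -d argument with date_check before splicing it into state files and filenames; a minimal sketch, assuming it only validates the YEAR-MONTH-DAY form:

import re


def date_check(arg):
    # accept only a strict YYYY-MM-DD string for the -d argument
    return bool(re.match(r'\d{4}-\d{2}-\d{2}$', arg))
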
Example #16
0
def main():
    global logger, globopts, confcust
    parser = argparse.ArgumentParser(
        description="""Fetch entities (ServiceGroups, Sites, Endpoints)
                                                    from GOCDB for every customer and job listed in customer.conf and write them
                                                    in an appropriate place""")
    parser.add_argument('-c',
                        dest='custconf',
                        nargs=1,
                        metavar='customer.conf',
                        help='path to customer configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-g',
                        dest='gloconf',
                        nargs=1,
                        metavar='global.conf',
                        help='path to global configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-d',
                        dest='date',
                        metavar='YEAR-MONTH-DAY',
                        help='write data for this date',
                        type=str,
                        required=False)
    args = parser.parse_args()
    group_endpoints, group_groups = [], []
    logger = Logger(os.path.basename(sys.argv[0]))

    fixed_date = None
    if args.date and date_check(args.date):
        fixed_date = args.date

    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(sys.argv[0], confpath)
    globopts = cglob.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    confcust.make_dirstruct(globopts['InputStateSaveDir'.lower()])
    feeds = confcust.get_mapfeedjobs(sys.argv[0],
                                     'GOCDB',
                                     deffeed='https://goc.egi.eu/gocdbpi/')

    for feed, jobcust in feeds.items():
        scopes = confcust.get_feedscopes(feed, jobcust)
        paging = confcust.is_paginated(feed, jobcust)
        auth_custopts = confcust.get_authopts(feed, jobcust)
        auth_opts = cglob.merge_opts(auth_custopts, 'authentication')
        auth_complete, missing = cglob.is_complete(auth_opts, 'authentication')
        if auth_complete:
            gocdb = GOCDBReader(feed, scopes, paging, auth=auth_opts)
        else:
            logger.error('%s options incomplete, missing %s' %
                         ('authentication', ' '.join(missing)))
            continue

        for job, cust in jobcust:
            jobdir = confcust.get_fulldir(cust, job)
            jobstatedir = confcust.get_fullstatedir(
                globopts['InputStateSaveDir'.lower()], cust, job)

            global fetchtype, custname
            fetchtype = confcust.get_fetchtype(job)
            uidservtype = confcust.pass_uidserviceendpoints(job)
            custname = confcust.get_custname(cust)

            logger.customer = custname
            logger.job = job

            ams_custopts = confcust.get_amsopts(cust)
            ams_opts = cglob.merge_opts(ams_custopts, 'ams')
            ams_complete, missopt = cglob.is_complete(ams_opts, 'ams')
            if not ams_complete:
                logger.error(
                    'Customer:%s Job:%s %s options incomplete, missing %s' %
                    (custname, logger.job, 'ams', ' '.join(missopt)))
                continue

            if fetchtype == 'ServiceGroups':
                group_endpoints = gocdb.getGroupOfServices(uidservtype)
            else:
                group_endpoints = gocdb.getGroupOfEndpoints(uidservtype)
            group_groups = gocdb.getGroupOfGroups()

            if fixed_date:
                output.write_state(sys.argv[0], jobstatedir, gocdb.state,
                                   globopts['InputStateDays'.lower()],
                                   fixed_date.replace('-', '_'))
            else:
                output.write_state(sys.argv[0], jobstatedir, gocdb.state,
                                   globopts['InputStateDays'.lower()])

            if not gocdb.state:
                continue

            numge = len(group_endpoints)
            numgg = len(group_groups)

            ggtags = confcust.get_gocdb_ggtags(job)
            getags = confcust.get_gocdb_getags(job)
            tf = TopoFilter(group_groups, group_endpoints, ggtags, getags)
            group_groups = tf.gg
            group_endpoints = tf.ge

            if eval(globopts['GeneralPublishAms'.lower()]):
                if fixed_date:
                    partdate = fixed_date
                else:
                    partdate = datestamp(1).replace('_', '-')

                ams = output.AmsPublish(
                    ams_opts['amshost'], ams_opts['amsproject'],
                    ams_opts['amstoken'], ams_opts['amstopic'],
                    confcust.get_jobdir(job), ams_opts['amsbulk'],
                    ams_opts['amspacksinglemsg'], logger,
                    int(globopts['ConnectionRetry'.lower()]),
                    int(globopts['ConnectionTimeout'.lower()]))

                ams.send(globopts['AvroSchemasTopologyGroupOfGroups'.lower()],
                         'group_groups', partdate, group_groups)

                ams.send(
                    globopts['AvroSchemasTopologyGroupOfEndpoints'.lower()],
                    'group_endpoints', partdate, group_endpoints)

            if eval(globopts['GeneralWriteAvro'.lower()]):
                if fixed_date:
                    filename = filename_date(
                        logger,
                        globopts['OutputTopologyGroupOfGroups'.lower()],
                        jobdir, fixed_date.replace('-', '_'))
                else:
                    filename = filename_date(
                        logger,
                        globopts['OutputTopologyGroupOfGroups'.lower()],
                        jobdir)
                avro = output.AvroWriter(
                    globopts['AvroSchemasTopologyGroupOfGroups'.lower()],
                    filename)
                ret, excep = avro.write(group_groups)
                if not ret:
                    logger.error('Customer:%s Job:%s : %s' %
                                 (logger.customer, logger.job, repr(excep)))
                    raise SystemExit(1)

                if fixed_date:
                    filename = filename_date(
                        logger,
                        globopts['OutputTopologyGroupOfEndpoints'.lower()],
                        jobdir, fixed_date.replace('-', '_'))
                else:
                    filename = filename_date(
                        logger,
                        globopts['OutputTopologyGroupOfEndpoints'.lower()],
                        jobdir)
                avro = output.AvroWriter(
                    globopts['AvroSchemasTopologyGroupOfEndpoints'.lower()],
                    filename)
                ret, excep = avro.write(group_endpoints)
                if not ret:
                    logger.error('Customer:%s Job:%s : %s' %
                                 (logger.customer, logger.job, repr(excep)))
                    raise SystemExit(1)

            logger.info('Customer:%s Job:%s Fetched Endpoints:%d Groups(%s):%d'
                        % (custname, job, numge, fetchtype, numgg))
            if getags or ggtags:
                selstr = 'Customer:%s Job:%s Selected ' % (custname, job)
                selge, selgg = '', ''
                if getags:
                    for key, value in getags.items():
                        if isinstance(value, list):
                            value = '[' + ','.join(value) + ']'
                        selge += '%s:%s,' % (key, value)
                    selstr += 'Endpoints(%s):' % selge[:len(selge) - 1]
                    selstr += '%d ' % (len(group_endpoints))
                if ggtags:
                    for key, value in ggtags.items():
                        if isinstance(value, list):
                            value = '[' + ','.join(value) + ']'
                        selgg += '%s:%s,' % (key, value)
                    selstr += 'Groups(%s):' % selgg[:len(selgg) - 1]
                    selstr += '%d' % (len(group_groups))

                logger.info(selstr)
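
TopoFilter narrows the fetched topology with the per-job tag selections from customer.conf; a simplified sketch of that selection, assuming entities carry a 'tags' dict (the real class also cascades group filtering down to endpoints):

class TopoFilter(object):
    def __init__(self, gg, ge, ggtags=None, getags=None):
        # keep only entities whose tags satisfy every configured key/value;
        # list values act as alternatives
        self.gg = self._filter(gg, ggtags) if ggtags else gg
        self.ge = self._filter(ge, getags) if getags else ge

    @staticmethod
    def _filter(entities, tags):
        def match(entity):
            enttags = entity.get('tags', {})
            for key, value in tags.items():
                wanted = value if isinstance(value, list) else [value]
                if enttags.get(key) not in wanted:
                    return False
            return True

        return [ent for ent in entities if match(ent)]
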
def main():
    global logger, globopts, confcust

    parser = argparse.ArgumentParser(description="""Fetch and construct entities from EOSC-PORTAL feed""")
    parser.add_argument('-c', dest='custconf', nargs=1, metavar='customer.conf', help='path to customer configuration file', type=str, required=False)
    parser.add_argument('-g', dest='gloconf', nargs=1, metavar='global.conf', help='path to global configuration file', type=str, required=False)
    parser.add_argument('-d', dest='date', metavar='YEAR-MONTH-DAY', help='write data for this date', type=str, required=False)
    args = parser.parse_args()
    group_endpoints, group_groups = list(), list()
    logger = Logger(os.path.basename(sys.argv[0]))

    fixed_date = None
    if args.date and date_check(args.date):
        fixed_date = args.date

    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(sys.argv[0], confpath)
    globopts = cglob.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    confcust.make_dirstruct(globopts['InputStateSaveDir'.lower()])
    global custname
    custname = confcust.get_custname()

    # safely assume a single customer defined in the customer file
    cust = list(confcust.get_customers())[0]
    jobstatedir = confcust.get_fullstatedir(globopts['InputStateSaveDir'.lower()], cust)
    fetchtype = confcust.get_topofetchtype()[0]

    state = None
    logger.customer = custname
    uidservtype = confcust.get_uidserviceendpoints()
    topofeed = confcust.get_topofeed()

    loop = uvloop.new_event_loop()
    asyncio.set_event_loop(loop)

    try:
        if is_feed(topofeed):
            res = loop.run_until_complete(fetch_data(topofeed))
            group_groups, group_endpoints = parse_source_topo(res, uidservtype, fetchtype)
            contacts = ParseContacts(logger, res, uidservtype, is_csv=False).get_contacts()
            attach_contacts_topodata(logger, contacts, group_endpoints)
        else:
            try:
                with open(topofeed) as fp:
                    js = json.load(fp)
                    group_groups, group_endpoints = parse_source_topo(js, uidservtype, fetchtype)
            except IOError as exc:
                logger.error('Customer:%s : Problem opening %s - %s' % (logger.customer, topofeed, repr(exc)))

        loop.run_until_complete(
            write_state(confcust, fixed_date, True)
        )

        webapi_opts = get_webapi_opts(cglob, confcust)

        numge = len(group_endpoints)
        numgg = len(group_groups)

        # send concurrently to WEB-API in coroutines
        if eval(globopts['GeneralPublishWebAPI'.lower()]):
            loop.run_until_complete(
                asyncio.gather(
                    send_webapi(webapi_opts, group_groups, 'groups', fixed_date),
                    send_webapi(webapi_opts, group_endpoints, 'endpoints', fixed_date)
                )
            )

        if eval(globopts['GeneralWriteAvro'.lower()]):
            write_avro(confcust, group_groups, group_endpoints, fixed_date)

        logger.info('Customer:%s Fetched Endpoints:%d Groups(%s):%d' %
                    (custname, numge, fetchtype, numgg))

    except (ConnectorHttpError, ConnectorParseError, KeyboardInterrupt) as exc:
        logger.error(repr(exc))
        loop.run_until_complete(
            write_state(confcust, fixed_date, False)
        )

    finally:
        loop.close()
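
is_feed decides whether topofeed is a remote URL to fetch or a local JSON file to open; a minimal sketch based on the URL scheme:

from urllib.parse import urlparse


def is_feed(path):
    # treat http(s) locations as remote feeds, anything else as a local file
    return urlparse(path).scheme in ('http', 'https')
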
def main():
    global logger, globopts
    parser = argparse.ArgumentParser(
        description=
        'Fetch POEM profile for every job of the customer and write POEM expanded profiles needed by the prefilter for the EGI customer'
    )
    parser.add_argument('-c',
                        dest='custconf',
                        nargs=1,
                        metavar='customer.conf',
                        help='path to customer configuration file',
                        type=str,
                        required=False)
    parser.add_argument(
        '-np',
        dest='noprefilter',
        help='do not write POEM expanded profiles for prefilter',
        required=False,
        action='store_true')
    parser.add_argument('-p',
                        dest='poemconf',
                        nargs=1,
                        metavar='poem-connector.conf',
                        help='path to poem-connector configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-g',
                        dest='gloconf',
                        nargs=1,
                        metavar='global.conf',
                        help='path to global configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-d',
                        dest='date',
                        metavar='YEAR-MONTH-DAY',
                        help='write data for this date',
                        type=str,
                        required=False)
    args = parser.parse_args()

    logger = Logger(os.path.basename(sys.argv[0]))

    fixed_date = None
    if args.date and date_check(args.date):
        fixed_date = args.date

    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(sys.argv[0], confpath)
    globopts = cglob.parse()

    servers = {'PoemServer': ['Host', 'VO']}
    filterprofiles = {'FetchProfiles': ['List']}
    prefilterdata = {
        'PrefilterData':
        ['AllowedNGI', 'AllowedNGIProfiles', 'AllNGI', 'AllNGIProfiles']
    }
    global cpoem, poemopts
    confpath = args.poemconf[0] if args.poemconf else None
    cpoem = PoemConf(confpath, servers, filterprofiles, prefilterdata)
    poemopts = cpoem.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    confcust.make_dirstruct(globopts['InputStateSaveDir'.lower()])

    customers = set(
        map(lambda c: confcust.get_custname(c), confcust.get_customers()))
    customers = customers.pop() if len(customers) == 1 else '({0})'.format(
        ','.join(customers))
    logger.customer = customers
    customers = confcust.get_customers()
    jobs = list()
    for c in customers:
        jobs = jobs + confcust.get_jobs(c)
    jobs = jobs.pop() if len(jobs) == 1 else '({0})'.format(','.join(jobs))
    logger.job = jobs

    readerInstance = PoemReader(args.noprefilter)
    ps, psa = readerInstance.getProfiles()

    if not args.noprefilter and ps:
        poempref = PrefilterPoem()
        preffname = filename_date(
            logger, globopts['PrefilterPoemExpandedProfiles'.lower()], '')
        poempref.writeProfiles(ps, preffname)

    for cust in confcust.get_customers():
        # write profiles

        custname = confcust.get_custname(cust)

        for job in confcust.get_jobs(cust):
            logger.customer = confcust.get_custname(cust)
            logger.job = job

            jobdir = confcust.get_fulldir(cust, job)
            jobstatedir = confcust.get_fullstatedir(
                globopts['InputStateSaveDir'.lower()], cust, job)

            ams_custopts = confcust.get_amsopts(cust)
            ams_opts = cglob.merge_opts(ams_custopts, 'ams')
            ams_complete, missopt = cglob.is_complete(ams_opts, 'ams')
            if not ams_complete:
                logger.error('Customer:%s %s options incomplete, missing %s' %
                             (custname, 'ams', ' '.join(missopt)))
                continue

            if fixed_date:
                output.write_state(sys.argv[0], jobstatedir,
                                   readerInstance.state,
                                   globopts['InputStateDays'.lower()],
                                   fixed_date.replace('-', '_'))
            else:
                output.write_state(sys.argv[0], jobstatedir,
                                   readerInstance.state,
                                   globopts['InputStateDays'.lower()])

            if not readerInstance.state:
                continue

            profiles = confcust.get_profiles(job)
            lfprofiles = gen_outprofiles(psa, profiles)

            if eval(globopts['GeneralPublishAms'.lower()]):
                if fixed_date:
                    partdate = fixed_date
                else:
                    partdate = datestamp().replace('_', '-')

                ams = output.AmsPublish(
                    ams_opts['amshost'], ams_opts['amsproject'],
                    ams_opts['amstoken'], ams_opts['amstopic'],
                    confcust.get_jobdir(job), ams_opts['amsbulk'],
                    ams_opts['amspacksinglemsg'], logger,
                    int(globopts['ConnectionRetry'.lower()]),
                    int(globopts['ConnectionTimeout'.lower()]))

                ams.send(globopts['AvroSchemasPoem'.lower()], 'metric_profile',
                         partdate, lfprofiles)

            if eval(globopts['GeneralWriteAvro'.lower()]):
                if fixed_date:
                    filename = filename_date(logger,
                                             globopts['OutputPoem'.lower()],
                                             jobdir,
                                             fixed_date.replace('-', '_'))
                else:
                    filename = filename_date(logger,
                                             globopts['OutputPoem'.lower()],
                                             jobdir)
                avro = output.AvroWriter(globopts['AvroSchemasPoem'.lower()],
                                         filename)
                ret, excep = avro.write(lfprofiles)
                if not ret:
                    logger.error('Customer:%s Job:%s %s' %
                                 (logger.customer, logger.job, repr(excep)))
                    raise SystemExit(1)

            logger.info('Customer:%s Job:%s Profiles:%s Tuples:%d' %
                        (custname, job, ','.join(profiles), len(lfprofiles)))
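
filename_date appears in every example that writes avro output; a minimal sketch, assuming the Output* option values carry a DATE placeholder that is substituted with an underscore-separated date stamp (today's date when none is given):

import datetime
import os


def filename_date(logger, option, directory, stamp=None):
    # hypothetical: replace the DATE placeholder in the configured template
    stamp = stamp if stamp else datetime.datetime.now().strftime('%Y_%m_%d')
    return os.path.join(directory, option.replace('DATE', stamp))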