def main():
    """Fetch GOCDB topology (ServiceGroups, Sites, Endpoints) for every
    customer/job in customer.conf and write group-of-groups and
    group-of-endpoints avro files into each job directory.
    """
    parser = argparse.ArgumentParser(description="""Fetch entities (ServiceGroups, Sites, Endpoints)
                                                    from GOCDB for every customer and job listed in customer.conf and write them
                                                    in an appropriate place""")
    parser.add_argument('-c', dest='custconf', nargs=1, metavar='customer.conf', help='path to customer configuration file', type=str, required=False)
    parser.add_argument('-g', dest='gloconf', nargs=1, metavar='global.conf', help='path to global configuration file', type=str, required=False)
    args = parser.parse_args()

    # logger/globopts/fetchtype/custname are module globals: presumably other
    # helpers in this module read them after main() sets them up — TODO confirm
    global logger
    logger = Logger(os.path.basename(sys.argv[0]))

    certs = {'Authentication': ['HostKey', 'HostCert', 'CAPath', 'CAFile', 'VerifyServerCert']}
    schemas = {'AvroSchemas': ['TopologyGroupOfEndpoints', 'TopologyGroupOfGroups']}
    output = {'Output': ['TopologyGroupOfEndpoints', 'TopologyGroupOfGroups']}
    conn = {'Connection': ['Timeout', 'Retry']}
    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(confpath, certs, schemas, output, conn)
    global globopts
    globopts = cglob.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    feeds = confcust.get_mapfeedjobs(sys.argv[0], 'GOCDB', deffeed='https://goc.egi.eu/gocdbpi/')

    timestamp = datetime.datetime.utcnow().strftime('%Y_%m_%d')

    for feed, jobcust in feeds.items():
        scopes = confcust.get_feedscopes(feed, jobcust)
        gocdb = GOCDBReader(feed, scopes)

        for job, cust in jobcust:
            jobdir = confcust.get_fulldir(cust, job)
            global fetchtype
            fetchtype = confcust.get_gocdb_fetchtype(job)
            global custname
            custname = confcust.get_custname(cust)

            # ServiceGroups jobs enumerate services grouped by service group;
            # every other fetch type enumerates endpoints
            if fetchtype == 'ServiceGroups':
                group_endpoints = gocdb.getGroupOfServices()
            else:
                group_endpoints = gocdb.getGroupOfEndpoints()
            group_groups = gocdb.getGroupOfGroups()

            # counts are taken BEFORE tag filtering: the "Fetched" log line
            # reports raw totals, the "Selected" line reports filtered ones
            numge = len(group_endpoints)
            numgg = len(group_groups)

            ggtags = confcust.get_gocdb_ggtags(job)
            if ggtags:
                group_groups = filter_by_tags(ggtags, group_groups)

            filename = gen_fname_repdate(logger, timestamp, globopts['OutputTopologyGroupOfGroups'.lower()], jobdir)
            avro = AvroWriter(globopts['AvroSchemasTopologyGroupOfGroups'.lower()], filename,
                            group_groups, os.path.basename(sys.argv[0]))
            avro.write()

            # duplicate endpoints whose service type has a legacy alias so
            # both the current and the legacy name are written out
            gelegmap = []
            for g in group_endpoints:
                if g['service'] in LegMapServType:
                    gelegmap.append(copy.copy(g))
                    gelegmap[-1]['service'] = LegMapServType[g['service']]
            getags = confcust.get_gocdb_getags(job)
            numgeleg = len(gelegmap)
            if getags:
                group_endpoints = filter_by_tags(getags, group_endpoints)
                gelegmap = filter_by_tags(getags, gelegmap)

            filename = gen_fname_repdate(logger, timestamp, globopts['OutputTopologyGroupOfEndpoints'.lower()], jobdir)
            avro = AvroWriter(globopts['AvroSchemasTopologyGroupOfEndpoints'.lower()], filename,
                            group_endpoints + gelegmap, os.path.basename(sys.argv[0]))
            avro.write()

            logger.info('Customer:'+custname+' Job:'+job+' Fetched Endpoints:%d' % (numge + numgeleg) +' Groups(%s):%d' % (fetchtype, numgg))
            if getags or ggtags:
                selstr = 'Customer:%s Job:%s Selected ' % (custname, job)
                selge, selgg = '', ''
                if getags:
                    for key, value in getags.items():
                        if isinstance(value, list):
                            value = '['+','.join(value)+']'
                        selge += '%s:%s,' % (key, value)
                    # [:-1] strips the trailing comma added by the loop above
                    selstr += 'Endpoints(%s):' % selge[:-1]
                    selstr += '%d ' % (len(group_endpoints) + len(gelegmap))
                if ggtags:
                    for key, value in ggtags.items():
                        if isinstance(value, list):
                            value = '['+','.join(value)+']'
                        selgg += '%s:%s,' % (key, value)
                    selstr += 'Groups(%s):' % selgg[:-1]
                    selstr += '%d' % (len(group_groups))

                logger.info(selstr)
def main():
    """Fetch topology from the VO feed configured in customer.conf and
    write group-of-groups and group-of-endpoints avro files per job.
    """
    parser = argparse.ArgumentParser(description="""Fetch wanted entities from VO feed provided in customer.conf
                                                    and write them in an appropriate place""")
    parser.add_argument('-c', dest='custconf', nargs=1, metavar='customer.conf', help='path to customer configuration file', type=str, required=False)
    parser.add_argument('-g', dest='gloconf', nargs=1, metavar='global.conf', help='path to global configuration file', type=str, required=False)
    args = parser.parse_args()

    # logger/globopts are module globals: presumably read by other helpers
    # in this module after main() sets them up — TODO confirm
    global logger
    logger = Logger(os.path.basename(sys.argv[0]))

    certs = {'Authentication': ['HostKey', 'HostCert', 'CAPath', 'CAFile', 'VerifyServerCert']}
    schemas = {'AvroSchemas': ['TopologyGroupOfEndpoints', 'TopologyGroupOfGroups']}
    output = {'Output': ['TopologyGroupOfEndpoints', 'TopologyGroupOfGroups']}
    conn = {'Connection': ['Timeout', 'Retry']}
    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(confpath, certs, schemas, output, conn)
    global globopts
    globopts = cglob.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    feeds = confcust.get_mapfeedjobs(sys.argv[0], 'VOFeed')

    timestamp = datetime.datetime.utcnow().strftime('%Y_%m_%d')

    for feed, jobcust in feeds.items():
        vo = VOReader(feed)

        for job, cust in jobcust:
            jobdir = confcust.get_fulldir(cust, job)

            custname = confcust.get_custname(cust)

            filtlgroups = vo.get_groupgroups()
            # count taken BEFORE filtering: the "Fetched" log line reports
            # raw totals, the "Selected" line reports filtered ones
            numgg = len(filtlgroups)
            tags = confcust.get_vo_ggtags(job)
            if tags:
                # keep only groups whose type matches one of the configured
                # 'Type' tag values, case-insensitively
                # (assumes tags always carries a 'Type' key — TODO confirm)
                def ismatch(elem):
                    etype = elem['type'].lower()
                    return any(etype == val.lower() for val in tags['Type'])
                # list comprehension (not filter()) so len() below is safe
                # on both Python 2 and 3
                filtlgroups = [elem for elem in filtlgroups if ismatch(elem)]

            filename = gen_fname_repdate(logger, timestamp, globopts['OutputTopologyGroupOfGroups'.lower()], jobdir)
            avro = AvroWriter(globopts['AvroSchemasTopologyGroupOfGroups'.lower()], filename, filtlgroups,
                              os.path.basename(sys.argv[0]))
            avro.write()

            # duplicate endpoints whose service type has a legacy alias so
            # both the current and the legacy name are written out
            gelegmap = []
            group_endpoints = vo.get_groupendpoints()
            numge = len(group_endpoints)
            for g in group_endpoints:
                if g['service'] in LegMapServType:
                    gelegmap.append(copy.copy(g))
                    gelegmap[-1]['service'] = LegMapServType[g['service']]
            filename = gen_fname_repdate(logger, timestamp, globopts['OutputTopologyGroupOfEndpoints'.lower()], jobdir)
            avro = AvroWriter(globopts['AvroSchemasTopologyGroupOfEndpoints'.lower()], filename, group_endpoints + gelegmap,
                                                                                       os.path.basename(sys.argv[0]))
            avro.write()

            logger.info('Customer:' + custname + ' Job:' + job + ' Fetched Endpoints:%d' % (numge + len(gelegmap))+' Groups:%d' % (numgg))
            if tags:
                selstr = 'Customer:%s Job:%s Selected ' % (custname, job)
                selgg = ''
                for key, value in tags.items():
                    if isinstance(value, list):
                        value = ','.join(value)
                    selgg += '%s:%s,' % (key, value)
                # [:-1] strips the trailing comma added by the loop above
                selstr += 'Groups(%s):' % selgg[:-1]
                selstr += '%d' % (len(filtlgroups))

                logger.info(selstr)
# Esempio n. 3
# 0
def main():
    """Fetch downtimes from GOCDB for the date given with -d and publish
    them to AMS and/or write them as avro, per customer/job configuration.
    """
    global logger, globopts
    parser = argparse.ArgumentParser(description='Fetch downtimes from GOCDB for given date')
    parser.add_argument('-d', dest='date', nargs=1, metavar='YEAR-MONTH-DAY', required=True)
    parser.add_argument('-c', dest='custconf', nargs=1, metavar='customer.conf', help='path to customer configuration file', type=str, required=False)
    parser.add_argument('-g', dest='gloconf', nargs=1, metavar='global.conf', help='path to global configuration file', type=str, required=False)
    args = parser.parse_args()

    logger = Logger(os.path.basename(sys.argv[0]))
    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(sys.argv[0], confpath)
    globopts = cglob.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    confcust.make_dirstruct(globopts['InputStateSaveDir'.lower()])
    feeds = confcust.get_mapfeedjobs(sys.argv[0], deffeed='https://goc.egi.eu/gocdbpi/')

    # defensive guard; with required=True and nargs=1 argparse should never
    # let an empty date through. print_help() prints and returns None, so it
    # must not itself be printed.
    if len(args.date) == 0:
        parser.print_help()
        raise SystemExit(1)

    # calculate start and end times: whole day [00:00:00, 23:59:59]
    try:
        start = datetime.datetime.strptime(args.date[0], '%Y-%m-%d')
        end = datetime.datetime.strptime(args.date[0], '%Y-%m-%d')
        timestamp = start.strftime('%Y_%m_%d')
        start = start.replace(hour=0, minute=0, second=0)
        end = end.replace(hour=23, minute=59, second=59)
    except ValueError as e:
        logger.error(e)
        raise SystemExit(1)


    for feed, jobcust in feeds.items():
        # summarize customer/job names for log context; single values are
        # shown bare, multiple values as "(a,b,...)"
        customers = set(map(lambda jc: confcust.get_custname(jc[1]), jobcust))
        customers = customers.pop() if len(customers) == 1 else '({0})'.format(','.join(customers))
        jobs = set(map(lambda jc: jc[0], jobcust))
        jobs = jobs.pop() if len(jobs) == 1 else '({0})'.format(','.join(jobs))
        logger.job = jobs
        logger.customer = customers

        auth_custopts = confcust.get_authopts(feed, jobcust)
        auth_opts = cglob.merge_opts(auth_custopts, 'authentication')
        auth_complete, missing = cglob.is_complete(auth_opts, 'authentication')
        if auth_complete:
            gocdb = GOCDBReader(feed, auth=auth_opts)
            dts = gocdb.getDowntimes(start, end)
        else:
            # space-join the missing options so they are readable in the log
            # (matches the ams error message below)
            logger.error('Customer:%s Jobs:%s %s options incomplete, missing %s'
                         % (logger.customer, logger.job, 'authentication',
                            ' '.join(missing)))
            continue

        for job, cust in jobcust:
            jobdir = confcust.get_fulldir(cust, job)
            jobstatedir = confcust.get_fullstatedir(globopts['InputStateSaveDir'.lower()], cust, job)

            logger.customer = confcust.get_custname(cust)
            logger.job = job

            ams_custopts = confcust.get_amsopts(cust)
            ams_opts = cglob.merge_opts(ams_custopts, 'ams')
            ams_complete, missopt = cglob.is_complete(ams_opts, 'ams')
            if not ams_complete:
                logger.error('Customer:%s Job:%s %s options incomplete, missing %s' % (logger.customer, job, 'ams', ' '.join(missopt)))
                continue

            output.write_state(sys.argv[0], jobstatedir, gocdb.state, globopts['InputStateDays'.lower()], timestamp)

            if not gocdb.state:
                continue

            # NOTE(review): eval() of a config value; config is trusted here,
            # but a strict truth-string comparison would be safer
            if eval(globopts['GeneralPublishAms'.lower()]):
                ams = output.AmsPublish(ams_opts['amshost'],
                                        ams_opts['amsproject'],
                                        ams_opts['amstoken'],
                                        ams_opts['amstopic'],
                                        confcust.get_jobdir(job),
                                        ams_opts['amsbulk'],
                                        ams_opts['amspacksinglemsg'],
                                        logger,
                                        int(globopts['ConnectionRetry'.lower()]),
                                        int(globopts['ConnectionTimeout'.lower()]))

                ams.send(globopts['AvroSchemasDowntimes'.lower()], 'downtimes',
                         timestamp.replace('_', '-'), dts)

            if eval(globopts['GeneralWriteAvro'.lower()]):
                filename = filename_date(logger, globopts['OutputDowntimes'.lower()], jobdir, stamp=timestamp)
                avro = output.AvroWriter(globopts['AvroSchemasDowntimes'.lower()], filename)
                ret, excep = avro.write(dts)
                if not ret:
                    logger.error('Customer:%s Job:%s %s' % (logger.customer, logger.job, repr(excep)))
                    raise SystemExit(1)

        if gocdb.state:
            custs = set([cust for job, cust in jobcust])
            for cust in custs:
                jobs = [job for job, lcust in jobcust if cust == lcust]
                logger.info('Customer:%s Jobs:%s Fetched Date:%s Endpoints:%d' % (confcust.get_custname(cust),
                                                                                  jobs[0] if len(jobs) == 1 else '({0})'.format(','.join(jobs)),
                                                                                  args.date[0], len(dts)))
def main():
    """Fetch weights from the Gstat/Vapor provider for every job in
    customer.conf, publish them to AMS and/or write them as avro.

    Writes a state file per job; an optional -d date pins the output
    filenames/partition date instead of the computed datestamp.
    """
    global logger, globopts
    parser = argparse.ArgumentParser(description="""Fetch weights information from Gstat provider
                                                    for every job listed in customer.conf""")
    parser.add_argument('-c', dest='custconf', nargs=1, metavar='customer.conf', help='path to customer configuration file', type=str, required=False)
    parser.add_argument('-g', dest='gloconf', nargs=1, metavar='global.conf', help='path to global configuration file', type=str, required=False)
    parser.add_argument('-d', dest='date', metavar='YEAR-MONTH-DAY', help='write data for this date', type=str, required=False)
    args = parser.parse_args()

    logger = Logger(os.path.basename(sys.argv[0]))

    # -d is honoured only if it passes date_check(); otherwise the computed
    # datestamp is used further below
    fixed_date = None
    if args.date and date_check(args.date):
        fixed_date = args.date

    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(sys.argv[0], confpath)
    globopts = cglob.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    confcust.make_dirstruct(globopts['InputStateSaveDir'.lower()])
    feeds = confcust.get_mapfeedjobs(sys.argv[0], deffeed=VAPORPI)

    for feed, jobcust in feeds.items():
        weights = Vapor(feed)
        datawr = None

        # summarize customer/job names for log context; single values are
        # shown bare, multiple values as "(a,b,...)"
        customers = set(map(lambda jc: confcust.get_custname(jc[1]), jobcust))
        customers = customers.pop() if len(customers) == 1 else '({0})'.format(','.join(customers))
        jobs = set(map(lambda jc: jc[0], jobcust))
        jobs = jobs.pop() if len(jobs) == 1 else '({0})'.format(','.join(jobs))
        logger.job = jobs
        logger.customer = customers

        # one fetch per feed, shared by every job on that feed
        w = weights.getWeights()

        for job, cust in jobcust:
            logger.customer = confcust.get_custname(cust)
            logger.job = job

            jobdir = confcust.get_fulldir(cust, job)
            jobstatedir = confcust.get_fullstatedir(globopts['InputStateSaveDir'.lower()], cust, job)

            custname = confcust.get_custname(cust)
            ams_custopts = confcust.get_amsopts(cust)
            ams_opts = cglob.merge_opts(ams_custopts, 'ams')
            ams_complete, missopt = cglob.is_complete(ams_opts, 'ams')
            if not ams_complete:
                logger.error('Customer:%s %s options incomplete, missing %s' % (custname, 'ams', ' '.join(missopt)))
                continue

            # record fetch success/failure for this job before any output
            if fixed_date:
                output.write_state(sys.argv[0], jobstatedir, weights.state,
                                   globopts['InputStateDays'.lower()],
                                   fixed_date.replace('-', '_'))
            else:
                output.write_state(sys.argv[0], jobstatedir, weights.state,
                                   globopts['InputStateDays'.lower()])

            if not weights.state:
                continue

            datawr = data_out(w)
            # NOTE(review): eval() of a config value; config is trusted here
            if eval(globopts['GeneralPublishAms'.lower()]):
                if fixed_date:
                    partdate = fixed_date
                else:
                    partdate = datestamp(1).replace('_', '-')

                ams = output.AmsPublish(ams_opts['amshost'],
                                        ams_opts['amsproject'],
                                        ams_opts['amstoken'],
                                        ams_opts['amstopic'],
                                        confcust.get_jobdir(job),
                                        ams_opts['amsbulk'],
                                        ams_opts['amspacksinglemsg'],
                                        logger,
                                        int(globopts['ConnectionRetry'.lower()]),
                                        int(globopts['ConnectionTimeout'.lower()]))

                ams.send(globopts['AvroSchemasWeights'.lower()], 'weights',
                         partdate, datawr)

            if eval(globopts['GeneralWriteAvro'.lower()]):
                if fixed_date:
                    filename = filename_date(logger, globopts['OutputWeights'.lower()], jobdir, fixed_date.replace('-', '_'))
                else:
                    filename = filename_date(logger, globopts['OutputWeights'.lower()], jobdir)
                avro = output.AvroWriter(globopts['AvroSchemasWeights'.lower()], filename)
                ret, excep = avro.write(datawr)
                if not ret:
                    logger.error('Customer:%s Job:%s %s' % (logger.customer, logger.job, repr(excep)))
                    raise SystemExit(1)

        # summary line per customer, only if at least one job produced data
        if datawr:
            custs = set([cust for job, cust in jobcust])
            for cust in custs:
                jobs = [job for job, lcust in jobcust if cust == lcust]
                logger.info('Customer:%s Jobs:%s Sites:%d' % (confcust.get_custname(cust),
                                                              jobs[0] if len(jobs) == 1 else '({0})'.format(','.join(jobs)),
                                                              len(datawr)))
# Esempio n. 5
# 0
def main():
    """Fetch weights from the Gstat/Vapor provider for every job in
    customer.conf, publish them to AMS and/or write them as avro.

    Supports per-customer "send empty" mode (write an empty dataset without
    fetching). An optional -d date pins the output filenames/partition date.
    """
    global logger, globopts
    parser = argparse.ArgumentParser(
        description="""Fetch weights information from Gstat provider
                                                    for every job listed in customer.conf"""
    )
    parser.add_argument('-c',
                        dest='custconf',
                        nargs=1,
                        metavar='customer.conf',
                        help='path to customer configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-g',
                        dest='gloconf',
                        nargs=1,
                        metavar='global.conf',
                        help='path to global configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-d',
                        dest='date',
                        metavar='YEAR-MONTH-DAY',
                        help='write data for this date',
                        type=str,
                        required=False)
    args = parser.parse_args()

    logger = Logger(os.path.basename(sys.argv[0]))

    # -d is honoured only if it passes date_check()
    fixed_date = None
    if args.date and date_check(args.date):
        fixed_date = args.date

    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(sys.argv[0], confpath)
    globopts = cglob.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    confcust.make_dirstruct(globopts['InputStateSaveDir'.lower()])
    feeds = confcust.get_mapfeedjobs(sys.argv[0], deffeed=VAPORPI)

    for feed, jobcust in feeds.items():
        weights = Vapor(feed)
        datawr = None
        # initialized here so the summary check below cannot raise NameError
        # when jobcust is empty (it was previously only set inside the loop)
        write_empty = False

        # summarize customer/job names for log context; single values are
        # shown bare, multiple values as "(a,b,...)"
        customers = set(map(lambda jc: confcust.get_custname(jc[1]), jobcust))
        customers = customers.pop() if len(customers) == 1 else '({0})'.format(
            ','.join(customers))
        sjobs = set(map(lambda jc: jc[0], jobcust))
        jobs = list(sjobs)[0] if len(sjobs) == 1 else '({0})'.format(
            ','.join(sjobs))
        logger.job = jobs
        logger.customer = customers

        for job, cust in jobcust:
            logger.customer = confcust.get_custname(cust)
            logger.job = job

            write_empty = confcust.send_empty(sys.argv[0], cust)

            # in "send empty" mode skip the fetch entirely and pretend it
            # succeeded so an empty dataset still gets written/published
            if not write_empty:
                w = weights.getWeights()
            else:
                w = []
                weights.state = True

            jobdir = confcust.get_fulldir(cust, job)
            jobstatedir = confcust.get_fullstatedir(
                globopts['InputStateSaveDir'.lower()], cust, job)

            custname = confcust.get_custname(cust)
            ams_custopts = confcust.get_amsopts(cust)
            ams_opts = cglob.merge_opts(ams_custopts, 'ams')
            ams_complete, missopt = cglob.is_complete(ams_opts, 'ams')
            if not ams_complete:
                logger.error('Customer:%s %s options incomplete, missing %s' %
                             (custname, 'ams', ' '.join(missopt)))
                continue

            # record fetch success/failure for this job before any output
            if fixed_date:
                output.write_state(sys.argv[0], jobstatedir, weights.state,
                                   globopts['InputStateDays'.lower()],
                                   fixed_date.replace('-', '_'))
            else:
                output.write_state(sys.argv[0], jobstatedir, weights.state,
                                   globopts['InputStateDays'.lower()])

            if not weights.state:
                continue

            datawr = data_out(w)
            # NOTE(review): eval() of a config value; config is trusted here
            if eval(globopts['GeneralPublishAms'.lower()]):
                if fixed_date:
                    partdate = fixed_date
                else:
                    partdate = datestamp(1).replace('_', '-')

                ams = output.AmsPublish(
                    ams_opts['amshost'], ams_opts['amsproject'],
                    ams_opts['amstoken'], ams_opts['amstopic'],
                    confcust.get_jobdir(job), ams_opts['amsbulk'],
                    ams_opts['amspacksinglemsg'], logger,
                    int(globopts['ConnectionRetry'.lower()]),
                    int(globopts['ConnectionTimeout'.lower()]))

                ams.send(globopts['AvroSchemasWeights'.lower()], 'weights',
                         partdate, datawr)

            if eval(globopts['GeneralWriteAvro'.lower()]):
                if fixed_date:
                    filename = filename_date(logger,
                                             globopts['OutputWeights'.lower()],
                                             jobdir,
                                             fixed_date.replace('-', '_'))
                else:
                    filename = filename_date(logger,
                                             globopts['OutputWeights'.lower()],
                                             jobdir)
                avro = output.AvroWriter(
                    globopts['AvroSchemasWeights'.lower()], filename)
                ret, excep = avro.write(datawr)
                if not ret:
                    logger.error('Customer:%s Job:%s %s' %
                                 (logger.customer, logger.job, repr(excep)))
                    raise SystemExit(1)

        # summary line per customer; datawr may still be None when
        # write_empty is set but every job bailed out early, so guard len()
        if datawr or write_empty:
            custs = set([cust for job, cust in jobcust])
            for cust in custs:
                jobs = [job for job, lcust in jobcust if cust == lcust]
                logger.info(
                    'Customer:%s Jobs:%s Sites:%d' %
                    (confcust.get_custname(cust), jobs[0] if len(jobs) == 1
                     else '({0})'.format(','.join(jobs)),
                     len(datawr) if datawr else 0))
def main():
    """Fetch and construct topology entities from the NEANIAS feed (remote
    URL or local JSON file) for every customer/job and publish/write them.
    """
    parser = argparse.ArgumentParser(
        description="""Fetch and construct entities from NEANIAS feed""")
    parser.add_argument('-c',
                        dest='custconf',
                        nargs=1,
                        metavar='customer.conf',
                        help='path to customer configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-g',
                        dest='gloconf',
                        nargs=1,
                        metavar='global.conf',
                        help='path to global configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-d',
                        dest='date',
                        metavar='YEAR-MONTH-DAY',
                        help='write data for this date',
                        type=str,
                        required=False)
    args = parser.parse_args()
    group_endpoints, group_groups = [], []
    logger = Logger(os.path.basename(sys.argv[0]))

    # -d is honoured only if it passes date_check()
    fixed_date = None
    if args.date and date_check(args.date):
        fixed_date = args.date

    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(sys.argv[0], confpath)
    globopts = cglob.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    confcust.make_dirstruct(globopts['InputStateSaveDir'.lower()])

    for cust in confcust.get_customers():
        custname = confcust.get_custname(cust)

        for job in confcust.get_jobs(cust):
            jobdir = confcust.get_fulldir(cust, job)
            logger.customer = confcust.get_custname(cust)
            jobstatedir = confcust.get_fullstatedir(
                globopts['InputStateSaveDir'.lower()], cust, job)
            fetchtype = confcust.get_fetchtype(job)

            state = None
            logger.job = job
            logger.customer = custname

            uidservtype = confcust.pass_uidserviceendpoints(job)
            ams_custopts = confcust.get_amsopts(cust)
            ams_opts = cglob.merge_opts(ams_custopts, 'ams')
            ams_complete, missopt = cglob.is_complete(ams_opts, 'ams')
            # bail out on incomplete AMS options like the sibling connectors
            # do; previously the result of is_complete() was never checked
            if not ams_complete:
                logger.error('Customer:%s Job:%s %s options incomplete, missing %s' %
                             (custname, job, 'ams', ' '.join(missopt)))
                continue

            feeds = confcust.get_mapfeedjobs(sys.argv[0])
            # list() so the first feed is reachable on both py2 and py3
            # (dict.keys()[0] fails on py3 dict views)
            topofeed = list(feeds.keys())[0]
            if is_feed(topofeed):
                # remote feed: fetch and parse JSON over the network
                remote_topo = urlparse(topofeed)
                res = input.connection(logger, 'NEANIAS', globopts,
                                       remote_topo.scheme, remote_topo.netloc,
                                       remote_topo.path)
                if not res:
                    raise input.ConnectorError()
                doc = input.parse_json(
                    logger, 'NEANIAS', globopts, res, remote_topo.scheme +
                    '://' + remote_topo.netloc + remote_topo.path)
                eosc = EOSCReader(doc, uidservtype, fetchtype)
                group_groups = eosc.get_groupgroups()
                group_endpoints = eosc.get_groupendpoints()
                state = True
            else:
                # local feed: read JSON from a file path
                try:
                    with open(topofeed) as fp:
                        js = json.load(fp)
                        eosc = EOSCReader(js, uidservtype, fetchtype)
                        group_groups = eosc.get_groupgroups()
                        group_endpoints = eosc.get_groupendpoints()
                        state = True
                except IOError as exc:
                    logger.error(
                        'Customer:%s Job:%s : Problem opening %s - %s' %
                        (logger.customer, logger.job, topofeed,
                         repr(exc)))
                    state = False

            # record fetch success/failure for this job before any output
            if fixed_date:
                output.write_state(sys.argv[0], jobstatedir, state,
                                   globopts['InputStateDays'.lower()],
                                   fixed_date.replace('-', '_'))
            else:
                output.write_state(sys.argv[0], jobstatedir, state,
                                   globopts['InputStateDays'.lower()])

            if not state:
                continue

            numge = len(group_endpoints)
            numgg = len(group_groups)

            # NOTE(review): eval() of a config value; config is trusted here
            if eval(globopts['GeneralPublishAms'.lower()]):
                if fixed_date:
                    partdate = fixed_date
                else:
                    partdate = datestamp(1).replace('_', '-')

                ams = output.AmsPublish(
                    ams_opts['amshost'], ams_opts['amsproject'],
                    ams_opts['amstoken'], ams_opts['amstopic'],
                    confcust.get_jobdir(job), ams_opts['amsbulk'],
                    ams_opts['amspacksinglemsg'], logger,
                    int(globopts['ConnectionRetry'.lower()]),
                    int(globopts['ConnectionTimeout'.lower()]))

                ams.send(globopts['AvroSchemasTopologyGroupOfGroups'.lower()],
                         'group_groups', partdate, group_groups)

                ams.send(
                    globopts['AvroSchemasTopologyGroupOfEndpoints'.lower()],
                    'group_endpoints', partdate, group_endpoints)

            if eval(globopts['GeneralWriteAvro'.lower()]):
                if fixed_date:
                    filename = filename_date(
                        logger,
                        globopts['OutputTopologyGroupOfGroups'.lower()],
                        jobdir, fixed_date.replace('-', '_'))
                else:
                    filename = filename_date(
                        logger,
                        globopts['OutputTopologyGroupOfGroups'.lower()],
                        jobdir)
                avro = output.AvroWriter(
                    globopts['AvroSchemasTopologyGroupOfGroups'.lower()],
                    filename)
                ret, excep = avro.write(group_groups)
                if not ret:
                    logger.error('Customer:%s Job:%s : %s' %
                                 (logger.customer, logger.job, repr(excep)))
                    raise SystemExit(1)

                if fixed_date:
                    filename = filename_date(
                        logger,
                        globopts['OutputTopologyGroupOfEndpoints'.lower()],
                        jobdir, fixed_date.replace('-', '_'))
                else:
                    filename = filename_date(
                        logger,
                        globopts['OutputTopologyGroupOfEndpoints'.lower()],
                        jobdir)
                avro = output.AvroWriter(
                    globopts['AvroSchemasTopologyGroupOfEndpoints'.lower()],
                    filename)
                ret, excep = avro.write(group_endpoints)
                if not ret:
                    logger.error('Customer:%s Job:%s : %s' %
                                 (logger.customer, logger.job, repr(excep)))
                    raise SystemExit(1)

            logger.info('Customer:' + custname + ' Job:' + job +
                        ' Fetched Endpoints:%d' % (numge) + ' Groups(%s):%d' %
                        (fetchtype, numgg))
# Esempio n. 7
# 0
def main():
    """Fetch weights from the Gstat/VAPOR provider for every feed and job
    listed in customer.conf, publish them to WebAPI and/or write them as
    Avro files, and record per-job fetch state.
    """
    global logger, globopts
    parser = argparse.ArgumentParser(
        description="""Fetch weights information from Gstat provider
                                                    for every job listed in customer.conf"""
    )
    parser.add_argument('-c',
                        dest='custconf',
                        nargs=1,
                        metavar='customer.conf',
                        help='path to customer configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-g',
                        dest='gloconf',
                        nargs=1,
                        metavar='global.conf',
                        help='path to global configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-d',
                        dest='date',
                        metavar='YEAR-MONTH-DAY',
                        help='write data for this date',
                        type=str,
                        required=False)
    args = parser.parse_args()

    logger = Logger(os.path.basename(sys.argv[0]))

    # Optional fixed date overriding "today" in output filenames/partitions.
    fixed_date = None
    if args.date and date_check(args.date):
        fixed_date = args.date

    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(sys.argv[0], confpath)
    globopts = cglob.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    confcust.make_dirstruct(globopts['InputStateSaveDir'.lower()])
    feeds = confcust.get_mapfeedjobs(sys.argv[0], deffeed=VAPORPI)

    loop = uvloop.new_event_loop()
    asyncio.set_event_loop(loop)

    for feed, jobcust in feeds.items():
        # Compose aggregate customer/job labels for feed-level log context.
        customers = set(map(lambda jc: confcust.get_custname(jc[1]), jobcust))
        customers = customers.pop() if len(customers) == 1 else '({0})'.format(
            ','.join(customers))
        sjobs = set(map(lambda jc: jc[0], jobcust))
        jobs = list(sjobs)[0] if len(sjobs) == 1 else '({0})'.format(
            ','.join(sjobs))
        logger.job = jobs
        logger.customer = customers

        try:
            res = loop.run_until_complete(fetch_data(feed))
            weights = parse_source(res)

            for job, cust in jobcust:
                logger.customer = confcust.get_custname(cust)
                logger.job = job

                write_empty = confcust.send_empty(sys.argv[0], cust)

                # BUGFIX: previously `weights = []` clobbered the parsed feed
                # data for every subsequent job of this feed; use a per-job
                # view so only write_empty jobs emit empty data.
                job_weights = [] if write_empty else weights

                webapi_opts = get_webapi_opts(cust, job, cglob, confcust)

                if eval(globopts['GeneralPublishWebAPI'.lower()]):
                    loop.run_until_complete(
                        send_webapi(job, confcust, webapi_opts, fixed_date,
                                    job_weights))

                if eval(globopts['GeneralWriteAvro'.lower()]):
                    write_avro(cust, job, confcust, fixed_date, job_weights)

                loop.run_until_complete(
                    write_state(cust, job, confcust, fixed_date, True))

                if job_weights or write_empty:
                    # NOTE(review): this summary runs inside the per-job loop
                    # and re-iterates all jobs, producing duplicate log lines;
                    # it likely belongs one indentation level up — confirm.
                    custs = set([cust for job, cust in jobcust])
                    for cust in custs:
                        jobs = [job for job, lcust in jobcust if cust == lcust]
                        logger.info(
                            'Customer:%s Jobs:%s Sites:%d' %
                            (confcust.get_custname(cust), jobs[0] if len(jobs)
                             == 1 else '({0})'.format(','.join(jobs)),
                             len(job_weights)))

        except (ConnectorHttpError, ConnectorParseError,
                KeyboardInterrupt) as exc:
            logger.error(repr(exc))
            # Mark every job of this feed as failed in the state files.
            for job, cust in jobcust:
                loop.run_until_complete(
                    write_state(cust, job, confcust, fixed_date, False))
# Esempio n. 8 (scrape separator — not code)
# 0
def main():
    """Fetch topology entities (ServiceGroups, Sites, Endpoints) from GOCDB
    for every customer and job in customer.conf, filter them by configured
    tags, publish them to AMS and/or write them as Avro files, and record
    per-job fetch state.
    """
    global logger, globopts, confcust
    parser = argparse.ArgumentParser(
        description="""Fetch entities (ServiceGroups, Sites, Endpoints)
                                                    from GOCDB for every customer and job listed in customer.conf and write them
                                                    in an appropriate place""")
    parser.add_argument('-c',
                        dest='custconf',
                        nargs=1,
                        metavar='customer.conf',
                        help='path to customer configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-g',
                        dest='gloconf',
                        nargs=1,
                        metavar='global.conf',
                        help='path to global configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-d',
                        dest='date',
                        metavar='YEAR-MONTH-DAY',
                        help='write data for this date',
                        type=str,
                        required=False)
    args = parser.parse_args()
    group_endpoints, group_groups = [], []
    logger = Logger(os.path.basename(sys.argv[0]))

    # Optional fixed date overriding "today" in filenames/partitions.
    fixed_date = None
    if args.date and date_check(args.date):
        fixed_date = args.date

    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(sys.argv[0], confpath)
    globopts = cglob.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    confcust.make_dirstruct(globopts['InputStateSaveDir'.lower()])
    feeds = confcust.get_mapfeedjobs(sys.argv[0],
                                     'GOCDB',
                                     deffeed='https://goc.egi.eu/gocdbpi/')

    for feed, jobcust in feeds.items():
        # One GOCDB reader per feed; the feed is skipped entirely if the
        # merged authentication options are incomplete.
        scopes = confcust.get_feedscopes(feed, jobcust)
        paging = confcust.is_paginated(feed, jobcust)
        auth_custopts = confcust.get_authopts(feed, jobcust)
        auth_opts = cglob.merge_opts(auth_custopts, 'authentication')
        auth_complete, missing = cglob.is_complete(auth_opts, 'authentication')
        if auth_complete:
            gocdb = GOCDBReader(feed, scopes, paging, auth=auth_opts)
        else:
            logger.error('%s options incomplete, missing %s' %
                         ('authentication', ' '.join(missing)))
            continue

        for job, cust in jobcust:
            jobdir = confcust.get_fulldir(cust, job)
            jobstatedir = confcust.get_fullstatedir(
                globopts['InputStateSaveDir'.lower()], cust, job)

            global fetchtype, custname
            fetchtype = confcust.get_fetchtype(job)
            uidservtype = confcust.pass_uidserviceendpoints(job)
            custname = confcust.get_custname(cust)

            logger.customer = custname
            logger.job = job

            # Per-customer AMS options; skip this job if incomplete.
            ams_custopts = confcust.get_amsopts(cust)
            ams_opts = cglob.merge_opts(ams_custopts, 'ams')
            ams_complete, missopt = cglob.is_complete(ams_opts, 'ams')
            if not ams_complete:
                logger.error(
                    'Customer:%s Job:%s %s options incomplete, missing %s' %
                    (custname, logger.job, 'ams', ' '.join(missopt)))
                continue

            if fetchtype == 'ServiceGroups':
                group_endpoints = gocdb.getGroupOfServices(uidservtype)
            else:
                group_endpoints = gocdb.getGroupOfEndpoints(uidservtype)
            group_groups = gocdb.getGroupOfGroups()

            # Record whether the GOCDB fetch succeeded for this job.
            if fixed_date:
                output.write_state(sys.argv[0], jobstatedir, gocdb.state,
                                   globopts['InputStateDays'.lower()],
                                   fixed_date.replace('-', '_'))
            else:
                output.write_state(sys.argv[0], jobstatedir, gocdb.state,
                                   globopts['InputStateDays'.lower()])

            if not gocdb.state:
                continue

            # Counts before tag filtering; reported in the summary below.
            numge = len(group_endpoints)
            numgg = len(group_groups)

            # Narrow the topology to the tags configured for this job.
            ggtags = confcust.get_gocdb_ggtags(job)
            getags = confcust.get_gocdb_getags(job)
            tf = TopoFilter(group_groups, group_endpoints, ggtags, getags)
            group_groups = tf.gg
            group_endpoints = tf.ge

            # NOTE(review): globopts values are eval()'d — presumably they are
            # 'True'/'False' literals; a safer boolean parse would avoid
            # executing arbitrary config content. Confirm before changing.
            if eval(globopts['GeneralPublishAms'.lower()]):
                if fixed_date:
                    partdate = fixed_date
                else:
                    partdate = datestamp(1).replace('_', '-')

                ams = output.AmsPublish(
                    ams_opts['amshost'], ams_opts['amsproject'],
                    ams_opts['amstoken'], ams_opts['amstopic'],
                    confcust.get_jobdir(job), ams_opts['amsbulk'],
                    ams_opts['amspacksinglemsg'], logger,
                    int(globopts['ConnectionRetry'.lower()]),
                    int(globopts['ConnectionTimeout'.lower()]))

                ams.send(globopts['AvroSchemasTopologyGroupOfGroups'.lower()],
                         'group_groups', partdate, group_groups)

                ams.send(
                    globopts['AvroSchemasTopologyGroupOfEndpoints'.lower()],
                    'group_endpoints', partdate, group_endpoints)

            if eval(globopts['GeneralWriteAvro'.lower()]):
                # Groups file first, then endpoints file; any write failure
                # aborts the whole run with exit status 1.
                if fixed_date:
                    filename = filename_date(
                        logger,
                        globopts['OutputTopologyGroupOfGroups'.lower()],
                        jobdir, fixed_date.replace('-', '_'))
                else:
                    filename = filename_date(
                        logger,
                        globopts['OutputTopologyGroupOfGroups'.lower()],
                        jobdir)
                avro = output.AvroWriter(
                    globopts['AvroSchemasTopologyGroupOfGroups'.lower()],
                    filename)
                ret, excep = avro.write(group_groups)
                if not ret:
                    logger.error('Customer:%s Job:%s : %s' %
                                 (logger.customer, logger.job, repr(excep)))
                    raise SystemExit(1)

                if fixed_date:
                    filename = filename_date(
                        logger,
                        globopts['OutputTopologyGroupOfEndpoints'.lower()],
                        jobdir, fixed_date.replace('-', '_'))
                else:
                    filename = filename_date(
                        logger,
                        globopts['OutputTopologyGroupOfEndpoints'.lower()],
                        jobdir)
                avro = output.AvroWriter(
                    globopts['AvroSchemasTopologyGroupOfEndpoints'.lower()],
                    filename)
                ret, excep = avro.write(group_endpoints)
                if not ret:
                    logger.error('Customer:%s Job:%s : %s' %
                                 (logger.customer, logger.job, repr(excep)))
                    raise SystemExit(1)

            logger.info('Customer:' + custname + ' Job:' + job +
                        ' Fetched Endpoints:%d' % (numge) + ' Groups(%s):%d' %
                        (fetchtype, numgg))
            # Report how many entities survived the tag filters.
            if getags or ggtags:
                selstr = 'Customer:%s Job:%s Selected ' % (custname, job)
                selge, selgg = '', ''
                if getags:
                    for key, value in getags.items():
                        if isinstance(value, list):
                            value = '[' + ','.join(value) + ']'
                        selge += '%s:%s,' % (key, value)
                    selstr += 'Endpoints(%s):' % selge[:len(selge) - 1]
                    selstr += '%d ' % (len(group_endpoints))
                if ggtags:
                    for key, value in ggtags.items():
                        if isinstance(value, list):
                            value = '[' + ','.join(value) + ']'
                        selgg += '%s:%s,' % (key, value)
                    selstr += 'Groups(%s):' % selgg[:len(selgg) - 1]
                    selstr += '%d' % (len(group_groups))

                logger.info(selstr)
def main():
    """Fetch topology entities (ServiceGroups, Sites, Endpoints) from GOCDB
    for every customer and job in customer.conf, filter them by configured
    tags, publish them to AMS and/or write them as Avro files, and record
    per-job fetch state.

    Older variant of the connector above: no pagination/auth-aware reader
    arguments for the service fetch calls and uses get_gocdb_fetchtype().
    """
    global logger, globopts, confcust
    parser = argparse.ArgumentParser(description="""Fetch entities (ServiceGroups, Sites, Endpoints)
                                                    from GOCDB for every customer and job listed in customer.conf and write them
                                                    in an appropriate place""")
    parser.add_argument('-c', dest='custconf', nargs=1, metavar='customer.conf', help='path to customer configuration file', type=str, required=False)
    parser.add_argument('-g', dest='gloconf', nargs=1, metavar='global.conf', help='path to global configuration file', type=str, required=False)
    parser.add_argument('-d', dest='date', metavar='YEAR-MONTH-DAY', help='write data for this date', type=str, required=False)
    args = parser.parse_args()
    group_endpoints, group_groups = [], []
    logger = Logger(os.path.basename(sys.argv[0]))

    # Optional fixed date overriding "today" in filenames/partitions.
    fixed_date = None
    if args.date and date_check(args.date):
        fixed_date = args.date

    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(sys.argv[0], confpath)
    globopts = cglob.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    confcust.make_dirstruct(globopts['InputStateSaveDir'.lower()])
    feeds = confcust.get_mapfeedjobs(sys.argv[0], 'GOCDB', deffeed='https://goc.egi.eu/gocdbpi/')

    for feed, jobcust in feeds.items():
        # One GOCDB reader per feed; the feed is skipped entirely if the
        # merged authentication options are incomplete.
        scopes = confcust.get_feedscopes(feed, jobcust)
        paging = confcust.is_paginated(feed, jobcust)
        auth_custopts = confcust.get_authopts(feed, jobcust)
        auth_opts = cglob.merge_opts(auth_custopts, 'authentication')
        auth_complete, missing = cglob.is_complete(auth_opts, 'authentication')
        if auth_complete:
            gocdb = GOCDBReader(feed, scopes, paging, auth=auth_opts)
        else:
            logger.error('%s options incomplete, missing %s' % ('authentication', ' '.join(missing)))
            continue

        for job, cust in jobcust:
            jobdir = confcust.get_fulldir(cust, job)
            jobstatedir = confcust.get_fullstatedir(globopts['InputStateSaveDir'.lower()], cust, job)

            global fetchtype, custname
            fetchtype = confcust.get_gocdb_fetchtype(job)
            custname = confcust.get_custname(cust)

            logger.customer = custname
            logger.job = job

            # Per-customer AMS options; skip this job if incomplete.
            ams_custopts = confcust.get_amsopts(cust)
            ams_opts = cglob.merge_opts(ams_custopts, 'ams')
            ams_complete, missopt = cglob.is_complete(ams_opts, 'ams')
            if not ams_complete:
                logger.error('Customer:%s Job:%s %s options incomplete, missing %s' % (custname, logger.job, 'ams', ' '.join(missopt)))
                continue

            if fetchtype == 'ServiceGroups':
                group_endpoints = gocdb.getGroupOfServices()
            else:
                group_endpoints = gocdb.getGroupOfEndpoints()
            group_groups = gocdb.getGroupOfGroups()

            # Record whether the GOCDB fetch succeeded for this job.
            if fixed_date:
                output.write_state(sys.argv[0], jobstatedir, gocdb.state,
                                   globopts['InputStateDays'.lower()],
                                   fixed_date.replace('-', '_'))
            else:
                output.write_state(sys.argv[0], jobstatedir, gocdb.state,
                                   globopts['InputStateDays'.lower()])

            if not gocdb.state:
                continue

            # Counts before tag filtering; reported in the summary below.
            numge = len(group_endpoints)
            numgg = len(group_groups)

            # Narrow the topology to the tags configured for this job.
            ggtags = confcust.get_gocdb_ggtags(job)
            getags = confcust.get_gocdb_getags(job)
            tf = TopoFilter(group_groups, group_endpoints, ggtags, getags)
            group_groups = tf.gg
            group_endpoints = tf.ge

            # NOTE(review): globopts values are eval()'d — presumably they are
            # 'True'/'False' literals; a safer boolean parse would avoid
            # executing arbitrary config content. Confirm before changing.
            if eval(globopts['GeneralPublishAms'.lower()]):
                if fixed_date:
                    partdate = fixed_date
                else:
                    partdate = datestamp(1).replace('_', '-')

                ams = output.AmsPublish(ams_opts['amshost'],
                                        ams_opts['amsproject'],
                                        ams_opts['amstoken'],
                                        ams_opts['amstopic'],
                                        confcust.get_jobdir(job),
                                        ams_opts['amsbulk'],
                                        ams_opts['amspacksinglemsg'],
                                        logger,
                                        int(globopts['ConnectionRetry'.lower()]),
                                        int(globopts['ConnectionTimeout'.lower()]))

                ams.send(globopts['AvroSchemasTopologyGroupOfGroups'.lower()],
                         'group_groups', partdate, group_groups)

                ams.send(globopts['AvroSchemasTopologyGroupOfEndpoints'.lower()],
                         'group_endpoints', partdate, group_endpoints)

            if eval(globopts['GeneralWriteAvro'.lower()]):
                # Groups file first, then endpoints file; any write failure
                # aborts the whole run with exit status 1.
                if fixed_date:
                    filename = filename_date(logger, globopts['OutputTopologyGroupOfGroups'.lower()], jobdir, fixed_date.replace('-', '_'))
                else:
                    filename = filename_date(logger, globopts['OutputTopologyGroupOfGroups'.lower()], jobdir)
                avro = output.AvroWriter(globopts['AvroSchemasTopologyGroupOfGroups'.lower()], filename)
                ret, excep = avro.write(group_groups)
                if not ret:
                    logger.error('Customer:%s Job:%s : %s' % (logger.customer, logger.job, repr(excep)))
                    raise SystemExit(1)

                if fixed_date:
                    filename = filename_date(logger, globopts['OutputTopologyGroupOfEndpoints'.lower()], jobdir, fixed_date.replace('-', '_'))
                else:
                    filename = filename_date(logger, globopts['OutputTopologyGroupOfEndpoints'.lower()], jobdir)
                avro = output.AvroWriter(globopts['AvroSchemasTopologyGroupOfEndpoints'.lower()], filename)
                ret, excep = avro.write(group_endpoints)
                if not ret:
                    logger.error('Customer:%s Job:%s : %s' % (logger.customer, logger.job, repr(excep)))
                    raise SystemExit(1)

            logger.info('Customer:'+custname+' Job:'+job+' Fetched Endpoints:%d' % (numge) +' Groups(%s):%d' % (fetchtype, numgg))
            # Report how many entities survived the tag filters.
            if getags or ggtags:
                selstr = 'Customer:%s Job:%s Selected ' % (custname, job)
                selge, selgg = '', ''
                if getags:
                    for key, value in getags.items():
                        if isinstance(value, list):
                            value = '['+','.join(value)+']'
                        selge += '%s:%s,' % (key, value)
                    selstr += 'Endpoints(%s):' % selge[:len(selge) - 1]
                    selstr += '%d ' % (len(group_endpoints))
                if ggtags:
                    for key, value in ggtags.items():
                        if isinstance(value, list):
                            value = '['+','.join(value)+']'
                        selgg += '%s:%s,' % (key, value)
                    selstr += 'Groups(%s):' % selgg[:len(selgg) - 1]
                    selstr += '%d' % (len(group_groups))

                logger.info(selstr)
def main():
    """Fetch downtimes from GOCDB for a given date and write them as Avro
    files for every customer and job listed in customer.conf.
    """
    parser = argparse.ArgumentParser(description='Fetch downtimes from GOCDB for given date')
    parser.add_argument('-d', dest='date', nargs=1, metavar='YEAR-MONTH-DAY', required=True)
    parser.add_argument('-c', dest='custconf', nargs=1, metavar='customer.conf', help='path to customer configuration file', type=str, required=False)
    parser.add_argument('-g', dest='gloconf', nargs=1, metavar='global.conf', help='path to global configuration file', type=str, required=False)
    args = parser.parse_args()

    global logger
    logger = Logger(os.path.basename(sys.argv[0]))
    certs = {'Authentication': ['HostKey', 'HostCert', 'CAPath', 'CAFile', 'VerifyServerCert']}
    schemas = {'AvroSchemas': ['Downtimes']}
    output = {'Output': ['Downtimes']}
    conn = {'Connection': ['Timeout', 'Retry']}
    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(confpath, certs, schemas, output, conn)
    global globopts
    globopts = cglob.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    feeds = confcust.get_mapfeedjobs(sys.argv[0], deffeed='https://goc.egi.eu/gocdbpi/')

    # Defensive only: argparse already enforces -d (required=True, nargs=1).
    if len(args.date) == 0:
        # BUGFIX: was `print parser.print_help()`, which printed the help
        # text followed by a spurious "None" (print_help() returns None).
        parser.print_help()
        raise SystemExit(1)

    # Calculate start and end of the requested day.
    try:
        start = datetime.datetime.strptime(args.date[0], '%Y-%m-%d')
        end = datetime.datetime.strptime(args.date[0], '%Y-%m-%d')
        timestamp = start.strftime('%Y_%m_%d')
    except ValueError as e:
        logger.error(e)
        raise SystemExit(1)
    start = start.replace(hour=0, minute=0, second=0)
    end = end.replace(hour=23, minute=59, second=59)

    for feed, jobcust in feeds.items():
        gocdb = GOCDBReader(feed)
        dts, parsed = gocdb.getDowntimes(start, end)

        # Duplicate downtimes of legacy service types under their mapped name
        # so consumers see both the old and the new service type.
        dtslegmap = []
        if parsed:
            for dt in dts:
                if dt['service'] in LegMapServType.keys():
                    dtslegmap.append(copy.copy(dt))
                    dtslegmap[-1]['service'] = LegMapServType[dt['service']]
            for job, cust in jobcust:
                jobdir = confcust.get_fulldir(cust, job)
                custname = confcust.get_custname(cust)

                filename = gen_fname_repdate(logger, timestamp, globopts['OutputDowntimes'.lower()], jobdir)
                avro = AvroWriter(globopts['AvroSchemasDowntimes'.lower()], filename,
                                dts + dtslegmap, os.path.basename(sys.argv[0]))
                avro.write()

            custs = set([cust for job, cust in jobcust])
            for cust in custs:
                jobs = [job for job, lcust in jobcust if cust == lcust]
                # Log the customer display name for consistency with the
                # sibling connectors (previously logged the raw conf object).
                logger.info('Customer:%s Jobs:%d Fetched Date:%s Endpoints:%d' % (confcust.get_custname(cust), len(jobs), args.date[0], len(dts + dtslegmap)))
def main():
    """Fetch site weights from the Gstat/VAPOR provider for every job in
    customer.conf, backfill zero or missing weights from the most recent
    previous output file (up to 30 days back), and write both today's and
    the backfilled previous Avro files.
    """
    parser = argparse.ArgumentParser(description="""Fetch weights information from Gstat provider
                                                    for every job listed in customer.conf""")
    parser.add_argument('-c', dest='custconf', nargs=1, metavar='customer.conf', help='path to customer configuration file', type=str, required=False)
    parser.add_argument('-g', dest='gloconf', nargs=1, metavar='global.conf', help='path to global configuration file', type=str, required=False)
    args = parser.parse_args()

    global logger
    logger = Logger(os.path.basename(sys.argv[0]))

    certs = {'Authentication': ['HostKey', 'HostCert', 'CAPath', 'CAFile', 'VerifyServerCert']}
    schemas = {'AvroSchemas': ['Weights']}
    output = {'Output': ['Weights']}
    conn = {'Connection': ['Timeout', 'Retry']}
    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(confpath, schemas, output, certs, conn)
    global globopts
    globopts = cglob.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    feeds = confcust.get_mapfeedjobs(sys.argv[0], deffeed='https://operations-portal.egi.eu/vapor/downloadLavoisier/option/json/view/VAPOR_Ngi_Sites_Info')

    timestamp = datetime.datetime.utcnow().strftime('%Y_%m_%d')

    for feed, jobcust in feeds.items():
        weights = Vapor(feed)

        newweights = dict()
        newweights.update(weights.getWeights())

        # Keep datawr defined even when a feed has no jobs, so the summary
        # logging below cannot hit an unbound name.
        datawr = []
        for job, cust in jobcust:
            fileprev, existfileprev = None, None
            jobdir = confcust.get_fulldir(cust, job)

            # Look back up to 30 days for the most recent previous output
            # file of this job (removed unused oldDataExists/now locals and
            # replaced the manual while-counter with a for loop).
            for i in range(1, 31):
                dayprev = datetime.datetime.utcnow() - datetime.timedelta(days=i)
                fileprev = gen_fname_repdate(logger, dayprev.strftime('%Y_%m_%d'), globopts['OutputWeights'.lower()], jobdir)
                if os.path.exists(fileprev):
                    existfileprev = fileprev
                    break

            oldweights = dict()
            # Load old data, if any.
            if existfileprev:
                oldweights.update(loadOldData(existfileprev))

                # Backfill the old list: refresh non-positive old values from
                # the new feed and carry forward entries the new feed lost.
                for key in oldweights:
                    val = int(oldweights[key])
                    if val <= 0:
                        if key in newweights:
                            oldweights[key] = str(newweights[key])
                    if key not in newweights:
                        newweights[key] = str(oldweights[key])

            # Fill the new list: replace non-positive new values with the old
            # ones and mirror new entries into the old map.
            for key in newweights:
                val = int(newweights[key])
                if val <= 0:
                    if key in oldweights:
                        val = int(oldweights[key])
                if key not in oldweights:
                    oldweights[key] = str(val)
                newweights[key] = str(val)

            filename = gen_fname_repdate(logger, timestamp, globopts['OutputWeights'.lower()], jobdir)

            datawr = gen_outdict(newweights)
            avro = AvroWriter(globopts['AvroSchemasWeights'.lower()], filename, datawr, os.path.basename(sys.argv[0]))
            avro.write()

            # Rewrite the previous day's file with the backfilled old data.
            if existfileprev:
                olddata = gen_outdict(oldweights)
                os.remove(existfileprev)
                avro = AvroWriter(globopts['AvroSchemasWeights'.lower()], existfileprev, olddata, os.path.basename(sys.argv[0]))
                avro.write()

        custs = set([cust for job, cust in jobcust])
        for cust in custs:
            jobs = [job for job, lcust in jobcust if cust == lcust]
            # NOTE(review): logs the raw conf object for Customer; sibling
            # connectors log confcust.get_custname(cust) — confirm intent.
            logger.info('Customer:%s Jobs:%d Sites:%d' % (cust, len(jobs), len(datawr)))