def main():
    parser = argparse.ArgumentParser(description='Fetch POEM profile for every job of the customer and write POEM expanded profiles needed for prefilter for EGI customer')
    parser.add_argument('-c', dest='custconf', nargs=1, metavar='customer.conf', help='path to customer configuration file', type=str, required=False)
    parser.add_argument('-np', dest='noprefilter', help='do not write POEM expanded profiles for prefilter', required=False, action='store_true')
    parser.add_argument('-p', dest='poemconf', nargs=1, metavar='poem-connector.conf', help='path to poem-connector configuration file', type=str, required=False)
    parser.add_argument('-g', dest='gloconf', nargs=1, metavar='global.conf', help='path to global configuration file', type=str, required=False)
    args = parser.parse_args()

    global logger
    logger = Logger(os.path.basename(sys.argv[0]))

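    # Global configuration sections and options this connector reads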
    certs = {'Authentication': ['HostKey', 'HostCert', 'VerifyServerCert', 'CAPath', 'CAFile']}
    schemas = {'AvroSchemas': ['Poem']}
    prefilter = {'Prefilter': ['PoemExpandedProfiles']}
    output = {'Output': ['Poem']}
    conn = {'Connection': ['Timeout', 'Retry']}
    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(confpath, certs, schemas, output, conn, prefilter)
    global globopts
    globopts = cglob.parse()
    timestamp = datetime.datetime.utcnow().strftime('%Y_%m_%d')

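    # poem-connector.conf sections: POEM servers, profiles to fetch and prefilter data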
    servers = {'PoemServer': ['Host', 'VO']}
    filterprofiles = {'FetchProfiles': ['List']}
    prefilterdata = {'PrefilterData': ['AllowedNGI', 'AllowedNGIProfiles', 'AllNGI', 'AllNGIProfiles']}
    global cpoem, poemopts
    confpath = args.poemconf[0] if args.poemconf else None
    cpoem = PoemConf(confpath, servers, filterprofiles, prefilterdata)
    poemopts = cpoem.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()

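    # Fetch profiles once: ps feeds the prefilter, psa is filtered per customer job below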
    readerInstance = PoemReader(args.noprefilter)
    ps, psa = readerInstance.getProfiles()

    if not args.noprefilter:
        poempref = PrefilterPoem()
        preffname = gen_fname_repdate(logger, timestamp, globopts['PrefilterPoemExpandedProfiles'.lower()], '')
        poempref.writeProfiles(ps, preffname)

    for cust in confcust.get_customers():
        # write profiles

        custname = confcust.get_custname(cust)

        for job in confcust.get_jobs(cust):
            jobdir = confcust.get_fulldir(cust, job)

            profiles = confcust.get_profiles(job)
            lfprofiles = gen_outprofiles(psa, profiles)

            filename = gen_fname_repdate(logger, timestamp, globopts['OutputPoem'.lower()], jobdir)
            avro = AvroWriter(globopts['AvroSchemasPoem'.lower()], filename,
                              lfprofiles, os.path.basename(sys.argv[0]))
            avro.write()

            logger.info('Customer:%s Job:%s Profiles:%s Tuples:%d' % (custname, job, ','.join(profiles), len(lfprofiles)))
def main():
    parser = argparse.ArgumentParser(
        description="""Fetch and construct entities from NEANIAS feed""")
    parser.add_argument('-c',
                        dest='custconf',
                        nargs=1,
                        metavar='customer.conf',
                        help='path to customer configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-g',
                        dest='gloconf',
                        nargs=1,
                        metavar='global.conf',
                        help='path to global configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-d',
                        dest='date',
                        metavar='YEAR-MONTH-DAY',
                        help='write data for this date',
                        type=str,
                        required=False)
    args = parser.parse_args()
    group_endpoints, group_groups = [], []
    logger = Logger(os.path.basename(sys.argv[0]))

    fixed_date = None
    if args.date and date_check(args.date):
        fixed_date = args.date

    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(sys.argv[0], confpath)
    globopts = cglob.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    confcust.make_dirstruct(globopts['InputStateSaveDir'.lower()])

    for cust in confcust.get_customers():
        custname = confcust.get_custname(cust)

        for job in confcust.get_jobs(cust):
            jobdir = confcust.get_fulldir(cust, job)
            logger.customer = confcust.get_custname(cust)
            jobstatedir = confcust.get_fullstatedir(
                globopts['InputStateSaveDir'.lower()], cust, job)
            fetchtype = confcust.get_fetchtype(job)

            state = None
            logger.job = job
            logger.customer = custname

            uidservtype = confcust.pass_uidserviceendpoints(job)
            ams_custopts = confcust.get_amsopts(cust)
            ams_opts = cglob.merge_opts(ams_custopts, 'ams')
            ams_complete, missopt = cglob.is_complete(ams_opts, 'ams')

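            # The topology feed is either a remote URL or a local JSON file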
            feeds = confcust.get_mapfeedjobs(sys.argv[0])
            if is_feed(list(feeds.keys())[0]):
                remote_topo = urlparse(list(feeds.keys())[0])
                res = input.connection(logger, 'NEANIAS', globopts,
                                       remote_topo.scheme, remote_topo.netloc,
                                       remote_topo.path)
                if not res:
                    raise input.ConnectorError()
                doc = input.parse_json(
                    logger, 'NEANIAS', globopts, res, remote_topo.scheme +
                    '://' + remote_topo.netloc + remote_topo.path)
                eosc = EOSCReader(doc, uidservtype, fetchtype)
                group_groups = eosc.get_groupgroups()
                group_endpoints = eosc.get_groupendpoints()
                state = True
            else:
                try:
                    with open(list(feeds.keys())[0]) as fp:
                        js = json.load(fp)
                        eosc = EOSCReader(js, uidservtype, fetchtype)
                        group_groups = eosc.get_groupgroups()
                        group_endpoints = eosc.get_groupendpoints()
                        state = True
                except IOError as exc:
                    logger.error(
                        'Customer:%s Job:%s : Problem opening %s - %s' %
                        (logger.customer, logger.job, list(feeds.keys())[0],
                         repr(exc)))
                    state = False

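            # Record the fetch state for this job, tagged with the fixed date when one was given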
            if fixed_date:
                output.write_state(sys.argv[0], jobstatedir, state,
                                   globopts['InputStateDays'.lower()],
                                   fixed_date.replace('-', '_'))
            else:
                output.write_state(sys.argv[0], jobstatedir, state,
                                   globopts['InputStateDays'.lower()])

            if not state:
                continue

            numge = len(group_endpoints)
            numgg = len(group_groups)

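            # Publish the fetched topology to AMS if enabled in the global configuration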
            if eval(globopts['GeneralPublishAms'.lower()]):
                if fixed_date:
                    partdate = fixed_date
                else:
                    partdate = datestamp(1).replace('_', '-')

                ams = output.AmsPublish(
                    ams_opts['amshost'], ams_opts['amsproject'],
                    ams_opts['amstoken'], ams_opts['amstopic'],
                    confcust.get_jobdir(job), ams_opts['amsbulk'],
                    ams_opts['amspacksinglemsg'], logger,
                    int(globopts['ConnectionRetry'.lower()]),
                    int(globopts['ConnectionTimeout'.lower()]))

                ams.send(globopts['AvroSchemasTopologyGroupOfGroups'.lower()],
                         'group_groups', partdate, group_groups)

                ams.send(
                    globopts['AvroSchemasTopologyGroupOfEndpoints'.lower()],
                    'group_endpoints', partdate, group_endpoints)

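            # Additionally write the topology to local Avro files if enabled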
            if eval(globopts['GeneralWriteAvro'.lower()]):
                if fixed_date:
                    filename = filename_date(
                        logger,
                        globopts['OutputTopologyGroupOfGroups'.lower()],
                        jobdir, fixed_date.replace('-', '_'))
                else:
                    filename = filename_date(
                        logger,
                        globopts['OutputTopologyGroupOfGroups'.lower()],
                        jobdir)
                avro = output.AvroWriter(
                    globopts['AvroSchemasTopologyGroupOfGroups'.lower()],
                    filename)
                ret, excep = avro.write(group_groups)
                if not ret:
                    logger.error('Customer:%s Job:%s : %s' %
                                 (logger.customer, logger.job, repr(excep)))
                    raise SystemExit(1)

                if fixed_date:
                    filename = filename_date(
                        logger,
                        globopts['OutputTopologyGroupOfEndpoints'.lower()],
                        jobdir, fixed_date.replace('-', '_'))
                else:
                    filename = filename_date(
                        logger,
                        globopts['OutputTopologyGroupOfEndpoints'.lower()],
                        jobdir)
                avro = output.AvroWriter(
                    globopts['AvroSchemasTopologyGroupOfEndpoints'.lower()],
                    filename)
                ret, excep = avro.write(group_endpoints)
                if not ret:
                    logger.error('Customer:%s Job:%s : %s' %
                                 (logger.customer, logger.job, repr(excep)))
                    raise SystemExit(1)

            logger.info('Customer:%s Job:%s Fetched Endpoints:%d Groups(%s):%d' %
                        (custname, job, numge, fetchtype, numgg))
def main():
    global logger, globopts
    parser = argparse.ArgumentParser(description='Fetch POEM profile for every job of the customer and write POEM expanded profiles needed for prefilter for EGI customer')
    parser.add_argument('-c', dest='custconf', nargs=1, metavar='customer.conf', help='path to customer configuration file', type=str, required=False)
    parser.add_argument('-g', dest='gloconf', nargs=1, metavar='global.conf', help='path to global configuration file', type=str, required=False)
    parser.add_argument('-d', dest='date', metavar='YEAR-MONTH-DAY', help='write data for this date', type=str, required=False)
    args = parser.parse_args()

    logger = Logger(os.path.basename(sys.argv[0]))

    fixed_date = None
    if args.date and date_check(args.date):
        fixed_date = args.date

    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(sys.argv[0], confpath)
    globopts = cglob.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    confcust.make_dirstruct(globopts['InputStateSaveDir'.lower()])

    for cust in confcust.get_customers():

        custname = confcust.get_custname(cust)

        for job in confcust.get_jobs(cust):
            logger.customer = confcust.get_custname(cust)
            logger.job = job

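            # Each job defines its own POEM server/VO pair and the set of profiles to fetch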
            poemserver = dict()
            profiles = confcust.get_profiles(job)
            namespace = confcust.get_namespace(job)
            poemserver[confcust.get_poemserver_host(job)] = confcust.get_poemserver_vo(job)

            poem = PoemReader(custname, job)
            psa = poem.getProfiles(profiles, namespace, poemserver)

            jobdir = confcust.get_fulldir(cust, job)
            jobstatedir = confcust.get_fullstatedir(globopts['InputStateSaveDir'.lower()], cust, job)

            ams_custopts = confcust.get_amsopts(cust)
            ams_opts = cglob.merge_opts(ams_custopts, 'ams')
            ams_complete, missopt = cglob.is_complete(ams_opts, 'ams')
            if not ams_complete:
                logger.error('Customer:%s %s options incomplete, missing %s' % (custname, 'ams', ' '.join(missopt)))
                continue

            if fixed_date:
                output.write_state(sys.argv[0], jobstatedir,
                                   poem.state,
                                   globopts['InputStateDays'.lower()],
                                   fixed_date.replace('-', '_'))
            else:
                output.write_state(sys.argv[0], jobstatedir,
                                   poem.state,
                                   globopts['InputStateDays'.lower()])

            if not poem.state:
                continue

            lfprofiles = gen_outprofiles(psa, profiles)

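            # Publish the filtered metric profiles to AMS if enabled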
            if eval(globopts['GeneralPublishAms'.lower()]):
                if fixed_date:
                    partdate = fixed_date
                else:
                    partdate = datestamp(1).replace('_', '-')

                ams = output.AmsPublish(ams_opts['amshost'],
                                        ams_opts['amsproject'],
                                        ams_opts['amstoken'],
                                        ams_opts['amstopic'],
                                        confcust.get_jobdir(job),
                                        ams_opts['amsbulk'],
                                        ams_opts['amspacksinglemsg'],
                                        logger,
                                        int(globopts['ConnectionRetry'.lower()]),
                                        int(globopts['ConnectionTimeout'.lower()]))

                ams.send(globopts['AvroSchemasPoem'.lower()], 'metric_profile',
                         partdate, lfprofiles)

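            # Write the filtered metric profiles to a local Avro file if enabled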
            if eval(globopts['GeneralWriteAvro'.lower()]):
                if fixed_date:
                    filename = filename_date(logger, globopts['OutputPoem'.lower()], jobdir, fixed_date.replace('-', '_'))
                else:
                    filename = filename_date(logger, globopts['OutputPoem'.lower()], jobdir)
                avro = output.AvroWriter(globopts['AvroSchemasPoem'.lower()], filename)
                ret, excep = avro.write(lfprofiles)
                if not ret:
                    logger.error('Customer:%s Job:%s %s' % (logger.customer, logger.job, repr(excep)))
                    raise SystemExit(1)

            logger.info('Customer:%s Job:%s Profiles:%s Tuples:%d' % (custname, job, ','.join(profiles), len(lfprofiles)))
def main():
    global logger, globopts
    parser = argparse.ArgumentParser(description='Fetch metric profile for every job of the customer')
    parser.add_argument('-c', dest='custconf', nargs=1, metavar='customer.conf', help='path to customer configuration file', type=str, required=False)
    parser.add_argument('-g', dest='gloconf', nargs=1, metavar='global.conf', help='path to global configuration file', type=str, required=False)
    parser.add_argument('-d', dest='date', metavar='YEAR-MONTH-DAY', help='write data for this date', type=str, required=False)
    args = parser.parse_args()

    logger = Logger(os.path.basename(sys.argv[0]))

    fixed_date = None
    if args.date and date_check(args.date):
        fixed_date = args.date

    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(sys.argv[0], confpath)
    globopts = cglob.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    confcust.make_dirstruct(globopts['InputStateSaveDir'.lower()])

    for cust in confcust.get_customers():
        custname = confcust.get_custname(cust)

        for job in confcust.get_jobs(cust):
            logger.customer = confcust.get_custname(cust)
            logger.job = job

            profiles = confcust.get_profiles(job)
            webapi_custopts = confcust.get_webapiopts(cust)
            webapi_opts = cglob.merge_opts(webapi_custopts, 'webapi')
            webapi_complete, missopt = cglob.is_complete(webapi_opts, 'webapi')

            if not webapi_complete:
                logger.error('Customer:%s Job:%s %s options incomplete, missing %s' % (custname, logger.job, 'webapi', ' '.join(missopt)))
                continue

            webapi = WebAPI(custname, job, profiles, confcust.get_namespace(job),
                            webapi_opts['webapihost'],
                            webapi_opts['webapitoken'])
            fetched_profiles = webapi.get_profiles()

            jobdir = confcust.get_fulldir(cust, job)
            jobstatedir = confcust.get_fullstatedir(globopts['InputStateSaveDir'.lower()], cust, job)

            ams_custopts = confcust.get_amsopts(cust)
            ams_opts = cglob.merge_opts(ams_custopts, 'ams')
            ams_complete, missopt = cglob.is_complete(ams_opts, 'ams')
            if not ams_complete:
                logger.error('Customer:%s %s options incomplete, missing %s' % (custname, 'ams', ' '.join(missopt)))
                continue

            if fixed_date:
                output.write_state(sys.argv[0], jobstatedir,
                                   webapi.state,
                                   globopts['InputStateDays'.lower()],
                                   fixed_date.replace('-', '_'))
            else:
                output.write_state(sys.argv[0], jobstatedir,
                                   webapi.state,
                                   globopts['InputStateDays'.lower()])

            if not webapi.state:
                continue

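            # Publish the WEB-API metric profiles to AMS if enabled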
            if eval(globopts['GeneralPublishAms'.lower()]):
                if fixed_date:
                    partdate = fixed_date
                else:
                    partdate = datestamp(1).replace('_', '-')

                ams = output.AmsPublish(ams_opts['amshost'],
                                        ams_opts['amsproject'],
                                        ams_opts['amstoken'],
                                        ams_opts['amstopic'],
                                        confcust.get_jobdir(job),
                                        ams_opts['amsbulk'],
                                        ams_opts['amspacksinglemsg'],
                                        logger,
                                        int(globopts['ConnectionRetry'.lower()]),
                                        int(globopts['ConnectionTimeout'.lower()]))

                ams.send(globopts['AvroSchemasMetricProfile'.lower()], 'metric_profile',
                         partdate, fetched_profiles)

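            # Write the WEB-API metric profiles to a local Avro file if enabled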
            if eval(globopts['GeneralWriteAvro'.lower()]):
                if fixed_date:
                    filename = filename_date(logger, globopts['OutputMetricProfile'.lower()], jobdir, fixed_date.replace('-', '_'))
                else:
                    filename = filename_date(logger, globopts['OutputMetricProfile'.lower()], jobdir)
                avro = output.AvroWriter(globopts['AvroSchemasMetricProfile'.lower()], filename)
                ret, excep = avro.write(fetched_profiles)
                if not ret:
                    logger.error('Customer:%s Job:%s %s' % (logger.customer, logger.job, repr(excep)))
                    raise SystemExit(1)

            logger.info('Customer:%s Job:%s Profiles:%s Tuples:%d' % (custname, job, ', '.join(profiles), len(fetched_profiles)))
def main():
    global logger, globopts
    parser = argparse.ArgumentParser(
        description='Fetch metric profile for every job of the customer')
    parser.add_argument('-c',
                        dest='custconf',
                        nargs=1,
                        metavar='customer.conf',
                        help='path to customer configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-g',
                        dest='gloconf',
                        nargs=1,
                        metavar='global.conf',
                        help='path to global configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-d',
                        dest='date',
                        metavar='YEAR-MONTH-DAY',
                        help='write data for this date',
                        type=str,
                        required=False)
    args = parser.parse_args()

    logger = Logger(os.path.basename(sys.argv[0]))

    fixed_date = None
    if args.date and date_check(args.date):
        fixed_date = args.date

    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(sys.argv[0], confpath)
    globopts = cglob.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    confcust.make_dirstruct(globopts['InputStateSaveDir'.lower()])

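    # Use an uvloop event loop for the asynchronous WEB-API fetch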
    loop = uvloop.new_event_loop()
    asyncio.set_event_loop(loop)

    for cust in confcust.get_customers():
        custname = confcust.get_custname(cust)

        for job in confcust.get_jobs(cust):
            logger.customer = confcust.get_custname(cust)
            logger.job = job

            profiles = confcust.get_profiles(job)
            webapi_custopts = confcust.get_webapiopts(cust)
            webapi_opts = cglob.merge_opts(webapi_custopts, 'webapi')
            webapi_complete, missopt = cglob.is_complete(webapi_opts, 'webapi')

            if not webapi_complete:
                logger.error(
                    'Customer:%s Job:%s %s options incomplete, missing %s' %
                    (custname, logger.job, 'webapi', ' '.join(missopt)))
                continue

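            # Fetch and parse profiles asynchronously; on connector errors record a failed state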
            try:
                res = loop.run_until_complete(
                    fetch_data(webapi_opts['webapihost'],
                               webapi_opts['webapitoken']))

                fetched_profiles = parse_source(res, profiles,
                                                confcust.get_namespace(job))

                loop.run_until_complete(
                    write_state(cust, job, confcust, fixed_date, True))

                if eval(globopts['GeneralWriteAvro'.lower()]):
                    write_avro(cust, job, confcust, fixed_date,
                               fetched_profiles)

                logger.info('Customer:%s Job:%s Profiles:%s Tuples:%d' %
                            (custname, job, ', '.join(profiles), len(fetched_profiles)))

            except (ConnectorHttpError, KeyboardInterrupt,
                    ConnectorParseError) as exc:
                logger.error(repr(exc))
                loop.run_until_complete(
                    write_state(cust, job, confcust, fixed_date, False))
def main():
    global logger, globopts
    parser = argparse.ArgumentParser(
        description=
        'Fetch POEM profile for every job of the customer and write POEM expanded profiles needed for prefilter for EGI customer'
    )
    parser.add_argument('-c',
                        dest='custconf',
                        nargs=1,
                        metavar='customer.conf',
                        help='path to customer configuration file',
                        type=str,
                        required=False)
    parser.add_argument(
        '-np',
        dest='noprefilter',
        help='do not write POEM expanded profiles for prefilter',
        required=False,
        action='store_true')
    parser.add_argument('-p',
                        dest='poemconf',
                        nargs=1,
                        metavar='poem-connector.conf',
                        help='path to poem-connector configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-g',
                        dest='gloconf',
                        nargs=1,
                        metavar='global.conf',
                        help='path to global configuration file',
                        type=str,
                        required=False)
    parser.add_argument('-d',
                        dest='date',
                        metavar='YEAR-MONTH-DAY',
                        help='write data for this date',
                        type=str,
                        required=False)
    args = parser.parse_args()

    logger = Logger(os.path.basename(sys.argv[0]))

    fixed_date = None
    if args.date and date_check(args.date):
        fixed_date = args.date

    confpath = args.gloconf[0] if args.gloconf else None
    cglob = Global(sys.argv[0], confpath)
    globopts = cglob.parse()

    servers = {'PoemServer': ['Host', 'VO']}
    filterprofiles = {'FetchProfiles': ['List']}
    prefilterdata = {
        'PrefilterData':
        ['AllowedNGI', 'AllowedNGIProfiles', 'AllNGI', 'AllNGIProfiles']
    }
    global cpoem, poemopts
    confpath = args.poemconf[0] if args.poemconf else None
    cpoem = PoemConf(confpath, servers, filterprofiles, prefilterdata)
    poemopts = cpoem.parse()

    confpath = args.custconf[0] if args.custconf else None
    confcust = CustomerConf(sys.argv[0], confpath)
    confcust.parse()
    confcust.make_dirstruct()
    confcust.make_dirstruct(globopts['InputStateSaveDir'.lower()])

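    # Build aggregate customer/job labels for the logger context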
    customers = set(
        map(lambda c: confcust.get_custname(c), confcust.get_customers()))
    customers = customers.pop() if len(customers) == 1 else '({0})'.format(
        ','.join(customers))
    logger.customer = customers
    customers = confcust.get_customers()
    jobs = list()
    for c in customers:
        jobs = jobs + confcust.get_jobs(c)
    jobs = jobs.pop() if len(jobs) == 1 else '({0})'.format(','.join(jobs))
    logger.job = jobs

    readerInstance = PoemReader(args.noprefilter)
    ps, psa = readerInstance.getProfiles()

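    # Write POEM expanded profiles needed by the prefilter unless -np was given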
    if not args.noprefilter and ps:
        poempref = PrefilterPoem()
        preffname = filename_date(
            logger, globopts['PrefilterPoemExpandedProfiles'.lower()], '')
        poempref.writeProfiles(ps, preffname)

    for cust in confcust.get_customers():
        # write profiles

        custname = confcust.get_custname(cust)

        for job in confcust.get_jobs(cust):
            logger.customer = confcust.get_custname(cust)
            logger.job = job

            jobdir = confcust.get_fulldir(cust, job)
            jobstatedir = confcust.get_fullstatedir(
                globopts['InputStateSaveDir'.lower()], cust, job)

            ams_custopts = confcust.get_amsopts(cust)
            ams_opts = cglob.merge_opts(ams_custopts, 'ams')
            ams_complete, missopt = cglob.is_complete(ams_opts, 'ams')
            if not ams_complete:
                logger.error('Customer:%s %s options incomplete, missing %s' %
                             (custname, 'ams', ' '.join(missopt)))
                continue

            if fixed_date:
                output.write_state(sys.argv[0], jobstatedir,
                                   readerInstance.state,
                                   globopts['InputStateDays'.lower()],
                                   fixed_date.replace('-', '_'))
            else:
                output.write_state(sys.argv[0], jobstatedir,
                                   readerInstance.state,
                                   globopts['InputStateDays'.lower()])

            if not readerInstance.state:
                continue

            profiles = confcust.get_profiles(job)
            lfprofiles = gen_outprofiles(psa, profiles)

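            # Publish the filtered profiles to AMS if enabled in the global configuration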
            if eval(globopts['GeneralPublishAms'.lower()]):
                if fixed_date:
                    partdate = fixed_date
                else:
                    partdate = datestamp().replace('_', '-')

                ams = output.AmsPublish(
                    ams_opts['amshost'], ams_opts['amsproject'],
                    ams_opts['amstoken'], ams_opts['amstopic'],
                    confcust.get_jobdir(job), ams_opts['amsbulk'],
                    ams_opts['amspacksinglemsg'], logger,
                    int(globopts['ConnectionRetry'.lower()]),
                    int(globopts['ConnectionTimeout'.lower()]))

                ams.send(globopts['AvroSchemasPoem'.lower()], 'metric_profile',
                         partdate, lfprofiles)

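            # Additionally write the filtered profiles to a local Avro file if enabled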
            if eval(globopts['GeneralWriteAvro'.lower()]):
                if fixed_date:
                    filename = filename_date(logger,
                                             globopts['OutputPoem'.lower()],
                                             jobdir,
                                             fixed_date.replace('-', '_'))
                else:
                    filename = filename_date(logger,
                                             globopts['OutputPoem'.lower()],
                                             jobdir)
                avro = output.AvroWriter(globopts['AvroSchemasPoem'.lower()],
                                         filename)
                ret, excep = avro.write(lfprofiles)
                if not ret:
                    logger.error('Customer:%s Job:%s %s' %
                                 (logger.customer, logger.job, repr(excep)))
                    raise SystemExit(1)

            logger.info('Customer:%s Job:%s Profiles:%s Tuples:%d' %
                        (custname, job, ','.join(profiles), len(lfprofiles)))