Example #1
def test_nrs_msa_nod():
    pool = combine_pools(t_path('data/pool_023_nirspec_msa_3nod.csv'))
    all_candidates = constrain_on_candidates(None)
    asns = generate(pool, registry_level2_only(global_constraints=all_candidates))
    assert len(asns) == 12
    for asn in asns:
        assert len(asn['products'][0]['members']) == 3
Example #2
def miri_params(request):
    cid, asn_type, asn_name, product_name = request.param
    pool = combine_pools(t_path('data/pool_007_spec_miri.csv'))
    gc = constrain_on_candidates((cid, ))
    rules = registry_level3_only(global_constraints=gc)
    asns = generate(pool, rules)
    return asns, asn_type, asn_name, product_name
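Note: miri_params unpacks request.param, so it is evidently registered as a parametrized pytest fixture; a minimal sketch of that wiring, with purely illustrative parameter tuples (hypothetical values, not taken from the source):

import pytest

# Hypothetical parametrization: each tuple is (cid, asn_type, asn_name, product_name).
@pytest.fixture(params=[
    ('o001', 'spec2', 'hypothetical_asn_name', 'hypothetical_product_name'),
])
def miri_params(request):
    cid, asn_type, asn_name, product_name = request.param
    ...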
Example #3
def test_generate(full_pool_rules):
    """Run a full sized pool using all rules"""
    pool, rules, pool_fname = full_pool_rules
    asns = generate(pool, rules)
    assert len(asns) == 95
    for asn in asns:
        asn_name, asn_store = asn.dump()
        asn_table = load_asn(asn_store)
        schemas = rules.validate(asn_table)
        assert len(schemas) > 0
Example #4
def test_serialize(full_pool_rules):
    """Test serializing roundtripping"""
    pool, rules, pool_fname = full_pool_rules
    asns = generate(pool, rules)
    for asn in asns:
        for format in asn.ioregistry:
            fname, serialized = asn.dump(format=format)
            assert serialized is not None
            recovered = load_asn(serialized)
            assert recovered is not None
Example #5
def test_generate(full_pool_rules):
    """Run a full sized pool using all rules"""
    pool, rules, pool_fname = full_pool_rules
    asns = generate(pool, rules)
    assert len(asns) == 35
    for asn in asns:
        asn_name, asn_store = asn.dump()
        asn_table = load_asn(asn_store)
        schemas = rules.validate(asn_table)
        assert len(schemas) > 0
Example #6
def test_simple():
    """Test generate on simple registry"""
    registry = AssociationRegistry([t_path('data/rules_basic.py')],
                                   include_default=False)
    pool = AssociationPool()
    pool['value'] = ['row1', 'row2']

    asns = generate(pool, registry)
    assert len(asns) == 1
    assert len(asns[0]['members']) == 2
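In the jwst package, AssociationPool behaves like an astropy Table, so the two-row pool above can also be built in a single step; a small equivalent sketch:

from jwst.associations import AssociationPool

# Equivalent pool construction: columns passed directly, as with an astropy Table.
pool = AssociationPool({'value': ['row1', 'row2']})
assert len(pool) == 2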
Example #7
def test_level2_asn_names_with_version(pool_params):
    pool_path = helpers.t_path(pool_params)
    pool = helpers.combine_pools(pool_path)
    rules = helpers.registry_level2_only(global_constraints=all_candidates)
    asns = generate(pool, rules, version_id=True)
    assert len(asns) > 0
    for asn in asns:
        name = asn.asn_name
        m = re.match(LEVEL3_ASN_WITH_VERSION, name)
        assert m is not None
Example #9
def test_global_constraints(constraints, pool, n_asns):
    """Test that global constraints get applied to all rules"""
    rules = AssociationRegistry(global_constraints=constraints)
    assert len(rules) >= 3
    for constraint in constraints:
        for rule in rules:
            assert constraint in rules[rule].GLOBAL_CONSTRAINT

    pool = helpers.combine_pools(pool)
    asns = generate(pool, rules)
    assert len(asns) == n_asns
Example #10
def test_level35_names(pool_file):
    rules = registry_level3_only()
    pool = AssociationPool.read(pool_file)
    asns = generate(pool, rules)
    for asn in asns:
        product_name = asn['products'][0]['name']
        if asn['asn_rule'] == 'Asn_IFU':
            m = re.match(LEVEL3_PRODUCT_NAME_NO_OPTELEM_REGEX, product_name)
        else:
            m = re.match(LEVEL3_PRODUCT_NAME_REGEX, product_name)
        assert m is not None
Example #11
def test_level3_productname_components_discovered():
    rules = registry_level3_only()
    pool = combine_pools(t_path('data/pool_002_image_miri.csv'))
    asns = generate(pool, rules)
    asn = asns[0]
    match = re.match(LEVEL3_PRODUCT_NAME_REGEX, asn['products'][0]['name'])
    assert match is not None
    matches = match.groupdict()
    assert matches['program'] == '99009'
    assert matches['acid'] == 'a3001'
    assert matches['target'] == 't001'
    assert matches['instrument'] == 'miri'
    assert matches['opt_elem'] == 'f560w'
Example #12
def test_nrs_fixedslit_nod():
    """Test NIRSpec Fixed-slit background nods"""
    pool = combine_pools(t_path('data/pool_024_nirspec_fss_nods.csv'))
    constraint_all_candidates = constrain_on_candidates(None)
    asns = generate(pool, registry_level2_only(
        global_constraints=constraint_all_candidates)
    )
    assert len(asns) == 30
    for asn in asns:
        n_dithers = int(asn.constraints['nods'].value)
        n_spectral_dithers = int(asn.constraints['subpxpts'].value)
        #  Expect self + all exposures not at the same primary dither
        n_members = n_dithers - n_spectral_dithers + 1
        assert len(asn['products'][0]['members']) == n_members
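A quick check of that member formula: with nods = 5 and subpxpts = 2 (illustrative values), an association holds the exposure itself plus the 5 - 2 = 3 exposures not at the same primary dither position, i.e. 5 - 2 + 1 = 4 members.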
Example #13
def test_level3_names(pool_file, global_constraints):
    rules = registry_level3_only(
        global_constraints=global_constraints
    )
    pool = AssociationPool.read(pool_file)
    asns = generate(pool, rules)
    for asn in asns:
        product_name = asn['products'][0]['name']
        if asn['asn_rule'] == 'Asn_Lv3MIRMRS':
            m = re.match(LEVEL3_PRODUCT_NAME_NO_OPTELEM_REGEX, product_name)
        else:
            m = re.match(LEVEL3_PRODUCT_NAME_REGEX, product_name)
        assert m is not None
        assert m.groupdict()['acid'] == 'o002'
Example #14
def test_nrs_fixedslit_nod():
    """Test NIRSpec Fixed-slit background nods"""
    pool = combine_pools(t_path('data/pool_024_nirspec_fss_nods.csv'))
    constraint_all_candidates = constrain_on_candidates(None)
    asns = generate(
        pool,
        registry_level2_only(global_constraints=constraint_all_candidates))
    assert len(asns) == 30
    for asn in asns:
        nods = int(asn.constraints['nods'].value)
        multiplier = DITHER_PATTERN_MULTIPLIER[
            asn.constraints['subpxpts'].value]
        n_members = nods * multiplier
        assert len(asn['products'][0]['members']) == n_members
Example #15
def test_targacq(pool_file):
    """Test for existence of target acquisitions in associatons"""
    rules = registry_level3_only()
    pool = combine_pools(t_path(pool_file))
    asns = generate(pool, rules)
    assert len(asns) > 0
    for asn in asns:
        # Ignore reprocessed asn's with only science
        if not asn['asn_rule'] in ["Asn_Lv3SpecAux", "Asn_Lv3NRSIFUBackground"]:
            for product in asn['products']:
                exptypes = [
                    member['exptype'].lower()
                    for member in product['members']
                    ]
                assert 'target_acquisition' in exptypes
Example #16
def test_duplicate_names():
    """
    For Level 3 association, there should be no association
    with the same product name. Generation should produce
    log messages indicating when duplicate names have been found.
    """
    pool = AssociationPool.read(t_path('data/jw00632_dups.csv'))
    constrain_all_candidates = constrain_on_candidates(None)
    rules = registry_level3_only(global_constraints=constrain_all_candidates)

    with pytest.warns(RuntimeWarning):
        asns = generate(pool, rules)

    # There should only be one association left.
    assert len(asns) == 1
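Outside of pytest, the duplicate-name warnings that generation emits can be captured with the standard library; a minimal sketch reusing pool and rules from this example:

import warnings

# Record the RuntimeWarning(s) raised while generating associations.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')
    asns = generate(pool, rules)
for w in caught:
    print(w.category.__name__, w.message)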
Example #17
def test_level2_asn_names(pool_params):
    pool_path = helpers.t_path(pool_params)
    pool = helpers.combine_pools(pool_path)
    rules = helpers.registry_level2_only(global_constraints=all_candidates)
    asns = generate(pool, rules)
    assert len(asns) > 0
    for asn in asns:
        name = asn.asn_name
        if any(
                getattr(c, 'is_acid', False)
                for c in asn.constraints
        ):
            m = re.match(LEVEL3_ASN_ACID_NAME_REGEX, name)
        else:
            m = re.match(LEVEL3_ASN_DISCOVERED_NAME_REGEX, name)
        assert m is not None
Example #18
def test_meta():
    rules = registry_level3_only()
    pool = combine_pools(t_path('data/pool_002_image_miri.csv'))
    asns = generate(pool, rules)
    assert len(asns) == 1
    asn = asns[0]
    data = asn.data
    assert data['program'] == '99009'
    assert data['target'] == 't001'
    assert data['asn_type'] == 'image3'
    assert data['asn_id'] == 'a3001'
    assert data['asn_pool'] == 'pool_002_image_miri'
    assert data['asn_rule'] == 'Asn_Lv3Image'
    assert data['degraded_status'] == 'No known degraded exposures in association.'
    assert data['version_id'] is None
    assert data['constraints'] is not None
Example #19
def test_nrs_fixedslit_nod_chop():
    """Test NIRSpec Fixed-slit background nods"""
    pool = combine_pools(t_path('data/pool_025_nirspec_fss_nod_chop.csv'))
    constraint_all_candidates = constrain_on_candidates(None)
    asns = generate(pool, registry_level2_only(
        global_constraints=constraint_all_candidates)
    )
    assert len(asns) == 8
    for asn in asns:
        assert asn['asn_rule'] in ['Asn_Lv2NRSFSS', 'Asn_Lv2SpecSpecial']
        if asn['asn_rule'] == 'Asn_Lv2SpecSpecial':
            assert len(asn['products'][0]['members']) == 1
        else:
            nods = int(asn.constraints['nods'].value)
            if asn['asn_id'].startswith('c'):
                nods += 1
            assert len(asn['products'][0]['members']) == nods
Example #20
def test_multiple_optelems(pool_file):
    rules = registry_level3_only()
    pool = AssociationPool.read(pool_file)
    asns = generate(pool, rules)
    for asn in asns:
        product_name = asn['products'][0]['name']
        if asn['asn_rule'] != 'Asn_Lv3MIRMRS':
            m = re.match(LEVEL3_PRODUCT_NAME_REGEX, product_name)
            assert m is not None
            try:
                value = '-'.join(asn.constraints['opt_elem2'].found_values)
            except KeyError:
                value = None
            if value in EMPTY:
                assert '-' not in m.groupdict()['opt_elem']
            else:
                assert '-' in m.groupdict()['opt_elem']
Example #21
    def test_run_generate(self):
        rules = registry_level3_only()
        for ppars in self.pools:
            pool = combine_pools(ppars.path, **ppars.kwargs)
            asns = generate(pool, rules)
            assert len(asns) == ppars.n_asns, \
                ppars.path + ': n_asns not expected {} {}'.format(len(asns), ppars.n_asns)
            for asn, candidates in zip(asns, ppars.candidates):
                assert set(asn.candidates) == set(candidates)
            file_regex = re.compile(r'.+_(?P<suffix>.+)\..+')
            for asn in asns:
                for product in asn['products']:
                    for member in product['members']:
                        if member['exptype'] == 'science':
                            match = file_regex.match(member['expname'])
                            assert match is not None, \
                                ppars.path + ': No suffix match for {}'.format(member['expname'])
                            assert match.groupdict()['suffix'] in ppars.valid_suffixes, \
                                ppars.path + ': Suffix {} not valid'.format(match.groupdict()['suffix'])
Example #22
def test_level3_productname_components_acid():
    global_constraints = DMSAttrConstraint(
        name='asn_candidate_ids',
        value='.+o001.+',
        sources=['asn_candidate'],
        force_unique=True,
        is_acid=True,
        evaluate=True,
    )
    rules = registry_level3_only(global_constraints=global_constraints)
    pool = combine_pools(t_path('data/pool_002_image_miri.csv'))
    asns = generate(pool, rules)
    asn = asns[0]
    match = re.match(LEVEL3_PRODUCT_NAME_REGEX, asn['products'][0]['name'])
    assert match is not None
    matches = match.groupdict()
    assert matches['program'] == '99009'
    assert matches['acid'] == 'o001'
    assert matches['target'] == 't001'
    assert matches['instrument'] == 'miri'
    assert matches['opt_elem'] == 'f560w'
Example #23
def test_duplicate_generate():
    """Test for duplicate/overwrite association

    The pool has two exposures, one without a valid `asn_candidate`,
    and one with a valid observation `asn_candidate`.
    When set with the "all candidates" constraint, only one association
    should be made.

    The prompt for this test was that three associations were being created,
    two of which were the observation candidate, with the second
    being a duplicate of the first. The third was an extraneous
    discovered candidate.
    """
    pool = AssociationPool.read(t_path('data/pool_duplicate.csv'))
    constrain_all_candidates = constrain_on_candidates(None)
    rules = registry_level3_only(global_constraints=constrain_all_candidates)
    asns = generate(pool, rules)
    assert len(asns) == 1
    asn = asns[0]
    assert asn['asn_type'] == 'image3'
    assert asn['asn_id'] == 'o029'
Example #24
def generate_from_pool(pool_path):
    """Generate associations from pools"""
    rules = registry_level2_only()
    pool = combine_pools(t_path(pool_path))
    asns = generate(pool, rules)
    return asns
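A usage sketch for this helper; the pool file is one that appears elsewhere on this page, and the printed keys are those used in the other examples:

# Generate Level2 associations from a single pool file and summarize them.
asns = generate_from_pool('data/pool_002_image_miri.csv')
for asn in asns:
    print(asn['asn_id'], asn['asn_rule'])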
Example #25
    def __init__(self, args=None, pool=None):

        if args is None:
            args = sys.argv[1:]
        if isinstance(args, str):
            args = args.split(' ')

        parser = argparse.ArgumentParser(
            description='Generate Association Data Products',
            usage='asn_generate pool'
        )
        if pool is None:
            parser.add_argument(
                'pool', type=str, help='Association Pool'
            )
        op_group = parser.add_mutually_exclusive_group()
        op_group.add_argument(
            '-i', '--ids', nargs='+',
            dest='asn_candidate_ids',
            help='space-separated list of association candidate IDs to operate on.'
        )
        op_group.add_argument(
            '--discover',
            action='store_true',
            help='Produce discovered associations'
        )
        op_group.add_argument(
            '--all-candidates',
            action='store_true', dest='all_candidates',
            help='Produce all association candidate-specific associations'
        )
        parser.add_argument(
            '-p', '--path', type=str,
            default='.',
            help='Folder to save the associations to. Default: "%(default)s"'
        )
        parser.add_argument(
            '--save-orphans', dest='save_orphans',
            nargs='?', const='orphaned.csv', default=False,
            help='Save orphaned items into the specified table. Default: "%(default)s"'
        )
        parser.add_argument(
            '--version-id', dest='version_id',
            nargs='?', const=True, default=None,
            help=(
                'Version tag to add into association name and products.'
                ' If not specified, no version will be used.'
                ' If specified without a value, the current time is used.'
                ' Otherwise, the specified string will be used.'
            )
        )
        parser.add_argument(
            '-r', '--rules', action='append',
            help='Association Rules file.'
        )
        parser.add_argument(
            '--ignore-default', action='store_true',
            help='Do not include default rules. -r should be used if set.'
        )
        parser.add_argument(
            '--dry-run',
            action='store_true', dest='dry_run',
            help='Execute but do not save results.'
        )
        parser.add_argument(
            '-d', '--delimiter', type=str,
            default='|',
            help='''Delimiter
            to use if pool files are comma-separated-value
            (csv) type files. Default: "%(default)s"
            '''
        )
        parser.add_argument(
            '--pool-format', type=str,
            default='ascii',
            help=(
                'Format of the pool file.'
                ' Any format allowed by the astropy'
                ' Unified File I/O interface is allowed.'
                ' Default: "%(default)s"'
            )
        )
        parser.add_argument(
            '-v', '--verbose',
            action='store_const', dest='loglevel',
            const=logging.INFO, default=logging.NOTSET,
            help='Output progress and results.'
        )
        parser.add_argument(
            '-D', '--debug',
            action='store_const', dest='loglevel',
            const=logging.DEBUG,
            help='Output detailed debugging information.'
        )
        parser.add_argument(
            '--DMS',
            action='store_true', dest='DMS_enabled',
            help='Running under DMS workflow conditions.'
        )
        parser.add_argument(
            '--format',
            default='json',
            help='Format of the association files. Default: "%(default)s"'
        )
        parser.add_argument(
            '--version', action='version',
            version='%(prog)s {}'.format(__version__),
            help='Version of the generator.'
        )
        parser.add_argument(
            '--no-merge', action='store_true',
            help='Do not merge Level2 associations into one'
        )

        parsed = parser.parse_args(args=args)

        # Configure logging
        config = None
        if parsed.DMS_enabled:
            config = DMS_config
        logger = log_config(name=__package__, config=config)
        logger.setLevel(parsed.loglevel)

        # Preamble
        logger.info('Command-line arguments: {}'.format(args))
        logger.context.set('asn_candidate_ids', parsed.asn_candidate_ids)

        if pool is None:
            logger.info('Reading pool {}'.format(parsed.pool))
            self.pool = AssociationPool.read(
                parsed.pool, delimiter=parsed.delimiter,
                format=parsed.pool_format,
            )
        else:
            self.pool = pool

        # DMS: Add further info to logging.
        try:
            logger.context.set('program', self.pool[0]['PROGRAM'])
        except KeyError:
            pass

        # Determine mode of operation. Options are
        #  1) Only specified candidates
        #  2) Only discovered associations that do not match
        #     candidate associations
        #  3) Both discovered and all candidate associations.
        logger.info('Reading rules.')
        if not parsed.discover and\
           not parsed.all_candidates and\
           parsed.asn_candidate_ids is None:
            parsed.discover = True
            parsed.all_candidates = True
        if parsed.discover or parsed.all_candidates:
            global_constraints = constrain_on_candidates(
                None
            )
        elif parsed.asn_candidate_ids is not None:
            global_constraints = constrain_on_candidates(
                parsed.asn_candidate_ids
            )

        self.rules = AssociationRegistry(
            parsed.rules,
            include_default=not parsed.ignore_default,
            global_constraints=global_constraints,
            name=CANDIDATE_RULESET
        )

        if parsed.discover:
            self.rules.update(
                AssociationRegistry(
                    parsed.rules,
                    include_default=not parsed.ignore_default,
                    name=DISCOVER_RULESET
                )
            )

        logger.info('Generating associations.')
        self.associations = generate(
            self.pool, self.rules, version_id=parsed.version_id
        )

        if parsed.discover:
            logger.debug(
                '# asns found before discover filtering={}'.format(
                    len(self.associations)
                )
            )
            self.associations = filter_discovered_only(
                self.associations,
                DISCOVER_RULESET,
                CANDIDATE_RULESET,
                keep_candidates=parsed.all_candidates,
            )
            self.rules.Utility.resequence(self.associations)

        # Do a grand merging. This is done particularly for
        # Level2 associations.
        if not parsed.no_merge:
            try:
                self.associations = self.rules.Utility.merge_asns(self.associations)
            except AttributeError:
                pass

        logger.info(self.__str__())

        if not parsed.dry_run:
            self.save(
                path=parsed.path,
                format=parsed.format,
                save_orphans=parsed.save_orphans
            )
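Judging by the usage string, this constructor backs the asn_generate command line. Assuming it lives on the package's Main class (the class name is not shown in this excerpt), it can also be driven programmatically:

# Hypothetical driver: 'Main' is assumed to be the class owning the __init__ above.
# The argument list mirrors the argparse options defined there.
gen = Main(['my_pool.csv', '--all-candidates', '--dry-run'])
print(len(gen.associations))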
Example #26
"""test_level3_dithers: Test of WFS rules."""

from jwst.associations.tests import helpers

from jwst.associations import generate
from jwst.associations.main import constrain_on_candidates

# Generate Level3 associations
all_candidates = constrain_on_candidates(None)
rules = helpers.registry_level3_only(global_constraints=all_candidates)
pool = helpers.combine_pools(helpers.t_path('data/pool_004_wfs.csv'))
level3_asns = generate(pool, rules)


class TestLevel3WFS(helpers.BasePoolRule):

    pools = [
        helpers.PoolParams(path=helpers.t_path('data/pool_004_wfs.csv'),
                           n_asns=42,
                           n_orphaned=0),
    ]

    valid_rules = [
        'Asn_Lv3WFSCMB',
    ]


def test_wfs_duplicate_product_names():
    """Test for duplicate product names"""
    global level3_asns

    # Plausible completion (the excerpt is cut off here): every product name
    # across the generated associations should be unique.
    name_list = [
        product['name']
        for asn in level3_asns
        for product in asn['products']
    ]
    assert len(name_list) == len(set(name_list))