def test_pipeline_hostlist_enqueue(runner, queue_factory, job_completed_factory):  # pylint: disable=unused-argument
    """hostlist projection with netrange filter should enqueue matching hosts to the next queue"""

    # destination queue receiving the projected hostlist
    scan_queue = queue_factory.create(name='test queue ack scan', config=yaml_dump({'module': 'nmap', 'args': 'arg1'}))
    # source queue holding a completed ipv6 discovery job
    discover_queue = queue_factory.create(name='test queue ipv6 dns discover', config=yaml_dump({'module': 'six_dns_discover', 'delay': '1'}))
    job_completed_factory.create(
        queue=discover_queue,
        make_output=Path('tests/server/data/parser-six_dns_discover-job.zip').read_bytes()
    )

    pipeline_steps = [
        {'step': 'load_job', 'queue': discover_queue.name},
        {'step': 'project_hostlist'},
        {'step': 'filter_netranges', 'netranges': ['::1/128']},
        {'step': 'enqueue', 'queue': scan_queue.name},
        {'step': 'archive_job'},
    ]
    current_app.config['SNER_PLANNER']['pipelines'] = [{'type': 'queue', 'steps': pipeline_steps}]

    Planner(oneshot=True).run()

    # only the ::1 host passes the netrange filter
    assert Target.query.count() == 1
# Example #2
def test_scanning_pipeline(runner, queue_factory, job_completed_factory):  # pylint: disable=unused-argument
    """scanning pipeline should fan projected services out to every queue in the step group"""

    # source queue with a finished discovery job
    source_queue = queue_factory.create(name='queue1', config=yaml_dump({'module': 'nmap', 'args': 'arg1'}))
    job_completed_factory.create(
        queue=source_queue,
        make_output=Path('tests/server/data/parser-nmap-job.zip').read_bytes()
    )
    # two scanning queues addressed through the shared step group
    queue_factory.create(name='queue2', config=yaml_dump({'module': 'nmap', 'args': 'arg2'}))
    queue_factory.create(name='queue3', config=yaml_dump({'module': 'nmap', 'args': 'arg3'}))

    current_app.config['SNER_PLANNER'] = yaml.safe_load("""
        step_groups:
          service_scanning:
            - step: enqueue
              queue: 'queue2'
            - step: enqueue
              queue: 'queue3'

        pipelines:
          - type: queue
            steps:
              - step: load_job
                queue: 'queue1'
              - step: filter_tarpits
              - step: project_servicelist
              - step: run_group
                name: service_scanning
              - step: archive_job
    """)

    Planner(oneshot=True).run()

    # 5 services from the parsed job, enqueued into two queues each
    assert Target.query.count() == 10
def test_pipeline_servicelist_enqueue(runner, queue_factory, job_completed_factory):  # pylint: disable=unused-argument
    """servicelist projection should enqueue every discovered service to the follow-up queue"""

    # destination queue for version scanning
    version_queue = queue_factory.create(name='test queue version scan', config=yaml_dump({'module': 'manymap', 'args': 'arg1'}))
    # source queue holding a completed discovery job
    disco_queue = queue_factory.create(name='test queue disco', config=yaml_dump({'module': 'nmap', 'args': 'arg1'}))
    job_completed_factory.create(
        queue=disco_queue,
        make_output=Path('tests/server/data/parser-nmap-job.zip').read_bytes()
    )

    current_app.config['SNER_PLANNER']['pipelines'] = [{
        'type': 'queue',
        'steps': [
            {'step': 'load_job', 'queue': disco_queue.name},
            {'step': 'project_servicelist'},
            {'step': 'enqueue', 'queue': version_queue.name},
            {'step': 'archive_job'},
        ],
    }]

    Planner(oneshot=True).run()

    # the nmap job fixture yields 5 services
    assert Target.query.count() == 5
# Example #4
def test_discovery_pipeline(runner, queue_factory):  # pylint: disable=unused-argument
    """interval pipeline should enumerate the home netrange into every discovery queue"""

    # two discovery queues referenced by the service_discovery step group
    for queue_name, queue_args in (('queue1', 'arg1'), ('queue2', 'arg2')):
        queue_factory.create(name=queue_name, config=yaml_dump({'module': 'nmap', 'args': queue_args}))

    current_app.config['SNER_PLANNER'] = yaml.safe_load("""
        common:
          home_netranges_ipv4: &home_netranges_ipv4 ['127.0.0.0/24']

        step_groups:
          service_discovery:
            - step: enqueue
              queue: 'queue1'
            - step: enqueue
              queue: 'queue2'

        pipelines:
          - type: interval
            name: discover_ipv4
            interval: 120days
            steps:
              - step: enumerate_ipv4
                netranges: *home_netranges_ipv4
              - step: run_group
                name: service_discovery
    """)

    Planner(oneshot=True).run()

    # a /24 enumerates to 256 addresses, pushed into both queues
    assert Target.query.count() == 2*256
# Example #5
# File: test_steps.py  Project: bodik/sner4
def test_load_import_archive(app, queue_factory, job_completed_factory):  # pylint: disable=unused-argument
    """test load, import, archive steps"""

    queue = queue_factory.create(
        name='test queue',
        config=yaml_dump({
            'module': 'nmap',
            'args': 'arg1'
        }),
    )
    job_completed_factory.create(
        queue=queue,
        make_output=Path('tests/server/data/parser-nmap-job.zip').read_bytes())
    ctx = Context()

    ctx = load_job(ctx, queue.name)
    assert ctx.job
    assert len(ctx.data.hosts) == 1
    assert len(ctx.data.services) == 5

    ctx = import_job(ctx)
    assert len(Host.query.all()) == 1
    assert len(Service.query.all()) == 5

    ctx = archive_job(ctx)
    archive_path = Path(
        current_app.config['SNER_VAR']) / 'planner_archive' / f'{ctx.job.id}'
    assert archive_path.exists()

    # trigger stop pipeline when no job finished
    with pytest.raises(StopPipeline):
        ctx = load_job(ctx, queue.name)
# Example #6
def test_pipeline_rescan_services(runner, host_factory, service_factory, queue_factory):  # pylint: disable=unused-argument
    """rescan_services pipeline should requeue services of both address families"""

    # one service per address family
    for address in ('127.0.0.1', '::1'):
        service_factory.create(host=host_factory.create(address=address))
    queue_factory.create(name='test vscan', config=yaml_dump({'module': 'nmap', 'args': 'arg1'}))

    # zero interval makes every service immediately due for rescan
    rescan_step = {'step': 'rescan_services', 'interval': '0s', 'queue': 'test vscan'}
    current_app.config['SNER_PLANNER']['pipelines'] = [{'type': 'generic', 'steps': [rescan_step]}]

    Planner(oneshot=True).run()

    assert Target.query.count() == 2
def test_pipeline_import_job(runner, queue_factory, job_completed_factory):  # pylint: disable=unused-argument
    """queue pipeline with import_job should persist parsed job data to storage"""

    disco_queue = queue_factory.create(
        name='test queue disco',
        config=yaml_dump({'module': 'nmap', 'args': 'arg1'}),
    )
    job_completed_factory.create(
        queue=disco_queue,
        make_output=Path('tests/server/data/parser-nmap-job.zip').read_bytes()
    )

    current_app.config['SNER_PLANNER']['pipelines'] = [{
        'type': 'queue',
        'steps': [
            {'step': 'load_job', 'queue': disco_queue.name},
            {'step': 'import_job'},
            {'step': 'archive_job'},
        ],
    }]

    Planner(oneshot=True).run()

    # nmap job fixture contains one host with five services
    assert len(Host.query.all()) == 1
    assert len(Service.query.all()) == 5
# Example #8
def longrun_target(longrun_a, queue_factory, target_factory):  # pylint: disable=redefined-outer-name
    """queue target fixture"""

    # queue configured from the longrun_a fixture, seeded with its first target
    test_queue = queue_factory.create(name='testqueue', config=yaml_dump(longrun_a['config']))
    yield target_factory.create(queue=test_queue, target=longrun_a['targets'][0])
# Example #9
class QueueFactory(BaseModelFactory):  # pylint: disable=too-few-public-methods
    """test queue model factory; produces Queue rows with sane defaults"""
    class Meta:  # pylint: disable=too-few-public-methods
        """factory configuration binding this factory to the Queue model"""
        model = Queue

    # default attribute values for generated Queue instances
    name = 'testqueue'
    config = yaml_dump({'module': 'dummy', 'args': '--arg1 abc --arg2'})
    group_size = 1
    priority = 10
    active = True
# Example #10
# File: conftest.py  Project: bodik/sner4
def dummy_target(queue_factory, target_factory):  # pylint: disable=redefined-outer-name
    """dummy target fixture"""

    # a testqueue running the dummy module, holding a single target
    test_queue = queue_factory.create(
        name='testqueue',
        config=yaml_dump({'module': 'dummy', 'args': '--arg1'}),
    )
    yield target_factory.create(queue=test_queue, target='target1')
# Example #11
# File: test_steps.py  Project: bodik/sner4
def test_rescan_hosts(app, host_factory, queue_factory):  # pylint: disable=unused-argument
    """rescan_hosts step should pick up hosts of both address families"""

    # one host per address family
    for address in ('127.0.0.1', '::1'):
        host_factory.create(address=address)
    queue_factory.create(name='test vscan', config=yaml_dump({'module': 'nmap', 'args': 'arg1'}))

    # zero interval makes every host immediately due for rescan
    ctx = rescan_hosts(Context(), '0s')

    assert len(ctx.data) == 2
# Example #12
def test_pipeline_rescan_services_largedataset(runner, queue_factory, host_factory):  # pylint: disable=unused-argument
    """test rescan_services pipeline testing with large dataset

    Seeds 1M pre-existing targets and 10 hosts x 64000 services via raw SQL
    (ORM inserts would be far too slow at this scale), then runs the
    rescan_services pipeline and checks all services were appended as targets.
    """

    logger = logging.getLogger(__name__)

    logger.info('lot_of_targets prepare start')
    queue = queue_factory.create(
        name='test vscan',
        config=yaml_dump({'module': 'nmap', 'args': 'arg1'}),
    )
    existing_targets_count = 10**6
    # bypass all db layers for performance
    # NOTE(review): SQL built from internal ints only, so string interpolation is
    # injection-safe here; confirm db.session.execute still accepts plain strings
    # (SQLAlchemy 2.x requires text()).
    query = 'INSERT INTO target (queue_id, target) VALUES ' + ','.join([str((queue.id, str(idx))) for idx in range(existing_targets_count)])
    db.session.execute(query)
    logger.info('lot_of_targets prepare end')

    logger.info('lot_of_services prepare start')
    for addr in range(10):
        host = host_factory.create(address=str(ip_address(addr)))
        # bypass all db layers for performance
        query = 'INSERT INTO service (host_id, proto, port, tags) VALUES ' + ','.join([str((host.id, 'tcp', str(idx), '{}')) for idx in range(64000)])
        db.session.execute(query)
        logging.info('prepared %s', host)
    logger.info('lot_of_services prepare end')

    # drop stale ORM state so subsequent queries see the raw-SQL inserts
    db.session.expire_all()

    current_app.config['SNER_PLANNER']['pipelines'] = [
        {
            'type': 'generic',
            'steps': [
                {
                    'step': 'rescan_services',
                    'interval': '0s',  # zero interval: every service is due immediately
                    'queue': 'test vscan',
                }
            ]
        }
    ]

    Planner(oneshot=True).run()

    # pre-seeded targets remain; one new target per service is appended
    assert Target.query.count() == existing_targets_count + Service.query.count()
# Example #13
def test_import_pipeline(runner, queue_factory, job_completed_factory):  # pylint: disable=unused-argument
    """import pipeline should load, persist and archive a completed job"""

    source_queue = queue_factory.create(name='queue1', config=yaml_dump({'module': 'nmap', 'args': 'arg1'}))
    job_completed_factory.create(
        queue=source_queue,
        make_output=Path('tests/server/data/parser-nmap-job.zip').read_bytes()
    )

    current_app.config['SNER_PLANNER'] = yaml.safe_load("""
        pipelines:
          - type: queue
            steps:
              - step: load_job
                queue: 'queue1'
              - step: import_job
              - step: archive_job
    """)

    Planner(oneshot=True).run()

    # nmap job fixture holds one host with five services
    assert len(Host.query.all()) == 1
    assert len(Service.query.all()) == 5
# Example #14
# File: db_command.py  Project: bodik/sner4
def initdata():  # pylint: disable=too-many-statements
    """put initial data to database

    Seeds development/test fixtures: an admin user, scheduler exclusions,
    a set of scanner queues, and two storage hosts with services, vulns
    and a note. Commits the session once at the end.
    """

    # auth test data
    db.session.add(
        User(username='******',
             active=True,
             roles=['user', 'operator', 'admin']))

    # scheduler test data
    db.session.add(
        Excl(family=ExclFamily.network,
             value='127.66.66.0/26',
             comment='blacklist 1'))
    db.session.add(
        Excl(family=ExclFamily.regex,
             value=r'^tcp://.*:22$',
             comment='avoid ssh'))

    # dummy development queue, pre-filled with a few targets
    queue = Queue(name='dev dummy',
                  config=yaml_dump({
                      'module': 'dummy',
                      'args': '--dummyparam 1'
                  }),
                  group_size=2,
                  priority=10,
                  active=True)
    db.session.add(queue)
    for target in range(3):
        db.session.add(Target(target=target, queue=queue))

    # scanner queues covering the supported agent modules
    db.session.add(
        Queue(
            name='pentest full syn scan',
            config=yaml_dump({
                'module':
                'nmap',
                'args':
                '-sS -A -p1-65535 -Pn  --max-retries 3 --script-timeout 10m --min-hostgroup 20 --min-rate 900 --max-rate 1500'
            }),
            group_size=20,
            priority=10,
        ))

    db.session.add(
        Queue(
            name='disco syn scan top10000',
            config=yaml_dump({
                'module': 'nmap',
                'args': '-sS --top-ports 10000 -Pn',
                'timing_perhost': 4
            }),
            group_size=1000,
            priority=10,
        ))

    db.session.add(
        Queue(
            name='disco ipv6 dns discover',
            config=yaml_dump({
                'module': 'six_dns_discover',
                'delay': 1
            }),
            group_size=1000,
            priority=10,
        ))

    db.session.add(
        Queue(
            name='disco ipv6 enum discover',
            config=yaml_dump({
                'module': 'six_enum_discover',
                'rate': 100
            }),
            group_size=5,
            priority=10,
        ))

    db.session.add(
        Queue(
            name='data version scan basic',
            config=yaml_dump({
                'module': 'manymap',
                'args': '-sV --version-intensity 4 -Pn',
                'delay': 5
            }),
            group_size=50,
            priority=15,
        ))

    db.session.add(
        Queue(
            name='data jarm scan',
            config=yaml_dump({
                'module': 'jarm',
                'delay': 5
            }),
            group_size=50,
            priority=15,
        ))

    db.session.add(
        Queue(
            name='data script scan basic',
            config=yaml_dump({
                'module': 'manymap',
                'args':
                '-sS --script default,http-headers,ldap-rootdse,ssl-cert,ssl-enum-ciphers,ssh-auth-methods --script-timeout 10m -Pn',
                'delay': 5
            }),
            group_size=50,
            priority=15,
        ))

    # storage test data host1
    # same vuln payload is added to both hosts to exercise aggregation views
    aggregable_vuln = {
        'name': 'aggregable vuln',
        'xtype': 'x.agg',
        'severity': SeverityEnum.medium
    }

    # hostname carries an XSS probe to exercise output escaping in the UI
    host = Host(address='127.4.4.4',
                hostname='testhost.testdomain.test<script>alert(1);</script>',
                os='Test Linux 1',
                comment='a some unknown service server')
    db.session.add(host)

    db.session.add(
        Service(host=host,
                proto='tcp',
                port=12345,
                state='open:testreason',
                name='svcx',
                info='testservice banner',
                comment='manual testservice comment'))

    db.session.add(Vuln(host=host, **aggregable_vuln))

    # storage test data host2
    host = Host(address='127.3.3.3',
                hostname='testhost1.testdomain.test',
                os='Test Linux 2',
                comment='another server')
    db.session.add(host)

    db.session.add(
        Service(host=host,
                proto='tcp',
                port=12345,
                state='closed:testreason',
                name='svcx'))

    # vulns spanning every severity level, for list/filter/report testing
    db.session.add(
        Vuln(host=host,
             name='test vulnerability',
             xtype='testxtype.123',
             severity=SeverityEnum.critical,
             comment='a test vulnerability comment',
             refs=['ref1', 'ref2'],
             tags=['tag1', 'tag2']))

    db.session.add(
        Vuln(host=host,
             name='another test vulnerability',
             xtype='testxtype.124',
             severity=SeverityEnum.high,
             comment='another vulnerability comment',
             tags=None))

    db.session.add(
        Vuln(host=host,
             name='vulnerability1',
             xtype='testxtype.124',
             severity=SeverityEnum.medium,
             tags=['info']))

    db.session.add(
        Vuln(host=host,
             name='vulnerability2',
             xtype='testxtype.124',
             severity=SeverityEnum.low,
             tags=['report']))

    db.session.add(
        Vuln(host=host,
             name='vulnerability2',
             xtype='testxtype.124',
             severity=SeverityEnum.info,
             tags=['info']))

    # NOTE(review): Service.query.first() presumably triggers session autoflush
    # so the services added above are visible — confirm autoflush is enabled
    db.session.add(
        Vuln(host=host,
             service=Service.query.first(),
             name='vulnerability3',
             xtype='testxtype.124',
             severity=SeverityEnum.unknown,
             tags=['report']))

    db.session.add(Vuln(host=host, **aggregable_vuln))

    db.session.add(
        Note(host=host,
             xtype='sner.testnote',
             data='testnote data',
             comment='test note comment'))

    db.session.commit()