Example #1
0
def policy_settings(item):
    '''
    Recursive function to attempt to pull out the various settings from scan
    policy settings in the editor format.

    Args:
        item (dict): An editor-format policy document (or any sub-document).

    Returns:
        dict: A flat mapping of setting id to its default value, gathered
            from this document and every sub-document beneath it.
    '''
    resp = dict()
    if 'id' in item and ('default' in item or
                         ('type' in item and item['type'] in [
                             'file', 'checkbox', 'entry', 'textarea',
                             'medium-fixed-entry', 'password'
                         ])):
        # if we find both an 'id' and a 'default' attribute, or if we find
        # a 'type' attribute matching one of the known attribute types, then
        # we will parse out the data and append it to the response dictionary
        if 'default' not in item:
            item['default'] = ""
        resp[item['id']] = item['default']

    for key, value in item.items():
        # here we will attempt to recurse down both a list of sub-
        # documents and an explicitly defined sub-document within the
        # editor data-structure.  The 'modes' key is explicitly skipped.
        if key == 'modes':
            continue
        if (isinstance(value, list) and len(value) > 0
                and isinstance(value[0], dict)):
            for subdoc in value:
                resp = dict_merge(resp, policy_settings(subdoc))
        if isinstance(value, dict):
            resp = dict_merge(resp, policy_settings(value))

    # Return the key-value pairs discovered.
    return resp
Example #2
0
 def edit(self,
          rule_id: int,
          plugin_id: Optional[int] = None,
          type: Optional[Literal['recast_critical', 'recast_high',
                                 'recast_medium', 'recast_low',
                                 'recast_info', 'exclude']] = None,
          host: Optional[str] = None,
          date: Optional[int] = None) -> None:
     '''
     Modifies an existing plugin rule

     Args:
         rule_id (int): The rule to modify
         plugin_id (int, optional): The plugin id to modify
         type (str, optional): The type of modification to perform
         host (str, optional): The host to apply this rule to
         date (int, optional): The unix date for this rule to expire

     Example:

         >>> nessus.plugin_rules.edit(1, date=1645164000)
     '''
     # Fetch the rule being modified.  NOTE: this previously fetched rule 1
     # unconditionally, ignoring the rule_id parameter.
     rule = self.details(rule_id)
     payload = dict_merge(
         rule,
         dict_clean({
             # Only stringify the plugin id when one was actually passed;
             # str(None) would produce the truthy string 'None' and defeat
             # the dict_clean filtering below.
             'plugin_id': str(plugin_id) if plugin_id is not None else None,
             'type': type,
             'host': host,
             'date': date
         }))
     return self._put(f'{rule_id}', json=payload)
Example #3
0
 def edit(self,
          smtp_host: Optional[str] = None,
          smtp_port: Optional[int] = None,
          # NOTE: a missing comma previously concatenated 'Force SSL' and
          # 'Force TLS' into the single invalid literal 'Force SSLForce TLS'.
          smtp_enc: Optional[Literal['No Encryption',
                                     'Use TLS if available',
                                     'Force SSL',
                                     'Force TLS'
                                     ]] = None,
          smtp_from: Optional[str] = None,
          smtp_www_host: Optional[str] = None,
          smtp_user: Optional[str] = None,
          smtp_pass: Optional[str] = None,
          smtp_auth: Optional[Literal['NONE',
                                      'PLAIN',
                                      'LOGIN',
                                      'NTLM',
                                      'CRAM-MD5'
                                      ]] = None
          ) -> None:
     '''
     Updates the Nessus daemon's mail settings

     Args:
         smtp_host (str, optional):
             DNS/IP Address of the SMTP server
         smtp_port (int, optional):
             Port number for the SMTP service
         smtp_enc (str, optional):
             The connection encryption for the SMTP server.  Valid values
             are ``No Encryption``, ``Use TLS if available``, ``Force SSL``,
             and ``Force TLS``.
         smtp_from (str, optional):
             Reply email address for email sent by the Nessus daemon
         smtp_www_host (str, optional):
             The host to use in email links
         smtp_user (str, optional):
             The username to use when authenticating to the SMTP service
         smtp_pass (str, optional):
             The password to use when authenticating to the SMTP service
         smtp_auth (str, optional):
             The authentication type for the SMTP server

     Example:

         >>> nessus.mail.edit(smtp_user='******',
         ...                  smtp_pass='******',
         ...                  smtp_auth='LOGIN',
         ...                  )
     '''
     # Overlay the requested changes on top of the current settings;
     # dict_clean strips the unset (None) values before the merge so only
     # explicitly provided fields are updated.
     current = self.details()
     updated = dict_merge(current, dict_clean({
         'smtp_host': smtp_host,
         'smtp_port': smtp_port,
         'smtp_enc': smtp_enc,
         'smtp_from': smtp_from,
         'smtp_www_host': smtp_www_host,
         'smtp_user': smtp_user,
         'smtp_pass': smtp_pass,
         'smtp_auth': smtp_auth
     }))
     self._put(json=updated)
Example #4
0
    def create(self,
               name: str,
               email: str,
               password: str,
               **kwargs
               ) -> List[Dict]:
        '''
        Create a new user.

        Args:
            name (str):
                The name of the new user.
            email (str):
                The email address of the user.
            password (str):
                The password for the new user.
            surname (optional, str):
                The surname of the new user.
            department (optional, str):
                The department of the user.
            biography (optional, str):
                The biography of the user.
            active (optional, bool):
                Is the user active?
            picture (optional, List[int]):
                The list of picture numbers.

        Return:
            list[dict]:
                The created user objects.

        Example:
            >>> tad.users.create(
            ...     name='username',
            ...     email='*****@*****.**',
            ...     password='******',
            ...     active=True
            ...     )
        '''
        # Fold the optional keyword attributes into the required fields.
        user_doc = dict_merge({
            'name': name,
            'email': email,
            'password': password
        }, kwargs)

        # Validate and serialize the document, then submit it wrapped in a
        # list as the endpoint accepts a batch of users.
        payload = [self._schema.dump(self._schema.load(user_doc))]
        response = self._post(json=payload)
        return self._schema.load(response, many=True, unknown=INCLUDE)
Example #5
0
def test_dict_merge():
    '''Verify dict_merge handles flat, nested, and multi-dict merges.'''
    # Two disjoint flat dictionaries simply combine.
    assert dict_merge({'a': 1}, {'b': 2}) == {'a': 1, 'b': 2}

    # Nested sub-documents are merged recursively, with values from the
    # later dictionary overriding the earlier one.
    left = {'s': {'a': 1}, 'b': 2}
    right = {'s': {'c': 3, 'a': 4}}
    assert dict_merge(left, right) == {'s': {'a': 4, 'c': 3}, 'b': 2}

    # Any number of dictionaries may be merged in a single call; the last
    # occurrence of a key wins.
    merged = dict_merge({'a': 1}, {'b': 2}, {'c': 3}, {'a': 5})
    assert merged == {'a': 5, 'b': 2, 'c': 3}
Example #6
0
def cli(configfile, observed_since, setup_only=False, troubleshoot=False):
    '''
    Tenable.io -> Jira Cloud Transformer & Ingester

    Args:
        configfile: An open file-like object containing the YAML config.
        observed_since (int): Unix timestamp to start the ingest from.
        setup_only (bool): When True, skip the ingest and only write out
            the generated configuration file.
        troubleshoot (bool): When True, run a single ingest with debug
            logging and emit a redacted report suitable for a Github issue.
    '''
    config_from_file = yaml.load(configfile, Loader=yaml.Loader)
    config = dict_merge(base_config(), config_from_file)

    # Get the logging definition and define any defaults as need be.
    log = config.get('log', {})
    log_lvls = {'debug': 10, 'info': 20, 'warn': 30, 'error': 40}
    log['level'] = log_lvls[log.get('level', 'warn')]
    log['format'] = log.get(
        'format', '%(asctime)-15s %(name)s %(levelname)s %(message)s')

    # Configure the root logging facility
    if troubleshoot:
        logging.basicConfig(level=logging.DEBUG,
                            format=log['format'],
                            filename='tenable_debug.log')
    else:
        logging.basicConfig(**log)

    # Output some basic information detailing the config file used and the
    # python version & system arch.
    logging.info('Tenable2JiraCloud Version {}'.format(__version__))
    logging.info('Using configuration file {}'.format(configfile.name))
    uname = platform.uname()
    logging.info('Running on Python {} {}/{}'.format(
        '.'.join([str(i) for i in sys.version_info][0:3]), uname[0],
        uname[-2]))

    # instantiate the Jira object
    jira = Jira('https://{}/rest/api/3'.format(config['jira']['address']),
                config['jira']['api_username'], config['jira']['api_token'])

    # Initiate the Tenable.io API model, the Ingester model, and start the
    # ingestion and data transformation.
    if config['tenable'].get('platform') == 'tenable.io':
        source = TenableIO(access_key=config['tenable'].get('access_key'),
                           secret_key=config['tenable'].get('secret_key'),
                           vendor='Tenable',
                           product='JiraCloud',
                           build=__version__)
    elif config['tenable'].get('platform') == 'tenable.sc':
        source = TenableSC(config['tenable'].get('address'),
                           port=int(config['tenable'].get('port', 443)),
                           username=config['tenable'].get('username'),
                           password=config['tenable'].get('password'),
                           access_key=config['tenable'].get('access_key'),
                           secret_key=config['tenable'].get('secret_key'),
                           vendor='Tenable',
                           product='JiraCloud',
                           build=__version__)
    else:
        logging.error('No valid Tenable platform configuration defined.')
        exit(1)
    ingest = Tio2Jira(source, jira, config)

    if troubleshoot:
        # if the troubleshooting flag is set, then we will be collecting some
        # basic information and outputting it to the screen in a format that
        # Github issues would expect to format it all pretty.  This should help
        # reduce the amount of time that is spent with back-and-forth debugging.
        try:
            ingest.ingest(observed_since)
        except Exception:
            # Previously a bare `except:`, which would also have swallowed
            # SystemExit/KeyboardInterrupt.  Catching Exception still lets
            # the report generation continue while logging the traceback.
            logging.exception('Caught the following Exception')

        # Some basic redaction of sensitive data, such as API Keys, Usernames,
        # Passwords, and hostnames.
        addr = config_from_file['jira']['address']
        sc_addr = 'NOTHING_TO_SEE_HERE_AT_ALL'
        config_from_file['jira']['address'] = '<REDACTED>'
        config_from_file['jira']['api_token'] = '<REDACTED>'
        config_from_file['jira']['api_username'] = '******'
        config_from_file['project']['leadAccountId'] = '<REDACTED>'
        if config_from_file['tenable'].get('address'):
            sc_addr = config_from_file['tenable']['address']
            config_from_file['tenable']['address'] = '<REDACTED>'
        if config_from_file['tenable'].get('access_key'):
            config_from_file['tenable']['access_key'] = '<REDACTED>'
        if config_from_file['tenable'].get('secret_key'):
            config_from_file['tenable']['secret_key'] = '<REDACTED>'
        if config_from_file['tenable'].get('username'):
            config_from_file['tenable']['username'] = '******'
        if config_from_file['tenable'].get('password'):
            config_from_file['tenable']['password'] = '******'

        # Read the debug log through a context manager so the handle is
        # closed before os.remove() deletes the file below (the original
        # left the handle dangling).
        with open('tenable_debug.log') as debugfile:
            debug_log = debugfile.read()

        output = troubleshooting.format(
            configfile=yaml.dump(config_from_file, default_flow_style=False),
            logging=debug_log.replace(addr, '<JIRA_CLOUD_HOST>')
                             .replace(sc_addr, '<TENABLE_SC_HOST>'),
            issuetypes='\n'.join(
                [
                    '{id}: {name}'.format(**a)
                    for a in jira.issue_types.list()
                    if a.get('name').lower() in ['task', 'subtask', 'sub-task']
                ]
            )
        )
        print(output)
        print('\n'.join([
            '/-------------------------------NOTICE-----------------------------------\\',
            '| The output above is helpful for us to troubleshoot exactly what is     |',
            '| happening within the code and offer a diagnosis for how to correct.    |',
            '| Please note that while some basic redaction has already been performed |',
            '| that we ask you to review the information you\'re about to send and     |',
            '| ensure that nothing deemed sensitive is transmitted.                   |',
            '| ---------------------------------------------------------------------- |',
            '| -- Copy of output saved to "issue_debug.md"                            |',
            '\\------------------------------------------------------------------------/'
        ]))
        with open('issue_debug.md', 'w') as reportfile:
            print(output, file=reportfile)
        os.remove('tenable_debug.log')
    elif not setup_only:
        ingest.ingest(observed_since)

        # If we are expected to continually re-run the transformer, then we will
        # need to track the passage of time and run every X hours, where X is
        # defined by the user in the configuration.
        if config.get('service', {}).get('interval', 0) > 0:
            sleeper = int(config['service']['interval']) * 3600
            while True:
                last_run = int(time.time())
                logging.info('Sleeping for {}h'.format(sleeper / 3600))
                time.sleep(sleeper)
                logging.info('Initiating ingest with observed_since={}'.format(
                    last_run))
                ingest.ingest(last_run)
    elif setup_only:
        # In setup-only mode, the ingest will not run, and instead a config file
        # will be generated that will have all of the JIRA identifiers baked in
        # and will also inform the integration to ignore the screen builder.
        # When using this config, if there are any changes to the code, then
        # this config will need to be re-generated.
        config['screen']['no_create'] = True
        logging.info('Set to setup-only.  Will not run ingest.')
        logging.info(
            'The following is the updated config file from the setup.')
        with open('generated_config.yaml', 'w') as outfile:
            outfile.write(yaml.dump(config, Dumper=yaml.Dumper))
        logging.info('Generated "generated_config.yaml" config file.')
        logging.info(
            'This config file should be updated for every new version of this integration.'
        )
Example #7
0
    def edit(self, group_id, **kw):
        '''
        Edits an access group

        :devportal:`access-groups: edit <v2-access-groups-edit>`

        Args:
            group_id (str):
                The UUID of the access group to edit.
            name (str, optional):
                The name of the access group to edit.
            rules (list, optional):
                A list of rule tuples in the standardized (name, operator,
                value) form, for example::

                    ('operating_system', 'eq', ['Windows NT'])

                Rules are validated against the available filters before
                being sent to the API.  Note that the value element in this
                context is a list of string values.
            principals (list, optional):
                A list of principal tuples.  Each tuple must contain the
                type, the identifier, and optionally the permissions for
                the principal.  The identifier can be either a UUID
                associated to a user/group or the name of the user/group,
                and the permissions can be ``CAN_VIEW``, ``CAN_SCAN``, or
                both in a list.  The default permission is ``CAN_VIEW``.
                For example::

                    ('user', '32a0c314-442b-4aed-bbf5-ba9cf5cafbf4', ['CAN_VIEW'])
                    ('user', '*****@*****.**', ['CAN_SCAN'])
                    ('group', '32a0c314-442b-4aed-bbf5-ba9cf5cafbf4')

            all_users (bool):
                If enabled, the access group will apply to all users and any
                principals defined will be ignored.
            all_assets (bool, optional):
                Specifies if the access group to modify is the default
                "all assets" group or a user-defined one.
            access_group_type (str, optional):
                The type of access group.  It can be one of the possible
                types: ``MANAGE_ASSETS``, ``SCAN_TARGETS``.  The default is
                ``MANAGE_ASSETS``.
        '''
        # Validate any supplied rules against the access-group asset-rule
        # filters before they're merged into the document.
        if 'rules' in kw:
            filterset = self._api.filters.access_group_asset_rules_filters_v2()
            kw['rules'] = self._parse_filters(
                kw['rules'], filterset, rtype='accessgroup')['rules']

        # Expand any principal tuples into the document form that the API
        # expects.
        if 'principals' in kw:
            kw['principals'] = self._principal_constructor(kw['principals'])

        # Overlay the requested changes on top of the current definition of
        # the access group.
        current = self.details(self._check('group_id', group_id, 'uuid'))
        details = dict_merge(current, kw)

        # Build the payload from the merged document, type-checking the
        # scalar attributes along the way.
        payload = {
            'name': self._check('name', details['name'], str),
            'all_users': self._check('all_users', details['all_users'], bool),
            'all_assets': self._check(
                'all_assets', details['all_assets'], bool),
            'rules': details['rules'],
            'principals': details['principals'],
            'access_group_type': details['access_group_type']
        }

        # Push the updated access group to the API and hand the parsed
        # response back to the caller.
        response = self._api.put(
            'v2/access-groups/{}'.format(group_id), json=payload)
        return response.json()
Example #8
0
# Debug helper: dump the raw JSON of a single Jira issue and its available
# transitions, using the same configuration as the main integration.
import json

import yaml
from restfly.utils import dict_merge

from tenable_jira.config import base_config
from tenable_jira.jira import Jira

config_file = 'config.yaml'
issue_id = 95045

# Overlay the user configuration on top of the built-in defaults.  The
# config file is opened in a context manager so the handle is closed once
# the YAML document has been parsed (the original leaked the open handle).
with open(config_file) as fobj:
    config = dict_merge(base_config(), yaml.load(fobj, Loader=yaml.Loader))

jira = Jira('https://{}/rest/api/3'.format(config['jira']['address']),
            config['jira']['api_username'], config['jira']['api_token'])

print('-- JSON Dump of Offending Issue --')
print(json.dumps(jira.issues.details(issue_id)))
print('-- JSON Dump of Issue Transitions --')
print(json.dumps(jira.issues.get_transitions(issue_id)))
Example #9
0
    def edit(self,
             id,
             name=None,
             members=None,
             start_time=None,
             end_time=None,
             timezone=None,
             description=None,
             frequency=None,
             interval=None,
             weekdays=None,
             day_of_month=None,
             enabled=None,
             network_id=None):
        '''
        Edit an existing scan target exclusion.

        :devportal:`exclusions: edit <exclusions-edit>`

        The edit function will first gather the details of the exclusion that
        will be edited and will overlay the changes on top.  The result will
        then be pushed back to the API to modify the exclusion.

        Args:
            id (int): The id of the exclusion object in Tenable.io
            name (str, optional): The name of the exclusion to create.
            members (list, optional):
                The members of the exclusion; joined into a comma-separated
                string before submission.  Presumably these are scan targets
                (IPs/hostnames) — confirm against the API documentation.
            description (str, optional):
                Some further detail about the exclusion.
            start_time (datetime, optional): When the exclusion should start.
            end_time (datetime, optional): When the exclusion should end.
            timezone (str, optional):
                The timezone to use for the exclusion.  The default if none is
                specified is to use UTC.
            frequency (str, optional):
                The frequency of the rule. The string inputted will be upcased.
                Valid values are: *ONETIME, DAILY, WEEKLY, MONTHLY, YEARLY*.
            interval (int, optional): The interval of the rule.
            weekdays (list, optional):
                List of 2-character representations of the days of the week to
                repeat the frequency rule on.  Valid values are:
                *SU, MO, TU, WE, TH, FR, SA*
                Default values: ``['SU', 'MO', 'TU', 'WE', 'TH', 'FR', 'SA']``
            day_of_month (int, optional):
                The day of the month to repeat a **MONTHLY** frequency rule on.
            enabled (bool, optional):
                enable/disable exclusion.
            network_id (uuid, optional):
                The ID of the network object associated with scanners where Tenable.io applies the exclusion.

        Returns:
            :obj:`dict`:
                Dictionary of the newly minted exclusion.

        Examples:
            Modifying the name of an exclusion:

            >>> exclusion = tio.exclusions.edit(1, name='New Name')
        '''

        # Lets start constructing the payload to be sent to the API...
        # The current exclusion document serves as the base; any parameters
        # that were passed overlay their corresponding fields below.
        payload = self.details(id)

        if name:
            payload['name'] = self._check('name', name, str)

        # The API expects the members as a single comma-separated string.
        if members:
            payload['members'] = ','.join(self._check('members', members,
                                                      list))

        if description:
            payload['description'] = self._check('description', description,
                                                 str)

        # `enabled` is compared against None explicitly so that an explicit
        # False still disables the schedule.
        if enabled is not None:
            payload['schedule']['enabled'] = self._check(
                'enabled', enabled, bool)

        # The schedule-related fields (frequency, rrules, start/end times,
        # timezone) are only relevant when the schedule is enabled.
        if payload['schedule']['enabled']:
            # Default the frequency to the existing rrules' freq when one
            # exists, otherwise fall back to ONETIME.
            frequency = self._check(
                'frequency',
                frequency,
                str,
                choices=['ONETIME', 'DAILY', 'WEEKLY', 'MONTHLY', 'YEARLY'],
                default=payload['schedule']['rrules'].get('freq')
                if payload['schedule']['rrules'] is not None else 'ONETIME',
                case='upper')

            # interval needs to be handled in schedule enabled excusion
            rrules = {
                'freq':
                frequency,
                'interval':
                payload['schedule']['rrules'].get('interval', None) or 1
                if payload['schedule']['rrules'] is not None else 1
            }

            # frequency default value is designed for weekly and monthly based on below conditions
            # - if schedule rrules is None and not defined in edit params, assign default values
            # - if schedule rrules is not None and not defined in edit params, assign old values
            # - if schedule rrules is not None and not defined in edit params
            # and byweekday/bymonthday key not already exist, assign default values
            # - if schedule rrules is not None and defined in edit params, assign new values
            if frequency == 'WEEKLY':
                rrules['byweekday'] = ','.join(
                    self._check(
                        'weekdays',
                        weekdays,
                        list,
                        choices=['SU', 'MO', 'TU', 'WE', 'TH', 'FR', 'SA'],
                        default=payload['schedule']['rrules'].get(
                            'byweekday', '').split()
                        or ['SU', 'MO', 'TU', 'WE', 'TH', 'FR', 'SA']
                        if payload['schedule']['rrules'] is not None else
                        ['SU', 'MO', 'TU', 'WE', 'TH', 'FR', 'SA'],
                        case='upper'))
                # In the same vein as the frequency check, we're accepting
                # case-insensitive input, comparing it to our known list of
                # acceptable responses, then joining them all together into a
                # comma-separated string.

            if frequency == 'MONTHLY':
                # Defaults to the existing bymonthday (or today's day of the
                # month) when no day_of_month parameter was passed.
                rrules['bymonthday'] = self._check(
                    'day_of_month',
                    day_of_month,
                    int,
                    choices=list(range(1, 32)),
                    default=payload['schedule']['rrules'].get(
                        'bymonthday',
                        datetime.today().day) if payload['schedule']['rrules']
                    is not None else datetime.today().day)

            # update new rrules in existing payload
            # NOTE: the return value of dict_merge is discarded here, so this
            # relies on dict_merge updating its first argument in place.
            if payload['schedule']['rrules'] is not None:
                dict_merge(payload['schedule']['rrules'], rrules)
            else:
                payload['schedule']['rrules'] = rrules

            if start_time:
                payload['schedule']['starttime'] = self._check(
                    'start_time', start_time,
                    datetime).strftime('%Y-%m-%d %H:%M:%S')

            if end_time:
                payload['schedule']['endtime'] = self._check(
                    'end_time', end_time,
                    datetime).strftime('%Y-%m-%d %H:%M:%S')

            # An explicit interval parameter overrides the value derived
            # from the existing rrules above.
            if interval:
                payload['schedule']['rrules']['interval'] = self._check(
                    'interval', interval, int)

            payload['schedule']['timezone'] = self._check(
                'timezone',
                timezone,
                str,
                choices=self._api._tz,
                default='Etc/UTC')

        if network_id:
            payload['network_id'] = self._check('network_id', network_id,
                                                'uuid')

        # Lets check to make sure that the scanner_id  and exclusion_id are
        # integers as the API documentation requests and if we don't raise an
        # error, then lets make the call.
        return self._api.put('exclusions/{}'.format(self._check('id', id,
                                                                int)),
                             json=payload).json()
Example #10
0
def cli(configfile, observed_since, setup_only=False):
    '''
    Tenable.io -> Jira Cloud Transformer & Ingester

    Args:
        configfile: An open file-like object containing the YAML config.
        observed_since (int): Unix timestamp to start the ingest from.
        setup_only (bool): When True, skip the actual ingestion.
    '''
    config = dict_merge(base_config(), yaml.load(configfile,
                                                 Loader=yaml.Loader))

    # Get the logging definition and define any defaults as need be.
    log = config.get('log', {})
    log_lvls = {'debug': 10, 'info': 20, 'warn': 30, 'error': 40}
    log['level'] = log_lvls[log.get('level', 'warn')]
    log['format'] = log.get(
        'format', '%(asctime)-15s %(name)s %(levelname)s %(message)s')

    # Configure the root logging facility
    logging.basicConfig(**log)

    # Output some basic information detailing the config file used and the
    # python version & system arch.
    logging.info('Tenable2JiraCloud Version {}'.format(__version__))
    logging.info('Using configuration file {}'.format(configfile.name))
    uname = platform.uname()
    logging.info('Running on Python {} {}/{}'.format(
        '.'.join([str(i) for i in sys.version_info][0:3]), uname[0],
        uname[-2]))

    # instantiate the Jira object
    jira = Jira('https://{}/rest/api/3'.format(config['jira']['address']),
                config['jira']['api_username'], config['jira']['api_token'])

    # Initiate the Tenable.io API model, the Ingester model, and start the
    # ingestion and data transformation.
    if config['tenable'].get('platform') == 'tenable.io':
        source = TenableIO(access_key=config['tenable'].get('access_key'),
                           secret_key=config['tenable'].get('secret_key'),
                           vendor='Tenable',
                           product='JiraCloud',
                           build=__version__)
    elif config['tenable'].get('platform') == 'tenable.sc':
        # Corrected misspelling in the error message ("implimented").
        logging.error('Tenable.sc ingest is not yet implemented.')
        exit(1)
    else:
        logging.error('No valid Tenable platform configuration defined.')
        exit(1)
    ingest = Tio2Jira(source, jira, config)

    # only run the actual ingestion if the setup_only flag isn't flipped.
    if not setup_only:
        ingest.ingest(observed_since)

        # If we are expected to continually re-run the transformer, then we will
        # need to track the passage of time and run every X hours, where X is
        # defined by the user in the configuration.
        if config.get('service', {}).get('interval', 0) > 0:
            sleeper = int(config['service']['interval']) * 3600
            while True:
                last_run = int(time.time())
                logging.info('Sleeping for {}h'.format(sleeper / 3600))
                time.sleep(sleeper)
                logging.info('Initiating ingest with observed_since={}'.format(
                    last_run))
                ingest.ingest(last_run)
def cli(configfile, observed_since, setup_only=False):
    '''
    Tenable.io -> Jira Cloud Transformer & Ingester

    Args:
        configfile: An open file-like object containing the YAML config.
        observed_since (int): Timestamp to start the ingest from — passed
            straight through to ``ingest.ingest``; the service loop below
            uses unix timestamps for subsequent runs.
        setup_only (bool): When True, skip the ingest and only write out a
            generated config file with the JIRA identifiers baked in.
    '''
    # Overlay the user-supplied configuration on top of the built-in
    # defaults.
    config = dict_merge(
        base_config(),
        yaml.load(configfile, Loader=yaml.Loader)
    )

    # Get the logging definition and define any defaults as need be.
    log = config.get('log', {})
    log_lvls = {'debug': 10, 'info': 20, 'warn': 30, 'error': 40}
    log['level'] = log_lvls[log.get('level', 'warn')]
    log['format'] = log.get('format',
        '%(asctime)-15s %(name)s %(levelname)s %(message)s')

    # Configure the root logging facility
    logging.basicConfig(**log)

    # Output some basic information detailing the config file used and the
    # python version & system arch.
    logging.info('Tenable2JiraCloud Version {}'.format(__version__))
    logging.info('Using configuration file {}'.format(configfile.name))
    uname = platform.uname()
    logging.info('Running on Python {} {}/{}'.format(
        '.'.join([str(i) for i in sys.version_info][0:3]),
        uname[0], uname[-2]))

    # instantiate the Jira object
    jira = Jira(
        'https://{}/rest/api/3'.format(config['jira']['address']),
        config['jira']['api_username'],
        config['jira']['api_token']
    )

    # Initiate the Tenable.io API model, the Ingester model, and start the
    # ingestion and data transformation.
    if config['tenable'].get('platform') == 'tenable.io':
        source = TenableIO(
            access_key=config['tenable'].get('access_key'),
            secret_key=config['tenable'].get('secret_key'),
            vendor='Tenable',
            product='JiraCloud',
            build=__version__
        )
    elif config['tenable'].get('platform') == 'tenable.sc':
        source = TenableSC(
            config['tenable'].get('address'),
            port=int(config['tenable'].get('port', 443)),
            username=config['tenable'].get('username'),
            password=config['tenable'].get('password'),
            access_key=config['tenable'].get('access_key'),
            secret_key=config['tenable'].get('secret_key'),
            vendor='Tenable',
            product='JiraCloud',
            build=__version__
        )
    else:
        logging.error('No valid Tenable platform configuration defined.')
        exit(1)
    ingest = Tio2Jira(source, jira, config)

    # only run the actual ingestion if the setup_only flag isn't flipped.
    if not setup_only:
        ingest.ingest(observed_since)

        # If we are expected to continually re-run the transformer, then we will
        # need to track the passage of time and run every X hours, where X is
        # defined by the user in the configuration.
        if config.get('service', {}).get('interval', 0) > 0:
            sleeper = int(config['service']['interval']) * 3600
            while True:
                # Record the start of this cycle so the next ingest only
                # picks up what was observed while we were sleeping.
                last_run = int(time.time())
                logging.info(
                    'Sleeping for {}h'.format(sleeper/3600))
                time.sleep(sleeper)
                logging.info(
                    'Initiating ingest with observed_since={}'.format(last_run))
                ingest.ingest(last_run)
    else:
        # In setup-only mode, the ingest will not run, and instead a config file
        # will be generated that will have all of the JIRA identifiers baked in
        # and will also inform the integration to ignore the screen builder.
        # When using this config, if there are any changes to the code, then
        # this config will need to be re-generated.
        config['screen']['no_create'] = True
        logging.info('Set to setup-only.  Will not run ingest.')
        logging.info('The following is the updated config file from the setup.')
        with open('generated_config.yaml', 'w') as outfile:
            outfile.write(yaml.dump(config, Dumper=yaml.Dumper))
        logging.info('Generated "generated_config.yaml" config file.')
        logging.info('This config file should be updated for every new version of this integration.')