Example #1
0
    def test_read_from_file(self):
        """Verify that '@'-prefixed arguments cause the named files to
        be opened and read."""
        mock_open = mock.mock_open()
        with mock.patch('tower_cli.utils.parser.open', mock_open, create=True):
            file_handle = mock_open.return_value.__enter__.return_value
            file_handle.read.return_value = 'foo: bar'
            parser.process_extra_vars(["@fake_file1.yml"])
            parser.process_extra_vars(["@fake_file2.yml", "@fake_file3.yml"])

        # Every file named in the arguments must have been opened for reading.
        for filename in ("fake_file1.yml", "fake_file2.yml", "fake_file3.yml"):
            self.assertIn(mock.call(filename, 'r'), mock_open.mock_calls)
Example #2
0
    def test_read_from_file(self):
        """Check that arguments starting with '@' are treated as file
        names whose contents are read."""
        mock_open = mock.mock_open()
        with mock.patch('tower_cli.utils.parser.open', mock_open, create=True):
            handle = mock_open.return_value.__enter__.return_value
            handle.read.return_value = 'foo: bar'
            parser.process_extra_vars(["@fake_file1.yml"])
            parser.process_extra_vars(["@fake_file2.yml",
                                       "@fake_file3.yml"])

        # Confirm that "open" was invoked for each file argument.
        expected_calls = [
            mock.call("fake_file1.yml", 'r'),
            mock.call("fake_file2.yml", 'r'),
            mock.call("fake_file3.yml", 'r'),
        ]
        for expected in expected_calls:
            self.assertIn(expected, mock_open.mock_calls)
Example #3
0
 def test_unicode_dump(self):
     """Test that data is dumped without unicode character marking."""
     for data in self.COMBINATION_DATA:
         string_rep = parser.process_extra_vars(data[0])
         # Use SafeLoader: yaml.load without an explicit Loader is
         # deprecated in PyYAML (and unsafe on untrusted input); the
         # rest of this suite already loads with yaml.SafeLoader.
         self.assertEqual(yaml.load(string_rep, Loader=yaml.SafeLoader),
                          data[1])
         assert "python/unicode" not in string_rep
         assert "\\n" not in string_rep
Example #4
0
 def test_unicode_dump(self):
     """Test that data is dumped without unicode character marking."""
     for data in self.COMBINATION_DATA:
         string_rep = parser.process_extra_vars(data[0])
         # SafeLoader: plain yaml.load without a Loader argument is
         # deprecated and unsafe; matches the loader used elsewhere
         # in this suite.
         self.assertEqual(yaml.load(string_rep, Loader=yaml.SafeLoader),
                          data[1])
         assert "python/unicode" not in string_rep
         assert "\\n" not in string_rep
Example #5
0
    def callback(self, pk=None, host_config_key='', extra_vars=None):
        """Contact Tower and request a configuration update using this job template.

        =====API DOCS=====
        Contact Tower and request a provisioning callback using this job template.

        :param pk: Primary key of the job template to run provisioning callback against.
        :type pk: int
        :param host_config_key: Key string used to authenticate the callback host.
        :type host_config_key: str
        :param extra_vars: Extra variables that are passed to provisioning callback.
        :type extra_vars: array of str
        :returns: A dictionary of a single key "changed", which indicates whether the provisioning callback
                  is successful.
        :rtype: dict

        =====API DOCS=====
        """
        url = self.endpoint + '%s/callback/' % pk
        # When no key was supplied, fall back to the one advertised
        # by the server for this job template.
        if not host_config_key:
            host_config_key = client.get(url).json()['host_config_key']
        payload = {'host_config_key': host_config_key}
        if extra_vars:
            payload['extra_vars'] = parser.process_extra_vars(
                list(extra_vars), force_json=True)
        response = client.post(url, data=payload, auth=None)
        # 201 Created signals that the callback was accepted.
        if response.status_code == 201:
            return {'changed': True}
Example #6
0
    def callback(self, pk=None, host_config_key='', extra_vars=None):
        """Contact Tower and request a configuration update using this job template.

        =====API DOCS=====
        Contact Tower and request a provisioning callback using this job template.

        :param pk: Primary key of the job template to run provisioning callback against.
        :type pk: int
        :param host_config_key: Key string used to authenticate the callback host.
        :type host_config_key: str
        :param extra_vars: Extra variables that are passed to provisioning callback.
        :type extra_vars: array of str
        :returns: A dictionary of a single key "changed", which indicates whether the provisioning callback
                  is successful.
        :rtype: dict

        =====API DOCS=====
        """
        url = self.endpoint + '%s/callback/' % pk
        # Retrieve the server-side key when the caller did not provide one.
        if not host_config_key:
            host_config_key = client.get(url).json()['host_config_key']
        request_body = {'host_config_key': host_config_key}
        if extra_vars:
            request_body['extra_vars'] = parser.process_extra_vars(
                list(extra_vars), force_json=True)
        resp = client.post(url, data=request_body, auth=None)
        # Report a change only when the callback was created (201).
        if resp.status_code == 201:
            return {'changed': True}
Example #7
0
    def launch(self,
               workflow_job_template=None,
               monitor=False,
               wait=False,
               timeout=None,
               extra_vars=None,
               **kwargs):
        """Launch a new workflow job based on a workflow job template.

        Creates a new workflow job in Ansible Tower, starts it, and
        returns back an ID in order for its status to be monitored.

        :param workflow_job_template: Primary key or name of the workflow
            job template to launch a new job from.
        :param monitor: If set, immediately call ``monitor`` on the newly
            launched workflow job.
        :param wait: If set, wait on the workflow job without printing
            progress.
        :param timeout: Optional number of seconds after which the
            ``monitor``/``wait`` call times out.
        :param extra_vars: List of YAML/JSON texts containing extra
            variables to pass on.
        :returns: Result of ``monitor``/``wait`` when those flags are set;
            otherwise the loaded JSON response of the launch request.
        """
        # Guard against the default of None: ``len(None)`` would raise a
        # TypeError whenever no extra_vars are supplied.
        if extra_vars is not None and len(extra_vars) > 0:
            kwargs['extra_vars'] = parser.process_extra_vars(extra_vars)

        debug.log('Launching the workflow job.', header='details')
        self._pop_none(kwargs)
        post_response = client.post(
            'workflow_job_templates/{}/launch/'.format(workflow_job_template),
            data=kwargs).json()

        workflow_job_id = post_response['id']
        post_response['changed'] = True

        if monitor:
            return self.monitor(workflow_job_id, timeout=timeout)
        elif wait:
            return self.wait(workflow_job_id, timeout=timeout)

        return post_response
Example #8
0
 def test_combination_parse_list(self):
     """Custom input-output scenario tests for 2 sources into one."""
     for data in self.COMBINATION_DATA:
         # SafeLoader: yaml.load without an explicit Loader argument is
         # deprecated in PyYAML and unsafe on untrusted input.
         self.assertEqual(
             yaml.load(parser.process_extra_vars(data[0]),
                       Loader=yaml.SafeLoader),
             data[1]
         )
Example #9
0
 def test_precedence(self):
     """Test that last value is the one that overwrites the others"""
     adict = {"a": 1}
     ayml = yaml.dump(adict)
     a2dict = {"a": 2}
     a2yml = yaml.dump(a2dict)
     result = parser.process_extra_vars([ayml, a2yml])
     # SafeLoader: plain yaml.load without a Loader is deprecated.
     rdict = yaml.load(result, Loader=yaml.SafeLoader)
     self.assertEqual(rdict['a'], 2)
Example #10
0
 def test_precedence(self):
     """Test that last value is the one that overwrites the others"""
     adict = {"a": 1}
     ayml = yaml.dump(adict)
     a2dict = {"a": 2}
     a2yml = yaml.dump(a2dict)
     result = parser.process_extra_vars([ayml, a2yml])
     # SafeLoader avoids the deprecated (and unsafe) default loader.
     rdict = yaml.load(result, Loader=yaml.SafeLoader)
     self.assertEqual(rdict['a'], 2)
Example #11
0
 def callback(self, pk=None, host_config_key='', extra_vars=None):
     """Contact Tower and request a configuration update using this job template."""
     url = self.endpoint + '%s/callback/' % pk
     # Fall back to the host_config_key advertised by the server.
     if not host_config_key:
         host_config_key = client.get(url).json()['host_config_key']
     payload = {'host_config_key': host_config_key}
     if extra_vars:
         payload['extra_vars'] = parser.process_extra_vars(
             list(extra_vars), force_json=True)
     response = client.post(url, data=payload, auth=None)
     # Only report a change when the callback was actually created.
     if response.status_code == 201:
         return {'changed': True}
Example #12
0
    def launch(self,
               workflow_job_template=None,
               monitor=False,
               wait=False,
               timeout=None,
               extra_vars=None,
               **kwargs):
        """Launch a new workflow job based on a workflow job template.

        Creates a new workflow job in Ansible Tower, starts it, and
        returns back an ID in order for its status to be monitored.

        =====API DOCS=====
        Launch a new workflow job based on a workflow job template.

        :param workflow_job_template: Primary key or name of the workflow job template to launch new job.
        :type workflow_job_template: str
        :param monitor: Flag that if set, immediately calls ``monitor`` on the newly launched workflow job rather
                        than exiting with a success.
        :type monitor: bool
        :param wait: Flag that if set, monitor the status of the workflow job, but do not print while job is
                     in progress.
        :type wait: bool
        :param timeout: If provided with ``monitor`` flag set, this attempt will time out after the given number
                        of seconds.
        :type timeout: int
        :param extra_vars: yaml formatted texts that contains extra variables to pass on.
        :type extra_vars: array of strings
        :param `**kwargs`: Fields needed to create and launch a workflow job.
        :returns: Result of subsequent ``monitor`` call if ``monitor`` flag is on; Result of subsequent ``wait``
                  call if ``wait`` flag is on; loaded JSON output of the job launch if none of the two flags are on.
        :rtype: dict

        =====API DOCS=====
        """
        # Merge any provided extra variables into the launch payload.
        if extra_vars is not None and len(extra_vars) > 0:
            kwargs['extra_vars'] = parser.process_extra_vars(extra_vars)

        debug.log('Launching the workflow job.', header='details')
        self._pop_none(kwargs)
        launch_endpoint = 'workflow_job_templates/{0}/launch/'.format(
            workflow_job_template)
        result = client.post(launch_endpoint, data=kwargs).json()

        result['changed'] = True

        # Hand off to monitoring/waiting when requested; otherwise return
        # the launch response directly.
        if monitor:
            return self.monitor(result['id'], timeout=timeout)
        if wait:
            return self.wait(result['id'], timeout=timeout)

        return result
    def test_many_combinations(self):
        """Combine yaml with json with bare values, check that key:value
        pairs are preserved at the end."""
        adict = {"a": 1}
        bdict = {"b": 2}
        ayml = yaml.dump(adict)
        bjson = yaml.dump(bdict, default_flow_style=True)
        cyml = "c: 5"
        result = parser.process_extra_vars([ayml, bjson, cyml])
        rdict = yaml.load(result, Loader=yaml.SafeLoader)
        self.assertEqual(rdict['a'], 1)
        self.assertEqual(rdict['b'], 2)

        yaml_w_comment = "a: b\n# comment\nc: d"
        self.assertEqual(
            parser.process_extra_vars([yaml_w_comment], force_json=False),
            yaml_w_comment)
        yaml_w_comment = '{a: b,\n# comment\nc: d}'
        json_text = '{"z":"p"}'
        # assertDictContainsSubset was deprecated in Python 3.2 and removed
        # in 3.12; assert the subset relationship key-by-key instead.
        expected_subset = yaml.load(yaml_w_comment, Loader=yaml.SafeLoader)
        combined = yaml.load(
            parser.process_extra_vars([yaml_w_comment, json_text],
                                      force_json=False),
            Loader=yaml.SafeLoader)
        for key, value in expected_subset.items():
            self.assertEqual(combined[key], value)
        # Test that it correctly combines a diverse set of YAML
        yml1 = "a: 1\n# a comment on second line \nb: 2"
        yml2 = "c: 3"
        self.assertEqual(
            yaml.load(parser.process_extra_vars([yml1, yml2],
                                                force_json=False),
                      Loader=yaml.SafeLoader), {
                          'a': 1,
                          'b': 2,
                          'c': 3
                      })
        # make sure it combined them into valid yaml
        self.assertFalse(
            "{" in parser.process_extra_vars([yml1, yml2], force_json=False))
Example #14
0
    def test_many_combinations(self):
        """Combine yaml with json with bare values, check that key:value
        pairs are preserved at the end."""
        adict = {"a": 1}
        bdict = {"b": 2}
        ayml = yaml.dump(adict)
        bjson = yaml.dump(bdict, default_flow_style=True)
        cyml = "c: 5"
        result = parser.process_extra_vars([ayml, bjson, cyml])
        rdict = yaml.load(result, Loader=yaml.SafeLoader)
        self.assertEqual(rdict['a'], 1)
        self.assertEqual(rdict['b'], 2)

        yaml_w_comment = "a: b\n# comment\nc: d"
        self.assertEqual(
            parser.process_extra_vars([yaml_w_comment], force_json=False),
            yaml_w_comment
        )
        yaml_w_comment = '{a: b,\n# comment\nc: d}'
        json_text = '{"z":"p"}'
        # assertDictContainsSubset was deprecated in Python 3.2 and removed
        # in 3.12; check the subset relationship with per-key equality.
        expected_subset = yaml.load(yaml_w_comment, Loader=yaml.SafeLoader)
        combined = yaml.load(parser.process_extra_vars(
            [yaml_w_comment, json_text], force_json=False),
            Loader=yaml.SafeLoader
        )
        for key, value in expected_subset.items():
            self.assertEqual(combined[key], value)
        # Test that it correctly combines a diverse set of YAML
        yml1 = "a: 1\n# a comment on second line \nb: 2"
        yml2 = "c: 3"
        self.assertEqual(
            yaml.load(parser.process_extra_vars(
                [yml1, yml2], force_json=False), Loader=yaml.SafeLoader),
            {'a': 1, 'b': 2, 'c': 3}
        )
        # make sure it combined them into valid yaml
        self.assertFalse("{" in parser.process_extra_vars(
            [yml1, yml2], force_json=False))
Example #15
0
 def modify(self, pk=None, create_on_missing=False,
            extra_vars=None, **kwargs):
     """Modify a job template.
     You may include multiple --extra-vars flags in order to combine
     different sources of extra variables. Start this
     with @ in order to indicate a filename."""
     if extra_vars:
         # Merge every supplied extra_vars source into one document.
         combined = parser.process_extra_vars(extra_vars, force_json=False)
         kwargs['extra_vars'] = combined
     return super(Resource, self).modify(
         pk=pk, create_on_missing=create_on_missing, **kwargs)
Example #16
0
    def test_handling_bad_data(self):
        """Check robustness of the parser functions in how it handles
        empty strings, null values, etc."""
        # Every stage of the computational chain must tolerate both
        # None and the empty string, returning an empty dict.
        for func in (parser.parse_kv, parser.string_to_dict):
            self.assertEqual(func(None), {})
            self.assertEqual(func(""), {})

        # Integer-looking values should come back as ints.
        self.assertEqual(parser.parse_kv("foo=5"), {"foo": 5})

        # An empty extra_vars list (or an empty string entry) must not
        # blow up, and should produce an empty string.
        self.assertEqual(parser.process_extra_vars([]), "")
        self.assertEqual(
            parser.process_extra_vars([""], force_json=False), "")
Example #17
0
    def test_handling_bad_data(self):
        """Check robustness of the parser functions in how it handles
        empty strings, null values, etc."""
        # Verify that all parts of the computational chain can handle None
        self.assertEqual(parser.parse_kv(None), {})
        self.assertEqual(parser.string_to_dict(None), {})

        # Verify that all parts of the computational chain can handle ""
        self.assertEqual(parser.parse_kv(""), {})
        self.assertEqual(parser.string_to_dict(""), {})

        # Check that the behavior is what we want if feeding it an int
        self.assertEqual(parser.parse_kv("foo=5"), {"foo": 5})

        # Check that an empty extra_vars list doesn't blow up
        self.assertEqual(parser.process_extra_vars([]), "")
        self.assertEqual(
            parser.process_extra_vars([""], force_json=False), "")
Example #18
0
    def launch(self, workflow_job_template=None, monitor=False, wait=False,
               timeout=None, extra_vars=None, **kwargs):
        """Launch a new workflow job based on a workflow job template.

        Creates a new workflow job in Ansible Tower, starts it, and
        returns back an ID in order for its status to be monitored.

        =====API DOCS=====
        Launch a new workflow job based on a workflow job template.

        :param workflow_job_template: Primary key or name of the workflow job template to launch new job.
        :type workflow_job_template: str
        :param monitor: Flag that if set, immediately calls ``monitor`` on the newly launched workflow job rather
                        than exiting with a success.
        :type monitor: bool
        :param wait: Flag that if set, monitor the status of the workflow job, but do not print while job is
                     in progress.
        :type wait: bool
        :param timeout: If provided with ``monitor`` flag set, this attempt will time out after the given number
                        of seconds.
        :type timeout: int
        :param extra_vars: yaml formatted texts that contains extra variables to pass on.
        :type extra_vars: array of strings
        :param `**kwargs`: Fields needed to create and launch a workflow job.
        :returns: Result of subsequent ``monitor`` call if ``monitor`` flag is on; Result of subsequent ``wait``
                  call if ``wait`` flag is on; loaded JSON output of the job launch if none of the two flags are on.
        :rtype: dict

        =====API DOCS=====
        """
        # Fold any supplied extra variables into the launch payload.
        if extra_vars is not None and len(extra_vars) > 0:
            kwargs['extra_vars'] = parser.process_extra_vars(extra_vars)

        debug.log('Launching the workflow job.', header='details')
        self._pop_none(kwargs)
        url = 'workflow_job_templates/{0}/launch/'.format(
            workflow_job_template)
        launch_info = client.post(url, data=kwargs).json()

        launch_info['changed'] = True
        job_id = launch_info['id']

        # Delegate to monitoring/waiting when the caller asked for it.
        if monitor:
            return self.monitor(job_id, timeout=timeout)
        if wait:
            return self.wait(job_id, timeout=timeout)

        return launch_info
Example #19
0
 def create(self, fail_on_found=False, force_on_exists=False,
            extra_vars=None, **kwargs):
     """Create a job template.
     You may include multiple --extra-vars flags in order to combine
     different sources of extra variables. Start this
     with @ in order to indicate a filename."""
     if extra_vars:
         # Merge every supplied extra_vars source into one document.
         kwargs['extra_vars'] = parser.process_extra_vars(
             extra_vars, force_json=False)
     # Creation of a job template defaults job_type to 'run'; this also
     # replaces falsy values (e.g. None or '') that the caller passed.
     if not kwargs.get('job_type', False):
         kwargs['job_type'] = 'run'
     return super(Resource, self).create(
         fail_on_found=fail_on_found, force_on_exists=force_on_exists,
         **kwargs)
Example #20
0
 def write(self, pk=None, **kwargs):
     """Write the resource, then create, update, or delete its
     accompanying survey_spec when one was supplied."""
     survey_input = kwargs.pop('survey_spec', None)
     if kwargs.get('extra_vars', None):
         kwargs['extra_vars'] = parser.process_extra_vars(
             kwargs['extra_vars'])
     result = super(SurveyResource, self).write(pk=pk, **kwargs)
     # Nothing further to do unless a survey was supplied and the
     # write produced a resource id to attach it to.
     if survey_input is None or not result.get('id', None):
         return result
     if not isinstance(survey_input, dict):
         survey_input = json.loads(survey_input.strip(' '))
     if survey_input == {}:
         debug.log('Deleting the survey_spec.', header='details')
         response = client.delete(self._survey_endpoint(result['id']))
     else:
         debug.log('Saving the survey_spec.', header='details')
         response = client.post(self._survey_endpoint(result['id']),
                                data=survey_input)
     if response.status_code == 200:
         result['changed'] = True
     if survey_input and not result['survey_enabled']:
         debug.log('For survey to take effect, set survey_enabled'
                   ' field to True.', header='warning')
     return result
Example #21
0
    def launch(self, job_template=None, monitor=False, timeout=None,
               no_input=True, extra_vars=None, **kwargs):
        """Launch a new job based on a job template.

        Creates a new job in Ansible Tower, immediately starts it, and
        returns back an ID in order for its status to be monitored.

        :param job_template: Primary key or name of the job template to
            launch the job from.
        :param monitor: If set, call ``monitor`` on the started job and
            return its result instead.
        :param timeout: Optional number of seconds passed through to
            ``monitor``.
        :param no_input: When False, the user may be prompted (via
            ``click.edit``) for extra variables if the job template asks
            for them at launch.
        :param extra_vars: Optional list/tuple of YAML or JSON texts with
            runtime extra variables.
        :param kwargs: Additional fields for the job; also consumes
            ``tags`` and ``use_job_endpoint``.
        :returns: ``self.status(...)`` detail dict with ``changed`` set to
            True, or the ``monitor`` result when ``monitor`` is set.
        """
        # Get the job template from Ansible Tower.
        # This is used as the baseline for starting the job.

        tags = kwargs.get('tags', None)
        use_job_endpoint = kwargs.pop('use_job_endpoint', False)
        jt_resource = get_resource('job_template')
        jt = jt_resource.get(job_template)

        # Update the job data by adding an automatically-generated job name,
        # and removing the ID.
        data = copy(jt)
        data['job_template'] = data.pop('id')
        data['name'] = '%s [invoked via. Tower CLI]' % data['name']
        if tags:
            data['job_tags'] = tags

        # Initialize an extra_vars list that starts with the job template
        # preferences first, if they exist
        extra_vars_list = []
        if 'extra_vars' in data and len(data['extra_vars']) > 0:
            # But only do this for versions before 2.3
            # NOTE(review): the comment says "before 2.3" but the check is
            # against 2.4 — confirm which cutoff is intended.
            debug.log('Getting version of Tower.', header='details')
            r = client.get('/config/')
            if LooseVersion(r.json()['version']) < LooseVersion('2.4'):
                extra_vars_list = [data['extra_vars']]

        # Add the runtime extra_vars to this list
        if extra_vars:
            extra_vars_list += list(extra_vars)  # accept tuples

        # If the job template requires prompting for extra variables,
        # do so (unless --no-input is set).
        if data.pop('ask_variables_on_launch', False) and not no_input \
                and not extra_vars:
            # If JT extra_vars are JSON, echo them to user as YAML
            initial = parser.process_extra_vars(
                [data['extra_vars']], force_json=False
            )
            initial = '\n'.join((
                '# Specify extra variables (if any) here as YAML.',
                '# Lines beginning with "#" denote comments.',
                initial,
            ))
            # Open the user's editor; an aborted edit yields ''.
            extra_vars = click.edit(initial) or ''
            if extra_vars != initial:
                extra_vars_list = [extra_vars]

        # Data is starting out with JT variables, and we only want to
        # include extra_vars that come from the algorithm here.
        data.pop('extra_vars', None)

        # Replace/populate data fields if prompted.
        modified = set()
        for resource in PROMPT_LIST:
            if data.pop('ask_' + resource + '_on_launch', False) \
               and not no_input or use_job_endpoint:
                resource_object = kwargs.get(resource, None)
                # NOTE(review): exact type comparison; if subclasses of
                # types.Related should match, isinstance is intended here.
                if type(resource_object) == types.Related:
                    resource_class = get_resource(resource)
                    resource_object = resource_class.get(resource).\
                        pop('id', None)
                if resource_object is None:
                    if not use_job_endpoint:
                        debug.log('{0} is asked at launch but not provided'.
                                  format(resource), header='warning')
                elif resource != 'tags':
                    data[resource] = resource_object
                    modified.add(resource)

        # Dump extra_vars into JSON string for launching job
        if len(extra_vars_list) > 0:
            data['extra_vars'] = parser.process_extra_vars(
                extra_vars_list, force_json=True
            )

        # In Tower 2.1 and later, we create the new job with
        # /job_templates/N/launch/; in Tower 2.0 and before, there is a two
        # step process of posting to /jobs/ and then /jobs/N/start/.
        supports_job_template_launch = False
        if 'launch' in jt['related']:
            supports_job_template_launch = True

        # Create the new job in Ansible Tower.
        start_data = {}
        if supports_job_template_launch and not use_job_endpoint:
            endpoint = '/job_templates/%d/launch/' % jt['id']
            if 'extra_vars' in data and len(data['extra_vars']) > 0:
                start_data['extra_vars'] = data['extra_vars']
            if tags:
                start_data['job_tags'] = data['job_tags']
            for resource in PROMPT_LIST:
                if resource in modified:
                    start_data[resource] = data[resource]
        else:
            debug.log('Creating the job.', header='details')
            job = client.post('/jobs/', data=data).json()
            job_id = job['id']
            endpoint = '/jobs/%d/start/' % job_id

        # There's a non-trivial chance that we are going to need some
        # additional information to start the job; in particular, many jobs
        # rely on passwords entered at run-time.
        #
        # If there are any such passwords on this job, ask for them now.
        debug.log('Asking for information necessary to start the job.',
                  header='details')
        job_start_info = client.get(endpoint).json()
        for password in job_start_info.get('passwords_needed_to_start', []):
            start_data[password] = getpass('Password for %s: ' % password)

        # Actually start the job.
        debug.log('Launching the job.', header='details')
        self._pop_none(kwargs)
        kwargs.update(start_data)
        job_started = client.post(endpoint, data=kwargs)

        # If this used the /job_template/N/launch/ route, get the job
        # ID from the result.
        if supports_job_template_launch and not use_job_endpoint:
            job_id = job_started.json()['job']

        # If returning json indicates any ignored fields, display it in
        # verbose mode.
        ignored_fields = job_started.json().get('ignored_fields', {})
        has_ignored_fields = False
        for key, value in ignored_fields.items():
            if value and value != '{}':
                if not has_ignored_fields:
                    debug.log('List of ignored fields on the server side:',
                              header='detail')
                    has_ignored_fields = True
                debug.log('{0}: {1}'.format(key, value))

        # Get some information about the running job to print
        result = self.status(pk=job_id, detail=True)
        result['changed'] = True

        # If we were told to monitor the job once it started, then call
        # monitor from here.
        if monitor:
            return self.monitor(job_id, timeout=timeout)

        return result
Example #22
0
    def test_parse_error(self):
        """Given a yaml file with incorrect syntax, throw a warning"""
        with self.assertRaises(exc.TowerCLIError):
            parser.process_extra_vars(["mixing: yaml\nwith=keyval"])

        with self.assertRaises(exc.TowerCLIError):
            parser.process_extra_vars(["incorrect == brackets"])

        # but accept data if there are just two equals
        res = parser.process_extra_vars(['password==pa#exp&U=!9Rop'])
        # SafeLoader: yaml.load without an explicit Loader is deprecated
        # in PyYAML and unsafe on untrusted input.
        self.assertEqual(yaml.load(res, Loader=yaml.SafeLoader)['password'],
                         '=pa#exp&U=!9Rop')

        with self.assertRaises(exc.TowerCLIError):
            parser.process_extra_vars(["left_param="])

        with self.assertRaises(exc.TowerCLIError):
            parser.process_extra_vars(["incorrect = =brackets"])

        # Do not accept _raw_params
        with self.assertRaises(exc.TowerCLIError):
            parser.process_extra_vars(["42"])
Example #23
0
    def launch(self,
               job_template=None,
               monitor=False,
               timeout=None,
               no_input=True,
               extra_vars=None,
               **kwargs):
        """Launch a new job based on a job template.

        Creates a new job in Ansible Tower, immediately starts it, and
        returns back an ID in order for its status to be monitored.
        """
        # Get the job template from Ansible Tower.
        # This is used as the baseline for starting the job.

        tags = kwargs.get('tags', None)
        use_job_endpoint = kwargs.pop('use_job_endpoint', False)
        jt_resource = get_resource('job_template')
        jt = jt_resource.get(job_template)

        # Update the job data by adding an automatically-generated job name,
        # and removing the ID.
        data = copy(jt)
        data['job_template'] = data.pop('id')
        data['name'] = '%s [invoked via. Tower CLI]' % data['name']
        if tags:
            data['job_tags'] = tags

        # Initialize an extra_vars list that starts with the job template
        # preferences first, if they exist
        extra_vars_list = []
        if 'extra_vars' in data and len(data['extra_vars']) > 0:
            # But only do this for versions before 2.3
            debug.log('Getting version of Tower.', header='details')
            r = client.get('/config/')
            if LooseVersion(r.json()['version']) < LooseVersion('2.4'):
                extra_vars_list = [data['extra_vars']]

        # Add the runtime extra_vars to this list
        if extra_vars:
            extra_vars_list += list(extra_vars)  # accept tuples

        # If the job template requires prompting for extra variables,
        # do so (unless --no-input is set).
        if data.pop('ask_variables_on_launch', False) and not no_input \
                and not extra_vars:
            # If JT extra_vars are JSON, echo them to user as YAML
            initial = parser.process_extra_vars([data['extra_vars']],
                                                force_json=False)
            initial = '\n'.join((
                '# Specify extra variables (if any) here as YAML.',
                '# Lines beginning with "#" denote comments.',
                initial,
            ))
            extra_vars = click.edit(initial) or ''
            if extra_vars != initial:
                extra_vars_list = [extra_vars]

        # Data is starting out with JT variables, and we only want to
        # include extra_vars that come from the algorithm here.
        data.pop('extra_vars', None)

        # Replace/populate data fields if prompted.
        modified = set()
        for resource in PROMPT_LIST:
            if data.pop('ask_' + resource + '_on_launch', False) \
               and not no_input or use_job_endpoint:
                resource_object = kwargs.get(resource, None)
                if type(resource_object) == types.Related:
                    resource_class = get_resource(resource)
                    resource_object = resource_class.get(resource).\
                        pop('id', None)
                if resource_object is None:
                    if not use_job_endpoint:
                        debug.log(
                            '{0} is asked at launch but not provided'.format(
                                resource),
                            header='warning')
                elif resource != 'tags':
                    data[resource] = resource_object
                    modified.add(resource)

        # Dump extra_vars into JSON string for launching job
        if len(extra_vars_list) > 0:
            data['extra_vars'] = parser.process_extra_vars(extra_vars_list,
                                                           force_json=True)

        # In Tower 2.1 and later, we create the new job with
        # /job_templates/N/launch/; in Tower 2.0 and before, there is a two
        # step process of posting to /jobs/ and then /jobs/N/start/.
        supports_job_template_launch = False
        if 'launch' in jt['related']:
            supports_job_template_launch = True

        # Create the new job in Ansible Tower.
        start_data = {}
        if supports_job_template_launch and not use_job_endpoint:
            endpoint = '/job_templates/%d/launch/' % jt['id']
            if 'extra_vars' in data and len(data['extra_vars']) > 0:
                start_data['extra_vars'] = data['extra_vars']
            if tags:
                start_data['job_tags'] = data['job_tags']
            for resource in PROMPT_LIST:
                if resource in modified:
                    start_data[resource] = data[resource]
        else:
            debug.log('Creating the job.', header='details')
            job = client.post('/jobs/', data=data).json()
            job_id = job['id']
            endpoint = '/jobs/%d/start/' % job_id

        # There's a non-trivial chance that we are going to need some
        # additional information to start the job; in particular, many jobs
        # rely on passwords entered at run-time.
        #
        # If there are any such passwords on this job, ask for them now.
        debug.log('Asking for information necessary to start the job.',
                  header='details')
        job_start_info = client.get(endpoint).json()
        for password in job_start_info.get('passwords_needed_to_start', []):
            start_data[password] = getpass('Password for %s: ' % password)

        # Actually start the job.
        debug.log('Launching the job.', header='details')
        self._pop_none(kwargs)
        kwargs.update(start_data)
        job_started = client.post(endpoint, data=kwargs)

        # If this used the /job_template/N/launch/ route, get the job
        # ID from the result.
        if supports_job_template_launch and not use_job_endpoint:
            job_id = job_started.json()['job']

        # Get some information about the running job to print
        result = self.status(pk=job_id, detail=True)
        result['changed'] = True

        # If we were told to monitor the job once it started, then call
        # monitor from here.
        if monitor:
            return self.monitor(job_id, timeout=timeout)

        return result
Example #24
0
    def launch(
        self, job_template=None, tags=None, monitor=False, timeout=None, no_input=True, extra_vars=None, **kwargs
    ):
        """Launch a new job based on a job template.

        Creates a new job in Ansible Tower, immediately starts it, and
        returns back an ID in order for its status to be monitored.

        :param job_template: Primary key or name of the job template to
            launch the new job from.
        :param tags: Comma-separated job tags; sent to the server as
            ``job_tags``.
        :param monitor: If True, call ``monitor`` on the started job and
            return its result instead of exiting immediately.
        :param timeout: Seconds before a monitored job is abandoned; only
            used together with ``monitor``.
        :param no_input: If True, never open an interactive editor to
            prompt for extra variables.
        :param extra_vars: Iterable of YAML/JSON strings containing
            runtime extra variables.
        :returns: Result of ``self.status`` (or of ``self.monitor`` when
            the ``monitor`` flag is set), with ``changed`` set to True.
        """
        # Get the job template from Ansible Tower.
        # This is used as the baseline for starting the job.
        jt_resource = get_resource("job_template")
        jt = jt_resource.get(job_template)

        # Update the job data by adding an automatically-generated job name,
        # and removing the ID.
        data = copy(jt)
        # The template's own "id" becomes the job's foreign key back to it.
        data["job_template"] = data.pop("id")
        data["name"] = "%s [invoked via. Tower CLI]" % data["name"]
        if tags:
            data["job_tags"] = tags

        # Initialize an extra_vars list that starts with the job template
        # preferences first, if they exist
        extra_vars_list = []
        if "extra_vars" in data and len(data["extra_vars"]) > 0:
            # But only do this for versions before 2.3
            debug.log("Getting version of Tower.", header="details")
            r = client.get("/config/")
            if LooseVersion(r.json()["version"]) < LooseVersion("2.4"):
                extra_vars_list = [data["extra_vars"]]

        # Add the runtime extra_vars to this list
        if extra_vars:
            extra_vars_list += list(extra_vars)  # accept tuples

        # If the job template requires prompting for extra variables,
        # do so (unless --no-input is set).
        if data.pop("ask_variables_on_launch", False) and not no_input and not extra_vars:
            # If JT extra_vars are JSON, echo them to user as YAML
            # NOTE(review): this indexes data["extra_vars"] directly --
            # assumes the key is always present when ask_variables_on_launch
            # is set; confirm the server guarantees this.
            initial = parser.process_extra_vars([data["extra_vars"]], force_json=False)
            initial = "\n".join(
                (
                    "# Specify extra variables (if any) here as YAML.",
                    '# Lines beginning with "#" denote comments.',
                    initial,
                )
            )
            # click.edit returns None when the editor is closed unsaved.
            extra_vars = click.edit(initial) or ""
            if extra_vars != initial:
                extra_vars_list = [extra_vars]

        # Data is starting out with JT variables, and we only want to
        # include extra_vars that come from the algorithm here.
        data.pop("extra_vars", None)

        # Dump extra_vars into JSON string for launching job
        if len(extra_vars_list) > 0:
            data["extra_vars"] = parser.process_extra_vars(extra_vars_list, force_json=True)

        # In Tower 2.1 and later, we create the new job with
        # /job_templates/N/launch/; in Tower 2.0 and before, there is a two
        # step process of posting to /jobs/ and then /jobs/N/start/.
        supports_job_template_launch = False
        if "launch" in jt["related"]:
            supports_job_template_launch = True

        # Create the new job in Ansible Tower.
        start_data = {}
        if supports_job_template_launch:
            endpoint = "/job_templates/%d/launch/" % jt["id"]
            if "extra_vars" in data and len(data["extra_vars"]) > 0:
                start_data["extra_vars"] = data["extra_vars"]
            if tags:
                start_data["job_tags"] = data["job_tags"]
        else:
            # Legacy (pre-2.1) flow: create the job record first, then POST
            # to its /start/ endpoint below.
            debug.log("Creating the job.", header="details")
            job = client.post("/jobs/", data=data).json()
            job_id = job["id"]
            endpoint = "/jobs/%d/start/" % job_id

        # There's a non-trivial chance that we are going to need some
        # additional information to start the job; in particular, many jobs
        # rely on passwords entered at run-time.
        #
        # If there are any such passwords on this job, ask for them now.
        debug.log("Asking for information necessary to start the job.", header="details")
        job_start_info = client.get(endpoint).json()
        for password in job_start_info.get("passwords_needed_to_start", []):
            start_data[password] = getpass("Password for %s: " % password)

        # Actually start the job.
        debug.log("Launching the job.", header="details")
        self._pop_none(kwargs)
        kwargs.update(start_data)
        job_started = client.post(endpoint, data=kwargs)

        # If this used the /job_template/N/launch/ route, get the job
        # ID from the result.
        if supports_job_template_launch:
            job_id = job_started.json()["job"]

        # Get some information about the running job to print
        result = self.status(pk=job_id, detail=True)
        result["changed"] = True

        # If we were told to monitor the job once it started, then call
        # monitor from here.
        if monitor:
            return self.monitor(job_id, timeout=timeout)

        return result
Example #25
0
 def test_combination_parse_list(self):
     """Check that two variable sources are merged into one result."""
     for sources, expected in self.COMBINATION_DATA:
         rendered = parser.process_extra_vars(sources)
         self.assertEqual(yaml.load(rendered), expected)
Example #26
0
    def test_parse_error(self):
        """Given a yaml file with incorrect syntax, throw a warning"""
        # Each of these inputs is malformed (mixed YAML/key=value syntax,
        # bad bracketing, a dangling key, or a bare value that would become
        # _raw_params) and must be rejected with a TowerCLIError.
        bad_inputs = [
            "mixing: yaml\nwith=keyval",
            "incorrect == brackets",
            "left_param=",
            "incorrect = =brackets",
            "42",
        ]
        for bad in bad_inputs:
            with self.assertRaises(exc.TowerCLIError):
                parser.process_extra_vars([bad])

        # But a value that merely starts with '=' (two equals total) is
        # accepted, with the second '=' kept as part of the value.
        res = parser.process_extra_vars(['password==pa#exp&U=!9Rop'])
        self.assertEqual(yaml.load(res)['password'], '=pa#exp&U=!9Rop')
    def launch(self,
               job_template=None,
               monitor=False,
               wait=False,
               timeout=None,
               no_input=True,
               extra_vars=None,
               **kwargs):
        """Launch a new job based on a job template.

        Creates a new job in Ansible Tower, immediately starts it, and
        returns back an ID in order for its status to be monitored.

        =====API DOCS=====
        Launch a new job based on a job template.

        :param job_template: Primary key or name of the job template to launch new job.
        :type job_template: str
        :param monitor: Flag that if set, immediately calls ``monitor`` on the newly launched job rather
                        than exiting with a success.
        :type monitor: bool
        :param wait: Flag that if set, monitor the status of the job, but do not print while job is in progress.
        :type wait: bool
        :param timeout: If provided with ``monitor`` flag set, this attempt will time out after the given number
                        of seconds.
        :type timeout: int
        :param no_input: Flag that if set, suppress any requests for input.
        :type no_input: bool
        :param extra_vars: yaml formatted texts that contains extra variables to pass on.
        :type extra_vars: array of strings
        :param diff_mode: Specify diff mode for job template to run.
        :type diff_mode: bool
        :param limit: Specify host limit for job template to run.
        :type limit: str
        :param tags: Specify tagged actions in the playbook to run.
        :type tags: str
        :param skip_tags: Specify tagged actions in the playbook to omit.
        :type skip_tags: str
        :param job_type: Specify job type for job template to run.
        :type job_type: str
        :param verbosity: Specify verbosity of the playbook run.
        :type verbosity: int
        :param inventory: Specify machine credential for job template to run.
        :type inventory: str
        :param credential: Specify machine credential for job template to run.
        :type credential: str
        :returns: Result of subsequent ``monitor`` call if ``monitor`` flag is on; Result of subsequent
                  ``wait`` call if ``wait`` flag is on; Result of subsequent ``status`` call if none of
                  the two flags are on.
        :rtype: dict

        =====API DOCS=====
        """
        # Get the job template from Ansible Tower.
        # This is used as the baseline for starting the job.

        tags = kwargs.get('tags', None)
        jt_resource = get_resource('job_template')
        jt = jt_resource.get(job_template)

        # Update the job data by adding an automatically-generated job name,
        # and removing the ID.
        data = {}
        if tags:
            data['job_tags'] = tags

        # Initialize an extra_vars list that starts with the job template
        # preferences first, if they exist
        # NOTE(review): `data` was just initialized to {} above (plus
        # optionally 'job_tags'), so 'extra_vars' can never be present here
        # and this branch appears to be dead -- looks like a leftover from an
        # earlier version that copied the JT into `data`; confirm.
        extra_vars_list = []
        if 'extra_vars' in data and len(data['extra_vars']) > 0:
            # But only do this for versions before 2.3
            debug.log('Getting version of Tower.', header='details')
            r = client.get('/config/')
            if LooseVersion(r.json()['version']) < LooseVersion('2.4'):
                extra_vars_list = [data['extra_vars']]

        # Add the runtime extra_vars to this list
        if extra_vars:
            extra_vars_list += list(extra_vars)  # accept tuples

        # If the job template requires prompting for extra variables,
        # do so (unless --no-input is set).
        if jt.get('ask_variables_on_launch', False) and not no_input \
                and not extra_vars:
            # If JT extra_vars are JSON, echo them to user as YAML
            initial = parser.process_extra_vars([jt['extra_vars']],
                                                force_json=False)
            initial = '\n'.join((
                '# Specify extra variables (if any) here as YAML.',
                '# Lines beginning with "#" denote comments.',
                initial,
            ))
            # click.edit returns None when the editor is closed unsaved.
            extra_vars = click.edit(initial) or ''
            if extra_vars != initial:
                extra_vars_list = [extra_vars]

        # Data is starting out with JT variables, and we only want to
        # include extra_vars that come from the algorithm here.
        data.pop('extra_vars', None)

        # Replace/populate data fields if prompted.
        modified = set()
        for resource in PROMPT_LIST:
            # NOTE: this pop() mutates the fetched `jt` dict in place.
            if jt.pop('ask_' + resource + '_on_launch',
                      False) and not no_input:
                resource_object = kwargs.get(resource, None)
                if type(resource_object) == types.Related:
                    # Resolve a name/related lookup into a primary key.
                    resource_class = get_resource(resource)
                    resource_object = resource_class.get(resource).pop(
                        'id', None)
                if resource_object is None:
                    debug.log('{0} is asked at launch but not provided'.format(
                        resource),
                              header='warning')
                elif resource != 'tags':
                    data[resource] = resource_object
                    modified.add(resource)

        # Dump extra_vars into JSON string for launching job
        if len(extra_vars_list) > 0:
            data['extra_vars'] = parser.process_extra_vars(extra_vars_list,
                                                           force_json=True)

        # Create the new job in Ansible Tower.
        start_data = {}
        endpoint = '/job_templates/%d/launch/' % jt['id']
        if 'extra_vars' in data and len(data['extra_vars']) > 0:
            start_data['extra_vars'] = data['extra_vars']
        if tags:
            start_data['job_tags'] = data['job_tags']
        # Forward only the prompted-for fields that were actually supplied.
        for resource in PROMPT_LIST:
            if resource in modified:
                start_data[resource] = data[resource]

        # There's a non-trivial chance that we are going to need some
        # additional information to start the job; in particular, many jobs
        # rely on passwords entered at run-time.
        #
        # If there are any such passwords on this job, ask for them now.
        debug.log('Asking for information necessary to start the job.',
                  header='details')
        job_start_info = client.get(endpoint).json()
        for password in job_start_info.get('passwords_needed_to_start', []):
            start_data[password] = getpass('Password for %s: ' % password)

        # Actually start the job.
        debug.log('Launching the job.', header='details')
        self._pop_none(kwargs)
        kwargs.update(start_data)
        job_started = client.post(endpoint, data=kwargs)

        # Get the job ID from the result.
        job_id = job_started.json()['id']

        # If returning json indicates any ignored fields, display it in
        # verbose mode.
        # NOTE(review): .json() above would already have raised on an empty
        # body, so this '' guard looks unreachable -- confirm intent.
        if job_started.text == '':
            ignored_fields = {}
        else:
            ignored_fields = job_started.json().get('ignored_fields', {})
        has_ignored_fields = False
        for key, value in ignored_fields.items():
            if value and value != '{}':
                if not has_ignored_fields:
                    debug.log('List of ignored fields on the server side:',
                              header='detail')
                    has_ignored_fields = True
                debug.log('{0}: {1}'.format(key, value))

        # Get some information about the running job to print
        result = self.status(pk=job_id, detail=True)
        result['changed'] = True

        # If we were told to monitor the job once it started, then call
        # monitor from here.
        if monitor:
            return self.monitor(job_id, timeout=timeout)
        elif wait:
            return self.wait(job_id, timeout=timeout)

        return result