Example 1
    def _assert_flattened_parameter_names_for_job_in_job_input_parameters(
            self, job_id, expected_loaded_parameters):
        from foundations.job_parameters import flatten_parameter_dictionary
        from foundations_contrib.global_state import redis_connection
        from foundations_internal.foundations_serializer import loads

        flattened_parameters = flatten_parameter_dictionary(
            expected_loaded_parameters)

        flattened_parameters_data = []

        for parameter_name in flattened_parameters.keys():
            flattened_parameters_data.append({
                'argument': {
                    'name': parameter_name,
                    'value': {
                        'type': 'dynamic',
                        'name': parameter_name
                    }
                },
                'stage_uuid': 'stageless'
            })

        logged_parameters = redis_connection.get(
            'jobs:{}:input_parameters'.format(job_id))

        self.assertEqual(flattened_parameters_data, loads(logged_parameters))

    def test_params_are_logged(self):
        import json
        from foundations_contrib.global_state import redis_connection

        key = f'jobs:{self.job_id}:parameters'
        serialized_parameters = redis_connection.get(key)
        parameters = json.loads(serialized_parameters)

        self.assertEqual(20, parameters['blah'])

    def set_up(self):
        from foundations_contrib.global_state import redis_connection

        # Clear any stale job id, deploy the fixture job, then read back the id
        # of the job that was just run locally
        redis_connection.delete('foundations_testing_job_id')
        self._deploy_job_file('acceptance/fixtures/run_locally')
        self.job_id = redis_connection.get(
            'foundations_testing_job_id').decode()

    def test_artifacts_are_saved(self):
        import json
        from foundations_contrib.global_state import redis_connection

        serialized_artifact = redis_connection.get(
            f'jobs:{self.job_id}:user_artifact_metadata')
        artifact = json.loads(serialized_artifact)

        self.assertEqual('thomas_text.txt',
                         artifact['key_mapping']['just_some_artifact'])
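
    # The tests above expect the fixture job at acceptance/fixtures/run_locally to
    # log a parameter 'blah' with value 20 and to save an artifact file
    # 'thomas_text.txt' under the key 'just_some_artifact'. A hypothetical sketch
    # of such a fixture script (the foundations.log_param and
    # foundations.save_artifact calls are assumptions about the SDK surface, not
    # taken from these snippets):
    #
    #     import foundations
    #
    #     foundations.log_param('blah', 20)
    #
    #     with open('thomas_text.txt', 'w') as text_file:
    #         text_file.write('some text')
    #
    #     foundations.save_artifact('thomas_text.txt', key='just_some_artifact')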

def _insert_input_parameter_name_into_job_input_parameter_data(redis_connection, job_id, key):
    from foundations_internal.foundations_serializer import dumps, loads

    job_params_key = f'jobs:{job_id}:input_parameters'

    # Append a new dynamic input-parameter entry to the job's serialized parameter list
    serialized_job_params = redis_connection.get(job_params_key)
    job_params = _deserialized_job_params(loads, serialized_job_params, default_type=list)

    job_params.append({
        'argument': {
            'name': key,
            'value': {'type': 'dynamic', 'name': key}
        },
        'stage_uuid': 'stageless'
    })

    redis_connection.set(job_params_key, dumps(job_params))

def _insert_parameter_value_into_job_run_data(redis_connection, job_id, key, value):
    import json

    job_params_key = f'jobs:{job_id}:parameters'

    # Merge the new key/value pair into the job's JSON parameter mapping
    serialized_job_params = redis_connection.get(job_params_key)
    job_params = _deserialized_job_params(json.loads, serialized_job_params)

    job_params[key] = value

    redis_connection.set(job_params_key, json.dumps(job_params))
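
# The `_deserialized_job_params` helper used by the two functions above is not
# included in these snippets. A minimal sketch of what it presumably does,
# inferred only from the two call sites (deserialize the stored payload if one
# exists, otherwise return an empty container of `default_type`):
def _deserialized_job_params(deserialize, serialized_job_params, default_type=dict):
    if serialized_job_params is None:
        # Nothing stored under this Redis key yet; start from an empty container
        return default_type()
    return deserialize(serialized_job_params)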
Example 7
    def _assert_flattened_parameter_values_for_job_in_job_parameters(
            self, job_id, expected_loaded_parameters):
        from foundations.job_parameters import flatten_parameter_dictionary
        from foundations_contrib.global_state import redis_connection

        import json

        flattened_parameters = flatten_parameter_dictionary(
            expected_loaded_parameters)
        logged_parameters = redis_connection.get(
            'jobs:{}:parameters'.format(job_id))
        self.assertEqual(flattened_parameters, json.loads(logged_parameters))

    def test_local_run_job_bundle_is_same_as_remote(self):
        import os
        import tarfile

        import foundations
        from foundations_contrib.global_state import redis_connection
        from foundations_contrib.utils import foundations_home

        self._deploy_job_file('acceptance/fixtures/run_locally')
        local_job_id = redis_connection.get(
            'foundations_testing_job_id').decode()

        with self.unset_foundations_home():
            remote_job = foundations.submit(
                job_directory='acceptance/fixtures/run_locally',
                command=['main.py'],
                num_gpus=0)
            remote_job.wait_for_deployment_to_complete()
            # Please forgive this hackery; we currently don't have an official way of getting archives through the SDK
            remote_job._deployment.get_job_archive()

        root_archive_directory = os.path.expanduser(
            f'{foundations_home()}/job_data/archive')
        local_archive_directory = f'{root_archive_directory}/{local_job_id}/artifacts/'
        local_files = set(os.listdir(local_archive_directory))

        job_id = remote_job.job_name()
        job_id_prefix = f'{job_id}/'
        tar_file_name = f'{job_id}.tgz'

        with tarfile.open(tar_file_name) as tar:
            remote_files = {
                name[len(job_id_prefix):] for name in tar.getnames()
                if name.startswith(job_id_prefix)
            }

        # Clean up the downloaded archive; ignore the error if it is already gone
        try:
            os.remove(tar_file_name)
        except OSError:
            pass

        # Assert subset because the remote files actually contains an additional file generated by the job submission process
        self.assertTrue(local_files.issubset(remote_files))