def load_json(self, path: str, ensure_copy: bool = False) -> Tuple[bool, dict]:
    """Load a JSON file from the cache folder.

    Important: if the dict is returned directly from the cache,
    any mutations will affect the cached dict.

    :param path: path relative to base folder
    :param ensure_copy: ensure the dict is a copy of that from the cache
    :returns: (from cache, the content)
        If from cache, mutations will directly affect the cache
    """
    if path not in self._cache:
        return False, json.loads((self._path / path).read_text(self._encoding))
    ctype, content = self._cache[path]
    if ctype == 'text':
        return False, json.loads(content)
    if ctype == 'json':
        if ensure_copy:
            return False, copy.deepcopy(content)
        return True, content
    raise TypeError(f"content of type '{ctype}' could not be converted to a dict")
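# A minimal, self-contained sketch of why `ensure_copy` matters above; the
# `_cache` wiring here is a hypothetical stand-in for the real class state,
# only the (from_cache, content) contract follows the method's docstring.
import copy

_cache = {'config.json': ('json', {'debug': False})}

def _load(path, ensure_copy=False):
    ctype, content = _cache[path]
    if ensure_copy:
        return False, copy.deepcopy(content)
    return True, content

from_cache, cfg = _load('config.json')
cfg['debug'] = True  # mutates the cached dict directly
assert _cache['config.json'][1]['debug'] is True

_, cfg_copy = _load('config.json', ensure_copy=True)
cfg_copy['debug'] = False  # safe: operates on a deep copy
assert _cache['config.json'][1]['debug'] is True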
def migration_replace_text_field_with_json_field(data):
    """Apply migration 0033 - REV. 1.0.33

    Store dict-values as JSON serializable dicts instead of strings

    NB! Specific for Django backend
    """
    for content in data['export_data'].get('Computer', {}).values():
        for value in ['metadata', 'transport_params']:
            if isinstance(content[value], str):
                content[value] = json.loads(content[value])

    for content in data['export_data'].get('Log', {}).values():
        if isinstance(content['metadata'], str):
            content['metadata'] = json.loads(content['metadata'])
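# A hypothetical before/after illustration of the migration above: string-valued
# fields are parsed into dicts in place. The sample data is illustrative only
# and assumes `json` is imported as the function requires.
data = {
    'export_data': {
        'Computer': {'1': {'metadata': '{"shebang": "#!/bin/bash"}', 'transport_params': '{}'}},
        'Log': {'1': {'metadata': '{"objpk": 4}'}},
    }
}
migration_replace_text_field_with_json_field(data)
assert data['export_data']['Computer']['1']['metadata'] == {'shebang': '#!/bin/bash'}
assert data['export_data']['Computer']['1']['transport_params'] == {}
assert data['export_data']['Log']['1']['metadata'] == {'objpk': 4}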
def test_base_url(self):
    """
    Test that / returns list of endpoints
    """
    with self.app.test_client() as client:
        data_base = json.loads(client.get(self.get_url_prefix() + '/').data)['data']
        data_server = json.loads(client.get(self.get_url_prefix() + '/server/endpoints').data)['data']

        self.assertTrue(len(data_base['available_endpoints']) > 0)
        self.assertDictEqual(data_base, data_server)
def test_timezone_addition_and_dir_correction(self):
    """
    This method tests if the timezone is added correctly to timestamps
    that don't have a timezone. Moreover, it checks if the given
    directory paths are normalized as expected.
    """
    backup_variables = json.loads(self._json_test_input_6)
    self._backup_setup_inst._ignore_backup_dir_existence_check = True
    self._backup_setup_inst._read_backup_info_from_dict(backup_variables)

    self.assertIsNotNone(
        self._backup_setup_inst._oldest_object_bk.tzinfo,
        'Timezone info should not be none (timestamp: {}).'.format(
            self._backup_setup_inst._oldest_object_bk))

    self.assertIsNotNone(
        self._backup_setup_inst._end_date_of_backup.tzinfo,
        'Timezone info should not be none (timestamp: {}).'.format(
            self._backup_setup_inst._end_date_of_backup))

    self.assertIsNotNone(
        self._backup_setup_inst._internal_end_date_of_backup.tzinfo,
        'Timezone info should not be none (timestamp: {}).'.format(
            self._backup_setup_inst._internal_end_date_of_backup))

    # The destination directory of the _backup_setup_inst
    self.assertEqual(
        self._backup_setup_inst._backup_dir, '/scratch/aiida_user/backup',
        '_backup_setup_inst destination directory is not normalized as expected.')
def test_calculation_iotree(self):
    """
    Get the filtered incoming and outgoing link trees for a given calculation
    """
    node_uuid = self.get_dummy_data()['calculations'][1]['uuid']
    url = self.get_url_prefix() + '/nodes/' + str(node_uuid) + '/links/tree?in_limit=1&out_limit=1'
    with self.app.test_client() as client:
        response_value = client.get(url)
        response = json.loads(response_value.data)
        self.assertEqual(len(response['data']['nodes']), 1)
        self.assertEqual(len(response['data']['nodes'][0]['incoming']), 1)
        self.assertEqual(len(response['data']['nodes'][0]['outgoing']), 1)
        self.assertEqual(len(response['data']['metadata']), 1)
        expected_attr = [
            'ctime', 'mtime', 'id', 'node_label', 'node_type', 'uuid',
            'description', 'incoming', 'outgoing'
        ]
        received_attr = response['data']['nodes'][0].keys()
        for attr in expected_attr:
            self.assertIn(attr, received_attr)
        RESTApiTestCase.compare_extra_response_data(self, 'nodes', url, response, uuid=node_uuid)
def test_loading_basic_params_from_file(self):
    """
    This method tests the correct loading of the basic _backup_setup_inst
    parameters from a JSON string.
    """
    backup_variables = json.loads(self._json_test_input_1)
    self._backup_setup_inst._ignore_backup_dir_existence_check = True
    self._backup_setup_inst._read_backup_info_from_dict(backup_variables)

    self.assertEqual(
        self._backup_setup_inst._oldest_object_bk,
        parse('2014-07-18 13:54:53.688484+00:00'),
        'Last _backup_setup_inst start date is not parsed correctly')

    # The destination directory of the _backup_setup_inst
    self.assertEqual(
        self._backup_setup_inst._backup_dir,
        '/scratch/aiida_user/backupScriptDest',
        '_backup_setup_inst destination directory not parsed correctly')

    self.assertEqual(
        self._backup_setup_inst._backup_length_threshold,
        datetime.timedelta(hours=2),
        '_backup_length_threshold not parsed correctly')

    self.assertEqual(
        self._backup_setup_inst._periodicity, 2,
        '_periodicity not parsed correctly')
def test_calculation_attributes(self):
    """
    Get list of calculation attributes
    """
    attributes = {
        'attr1': 'OK',
        'attr2': 'OK',
        'resources': {
            'num_machines': 1,
            'num_mpiprocs_per_machine': 1
        },
    }
    node_uuid = self.get_dummy_data()['calculations'][1]['uuid']
    url = self.get_url_prefix() + '/nodes/' + str(node_uuid) + '/contents/attributes'
    with self.app.test_client() as client:
        rv_obj = client.get(url)
        response = json.loads(rv_obj.data)
        self.assertNotIn('message', response)
        self.assertEqual(response['data']['attributes'], attributes)
        RESTApiTestCase.compare_extra_response_data(self, 'nodes', url, response, uuid=node_uuid)
def test_json(self):
    """Test loading and dumping from json."""
    dictionary_01 = extendeddicts.AttributeDict({'x': 1, 'y': 2})
    dictionary_02 = json.loads(json.dumps(dictionary_01))
    # Note that here I am comparing a plain dictionary (dictionary_02) with an
    # extendeddicts.AttributeDict (dictionary_01) and they still compare as equal
    self.assertEqual(dictionary_01, dictionary_02)
def test_projectable_properties(self):
    """
    test projectable_properties endpoint
    """
    for nodetype in ['nodes', 'processes', 'computers', 'users', 'groups']:
        url = self.get_url_prefix() + '/' + nodetype + '/projectable_properties'
        with self.app.test_client() as client:
            rv_obj = client.get(url)
            response = json.loads(rv_obj.data)
            self.assertNotIn('message', response)

            expected_keys = ['display_name', 'help_text', 'is_display', 'is_foreign_key', 'type']

            # check fields
            for _, pinfo in response['data']['fields'].items():
                available_keys = pinfo.keys()
                for prop in expected_keys:
                    self.assertIn(prop, available_keys)

            # check order
            available_properties = response['data']['fields'].keys()
            for prop in response['data']['ordering']:
                self.assertIn(prop, available_properties)
def load_from_serialized(cls, data):
    """
    Create a new instance loading the values from JSON-serialised data as a string

    :param data: The string with the JSON-serialised data to load from
    """
    from aiida.common import json
    return cls.load_from_dict(json.loads(data))
def test_calculation_retrieved_outputs(self):
    """
    Get the list of given calculation retrieved_outputs
    """
    node_uuid = self.get_dummy_data()['calculations'][1]['uuid']
    url = self.get_url_prefix() + '/calcjobs/' + str(node_uuid) + '/output_files'
    with self.app.test_client() as client:
        response_value = client.get(url)
        response = json.loads(response_value.data)
        self.assertEqual(response['data'], [{'name': 'calcjob_outputs', 'type': 'DIRECTORY'}])
def test_structure_attributes_filter(self):
    """
    Get the list of given calculation attributes filtered
    """
    cell = [[2., 0., 0.], [0., 2., 0.], [0., 0., 2.]]
    node_uuid = self.get_dummy_data()['structuredata'][0]['uuid']
    url = self.get_url_prefix() + '/nodes/' + str(node_uuid) + '?attributes=true&attributes_filter=cell'
    with self.app.test_client() as client:
        rv_obj = client.get(url)
        response = json.loads(rv_obj.data)
        self.assertEqual(response['data']['nodes'][0]['attributes']['cell'], cell)
def check_full_deserialization_serialization(self, input_string, backup_inst):
    """Utility function to compare input string with content from Backup classes."""
    input_variables = json.loads(input_string)
    backup_inst._ignore_backup_dir_existence_check = True
    backup_inst._read_backup_info_from_dict(input_variables)
    target_variables = backup_inst._dictionarize_backup_info()
    self.assertEqual(
        input_variables, target_variables,
        f'The test string {input_string} did not pass the serialization/deserialization round-trip test.\n'
        f'Input variables: {input_variables}\n'
        f'Output variables: {target_variables}\n'
    )
def test_calculation_extras_filter(self):
    """
    Get the list of given calculation extras filtered
    """
    extras = {'extra1': False, 'extra2': 'extra_info'}
    node_uuid = self.get_dummy_data()['calculations'][1]['uuid']
    url = self.get_url_prefix() + '/nodes/' + str(node_uuid) + '?extras=true&extras_filter=extra1,extra2'
    with self.app.test_client() as client:
        response_value = client.get(url)
        response = json.loads(response_value.data)
        self.assertEqual(response['data']['nodes'][0]['extras']['extra1'], extras['extra1'])
        self.assertEqual(response['data']['nodes'][0]['extras']['extra2'], extras['extra2'])
def test_contents_attributes_filter(self):
    """
    Get list of calculation attributes with filter attributes_filter
    """
    node_uuid = self.get_dummy_data()['calculations'][1]['uuid']
    url = self.get_url_prefix() + '/nodes/' + str(node_uuid) + '/contents/attributes?attributes_filter="attr1"'
    with self.app.test_client() as client:
        rv_obj = client.get(url)
        response = json.loads(rv_obj.data)
        self.assertNotIn('message', response)
        self.assertEqual(response['data']['attributes'], {'attr1': 'OK'})
        RESTApiTestCase.compare_extra_response_data(self, 'nodes', url, response, uuid=node_uuid)
def test_server(self):
    """
    Test that /server endpoint returns AiiDA version
    """
    url = f'{self.get_url_prefix()}/server'
    from aiida import __version__

    with self.app.test_client() as client:
        response = client.get(url)
        data = json.loads(response.data)['data']

        self.assertEqual(__version__, data['AiiDA_version'])
        self.assertEqual(self.get_url_prefix(), data['API_prefix'])
def test_node_single_extras_filter(self):
    """
    Check that when only one node extra is specified in extras_filter
    only this extra is returned as a dictionary when pagination is set
    """
    expected_extra = ['extra2']
    url = f'{self.get_url_prefix()}/nodes/page/1?perpage=10&extras=true&extras_filter=extra2'
    with self.app.test_client() as client:
        response_value = client.get(url)
        response = json.loads(response_value.data)
        self.assertNotEqual(len(response['data']['nodes']), 0)
        for node in response['data']['nodes']:
            self.assertEqual(list(node['extras'].keys()), expected_extra)
def test_comments(self):
    """
    Get the node comments
    """
    node_uuid = self.get_dummy_data()['structuredata'][0]['uuid']
    url = self.get_url_prefix() + '/nodes/' + str(node_uuid) + '/contents/comments'
    with self.app.test_client() as client:
        rv_obj = client.get(url)
        response = json.loads(rv_obj.data)['data']['comments']
        all_comments = []
        for comment in response:
            all_comments.append(comment['message'])
        self.assertEqual(sorted(all_comments), sorted(['This is test comment.', 'Add another comment.']))
def read_json_files(path, *, names=('metadata.json', 'data.json')) -> List[dict]:
    """Get metadata.json and data.json from an exported AiiDA archive

    :param path: the filepath of the archive
    :param names: the files to retrieve
    """
    jsons: List[dict] = []

    if zipfile.is_zipfile(path):
        for name in names:
            jsons.append(json.loads(read_file_in_zip(path, name)))
    elif tarfile.is_tarfile(path):
        for name in names:
            jsons.append(json.loads(read_file_in_tar(path, name)))
    else:
        raise ValueError('invalid file format, expected either a zip archive or gzipped tarball')

    return jsons
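# Usage sketch for read_json_files. The archive filename and the
# 'export_version' key below are illustrative of the legacy archive layout,
# not guaranteed by this function.
metadata, data = read_json_files('export.aiida')
print(metadata.get('export_version'), list(data))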
def test_node_single_attributes_filter(self):
    """
    Check that when only one node attribute is specified in attributes_filter
    only this attribute is returned as a dictionary when pagination is set
    """
    expected_attribute = ['resources']
    url = self.get_url_prefix() + '/nodes/page/1?perpage=10&attributes=true&attributes_filter=resources'
    with self.app.test_client() as client:
        response_value = client.get(url)
        response = json.loads(response_value.data)
        self.assertNotEqual(len(response['data']['nodes']), 0)
        for node in response['data']['nodes']:
            self.assertEqual(list(node['attributes'].keys()), expected_attribute)
def load_from_serialized(self, data):
    """
    Load value from serialised data

    :param data: The data to load from
    :return: The value after loading
    """
    from aiida.common import json
    deser_data = json.loads(data)
    for key, value in deser_data.items():
        self[key] = self.deserialize_field(value, self._special_serializers.get(key, None))
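# A self-contained sketch of the per-field deserialization pattern above;
# the FixtureDict class and its serializer table are hypothetical stand-ins.
import json
from datetime import datetime

class FixtureDict(dict):
    _special_serializers = {'timestamp': 'date'}

    @staticmethod
    def deserialize_field(value, serializer):
        # Only fields listed in _special_serializers get special handling.
        if serializer == 'date':
            return datetime.fromisoformat(value)
        return value

    def load_from_serialized(self, data):
        for key, value in json.loads(data).items():
            self[key] = self.deserialize_field(value, self._special_serializers.get(key))

d = FixtureDict()
d.load_from_serialized('{"timestamp": "2024-01-01T00:00:00", "label": "x"}')
assert isinstance(d['timestamp'], datetime)
assert d['label'] == 'x'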
def test_node_namespace(self):
    """
    Test the REST API call to get the list of available node namespaces
    """
    url = self.get_url_prefix() + '/nodes/full_types'
    with self.app.test_client() as client:
        rv_obj = client.get(url)
        response = json.loads(rv_obj.data)
        expected_data_keys = ['path', 'namespace', 'subspaces', 'label', 'full_type']
        response_keys = response['data'].keys()
        for key in expected_data_keys:
            self.assertIn(key, response_keys)
        RESTApiTestCase.compare_extra_response_data(self, 'nodes', url, response)
def test_download_formats(self):
    """
    test for download format endpoint
    """
    url = self.get_url_prefix() + '/nodes/download_formats'
    with self.app.test_client() as client:
        response_value = client.get(url)
        response = json.loads(response_value.data)

        for key in ['data.structure.StructureData.|', 'data.cif.CifData.|']:
            self.assertIn(key, response['data'].keys())
        for key in ['cif', 'xsf', 'xyz']:
            self.assertIn(key, response['data']['data.structure.StructureData.|'])
        self.assertIn('cif', response['data']['data.cif.CifData.|'])
def test_nodes_full_type_filter(self):
    """
    Get the list of nodes filtered by full_type
    """
    expected_node_uuids = []
    for calc in self.get_dummy_data()['calculations']:
        if calc['node_type'] == 'process.calculation.calcjob.CalcJobNode.':
            expected_node_uuids.append(calc['uuid'])

    url = f'{self.get_url_prefix()}/nodes/?full_type="process.calculation.calcjob.CalcJobNode.|"'
    with self.app.test_client() as client:
        rv_obj = client.get(url)
        response = json.loads(rv_obj.data)
        for node in response['data']['nodes']:
            self.assertIn(node['uuid'], expected_node_uuids)
def test_loading_backup_time_params_from_file_4(self):
    """
    This method tests that the _backup_setup_inst limits are correctly
    loaded from the JSON string and are correctly set.

    In the parsed JSON string, endDateOfBackup and daysToBackuplimit are
    both set, which should lead to an exception.
    """
    from aiida.manage.backup.backup_base import BackupError
    backup_variables = json.loads(self._json_test_input_5)
    self._backup_setup_inst._ignore_backup_dir_existence_check = True

    # An exception should be raised because endDateOfBackup and
    # daysToBackuplimit have been defined at the same time.
    with self.assertRaises(BackupError):
        self._backup_setup_inst._read_backup_info_from_dict(backup_variables)
def test_process_report(self):
    """
    Test process report
    """
    node_uuid = self.get_dummy_data()['calculations'][1]['uuid']
    url = self.get_url_prefix() + '/processes/' + str(node_uuid) + '/report'
    with self.app.test_client() as client:
        response_value = client.get(url)
        response = json.loads(response_value.data)

        expected_keys = response['data'].keys()
        for key in ['logs']:
            self.assertIn(key, expected_keys)

        expected_log_keys = response['data']['logs'][0].keys()
        for key in ['time', 'loggername', 'levelname', 'dbnode_id', 'message']:
            self.assertIn(key, expected_log_keys)
def test_repo(self):
    """
    Test to get repo list or repo file contents for given node
    """
    from aiida.orm import load_node

    node_uuid = self.get_dummy_data()['calculations'][1]['uuid']
    url = self.get_url_prefix() + '/nodes/' + str(node_uuid) + '/repo/list?filename="calcjob_inputs"'
    with self.app.test_client() as client:
        response_value = client.get(url)
        response = json.loads(response_value.data)
        self.assertEqual(response['data']['repo_list'], [{'type': 'FILE', 'name': 'aiida.in'}])

    url = self.get_url_prefix() + '/nodes/' + str(node_uuid) + '/repo/contents?filename="calcjob_inputs/aiida.in"'
    with self.app.test_client() as client:
        response_obj = client.get(url)
        input_file = load_node(node_uuid).get_object_content('calcjob_inputs/aiida.in', mode='rb')
        self.assertEqual(response_obj.data, input_file)
def test_calculation_attributes_filter(self):
    """
    Get the list of given calculation attributes filtered
    """
    attributes = {
        'attr1': 'OK',
        'attr2': 'OK',
        'resources': {
            'num_machines': 1,
            'num_mpiprocs_per_machine': 1
        },
    }
    node_uuid = self.get_dummy_data()['calculations'][1]['uuid']
    url = self.get_url_prefix() + '/nodes/' + str(node_uuid) + '?attributes=true'
    with self.app.test_client() as client:
        response_value = client.get(url)
        response = json.loads(response_value.data)
        self.assertEqual(response['data']['nodes'][0]['attributes'], attributes)
def test_node_attributes_filter_pagination(self):
    """
    Check that node attributes specified in attributes_filter are
    returned as a dictionary when pagination is set
    """
    expected_attributes = ['resources', 'cell']
    url = self.get_url_prefix() + '/nodes/page/1?perpage=10&attributes=true&attributes_filter=resources,cell'
    with self.app.test_client() as client:
        response_value = client.get(url)
        response = json.loads(response_value.data)
        self.assertNotEqual(len(response['data']['nodes']), 0)
        for node in response['data']['nodes']:
            self.assertIn('attributes', node)
            self.assertNotIn('attributes.resources', node)
            self.assertNotIn('attributes.cell', node)
            self.assertEqual(len(node['attributes']), len(expected_attributes))
            for attr in expected_attributes:
                self.assertIn(attr, node['attributes'])
def test_node_extras_filter_pagination(self):
    """
    Check that node extras specified in extras_filter are
    returned as a dictionary when pagination is set
    """
    expected_extras = ['extra1', 'extra2']
    url = f'{self.get_url_prefix()}/nodes/page/1?perpage=10&extras=true&extras_filter=extra1,extra2'
    with self.app.test_client() as client:
        response_value = client.get(url)
        response = json.loads(response_value.data)
        self.assertNotEqual(len(response['data']['nodes']), 0)
        for node in response['data']['nodes']:
            self.assertIn('extras', node)
            self.assertNotIn('extras.extra1', node)
            self.assertNotIn('extras.extra2', node)
            self.assertEqual(len(node['extras']), len(expected_extras))
            for extra in expected_extras:
                self.assertIn(extra, node['extras'])