def test_delete(self):
    """Test `verdi group delete` command."""
    orm.Group(label='group_test_delete_01').store()
    orm.Group(label='group_test_delete_02').store()

    # Deleting an empty group should succeed when forced.
    outcome = self.cli_runner.invoke(cmd_group.group_delete, ['--force', 'group_test_delete_01'])
    self.assertClickResultNoException(outcome)

    # The removed group must no longer show up in the listing.
    outcome = self.cli_runner.invoke(cmd_group.group_list)
    self.assertClickResultNoException(outcome)
    self.assertNotIn('group_test_delete_01', outcome.output)

    first_node = orm.CalculationNode().store()
    second_node = orm.CalculationNode().store()

    # A group that still contains nodes can also be deleted through `verdi group delete`.
    populated_group = orm.load_group(label='group_test_delete_02')
    populated_group.add_nodes([first_node, second_node])
    self.assertEqual(populated_group.count(), 2)

    outcome = self.cli_runner.invoke(cmd_group.group_delete, ['--force', 'group_test_delete_02'])
    self.assertClickResultNoException(outcome)

    # Loading the deleted group must now fail.
    with self.assertRaises(exceptions.NotExistent):
        orm.load_group(label='group_test_delete_02')
def test_delete(self):
    """Test `verdi group delete` command.

    Covers deletion of an empty group, the requirement to pass ``--clear``
    before a group that still contains nodes may be deleted, and the
    successful deletion with ``--clear``.
    """
    orm.Group(label='group_test_delete_01').store()
    orm.Group(label='group_test_delete_02').store()

    result = self.cli_runner.invoke(cmd_group.group_delete, ['--force', 'group_test_delete_01'])
    self.assertClickResultNoException(result)

    # Verify that removed group is not present in list
    result = self.cli_runner.invoke(cmd_group.group_list)
    self.assertClickResultNoException(result)
    self.assertNotIn('group_test_delete_01', result.output)

    node_01 = orm.CalculationNode().store()
    node_02 = orm.CalculationNode().store()

    # Add some nodes and then use `verdi group delete --clear` to delete a node even when it contains nodes
    group = orm.load_group(label='group_test_delete_02')
    group.add_nodes([node_01, node_02])
    self.assertEqual(group.count(), 2)

    # Calling delete on a group without the `--clear` option should raise
    result = self.cli_runner.invoke(cmd_group.group_delete, ['--force', 'group_test_delete_02'])
    self.assertIsNotNone(result.exception, result.output)

    # With `--clear` option should delete group and nodes
    result = self.cli_runner.invoke(cmd_group.group_delete, ['--force', '--clear', 'group_test_delete_02'])
    self.assertClickResultNoException(result)

    # The group must be gone from the database afterwards.
    with self.assertRaises(exceptions.NotExistent):
        group = orm.load_group(label='group_test_delete_02')
def test_loading():
    """Test that loading instances from the database returns the correct subclass of `Group`."""
    group = orm.Group('normal-group').store()
    loaded = orm.load_group(group.pk)
    assert isinstance(loaded, orm.Group)

    group = orm.AutoGroup('auto-group').store()
    loaded = orm.load_group(group.pk)
    # Bug fix: the assertion previously checked `group` (trivially true since it was just
    # constructed as an `AutoGroup`); it must check the instance returned by `load_group`.
    assert isinstance(loaded, orm.AutoGroup)
def __init__(self,
             pw_code_id: ty.Union[str, int],
             structure_group_id: ty.Union[str, int],
             pseudo_family_id: ty.Union[str, int],
             *args,
             structure_filters: ty.Optional[ty.Dict[str, ty.Any]] = None,
             **kwargs):
    """A SubmissionController for PwBaseWorkChains.

    :param pw_code_id: identifier (label, PK or UUID) of the ``pw.x`` code to run with.
    :param structure_group_id: identifier of the group holding the input structures.
    :param pseudo_family_id: identifier of the pseudopotential family group.
    :param structure_filters: optional query filters used when selecting structures;
        defaults to no filtering.
    """
    super().__init__(*args, **kwargs)
    self._code = orm.load_code(identifier=pw_code_id)
    # Resolve the workflow class from the class-level entry point name.
    self._process_class = plugins.WorkflowFactory(self.WORKFLOW_ENTRY_POINT)
    self._structure_group = orm.load_group(identifier=structure_group_id)
    # Fall back to an empty filter dict when the caller provides none.
    self._structure_filters = structure_filters if structure_filters is not None else {}
    self._pseudo_family = orm.load_group(identifier=pseudo_family_id)
def deserialize_data(data):
    """
    Deserialize a single value or a collection that may contain serialized AiiDA nodes. This is
    essentially the inverse operation of serialize_data which will reload node instances from
    the serialized UUID data. Encoded tuples that are used as dictionary keys will be decoded.

    :param data: serialized data
    :return: the deserialized data with keys decoded and node instances loaded from UUID's
    """
    # NOTE(review): this is Python 2 style code (`unicode`, `.iteritems()`, `collections.Mapping`);
    # it will not run unmodified on Python 3, where these names no longer exist.
    if isinstance(data, AttributeDict):
        return AttributeDict({decode_key(key): deserialize_data(value) for key, value in data.iteritems()})
    elif isinstance(data, AttributesFrozendict):
        return AttributesFrozendict({decode_key(key): deserialize_data(value) for key, value in data.iteritems()})
    elif isinstance(data, collections.Mapping):
        # Generic mappings are rebuilt as plain dicts with decoded keys.
        return {decode_key(key): deserialize_data(value) for key, value in data.iteritems()}
    elif isinstance(data, collections.Sequence) and not isinstance(data, (str, unicode)):
        # Sequences (but not strings) are deserialized element-wise into a list.
        return [deserialize_data(value) for value in data]
    elif isinstance(data, (str, unicode)) and data.startswith(_PREFIX_VALUE_NODE):
        # Strings carrying the node prefix encode a node UUID: reload the node.
        return load_node(uuid=data[len(_PREFIX_VALUE_NODE):])
    elif isinstance(data, (str, unicode)) and data.startswith(_PREFIX_VALUE_GROUP):
        # Strings carrying the group prefix encode a group UUID: reload the group.
        return load_group(uuid=data[len(_PREFIX_VALUE_GROUP):])
    else:
        # Plain values pass through unchanged.
        return data
def test_delete_extra(self):
    """Test the `Group.delete_extra` method."""
    key, value = 'valid_key', 'value'

    self.group.set_extra(key, value)
    self.assertEqual(self.group.get_extra(key), value)
    self.group.delete_extra(key)

    # Deleting the same key twice must raise, since the extra no longer exists.
    with self.assertRaises(AttributeError):
        self.group.delete_extra(key)

    # Repeat with stored group: the deletion has to be persisted as well.
    self.group.set_extra(key, value)
    self.group.store()
    self.group.delete_extra(key)
    with self.assertRaises(AttributeError):
        orm.load_group(self.group.pk).get_extra(key)
def cmd_stage_relax(details, max_atoms):
    """Commands to analyse the relax stage of the project.

    :param details: when falsy, print a summary table of iteration counts per relax
        work chain; the detailed branch only prints a placeholder.
    :param max_atoms: when given, restrict to structures with at most this many atoms.
    """
    import collections
    from aiida.orm import load_group, QueryBuilder, Group, WorkChainNode, Data

    group = load_group('workchain/relax')

    filters_structure = {}
    # Only consider work chains that finished successfully.
    filters_workchain = {'attributes.exit_status': 0}

    if max_atoms is not None:
        # `shorter` filters on the length of the `sites` attribute, i.e. the number of atoms.
        filters_structure['attributes.sites'] = {'shorter': max_atoms + 1}

    if not details:
        # Query the relax work chains in the group, their called sub work chains
        # and their input structures (joined through the `structure` input link).
        query = QueryBuilder()
        query.append(Group, filters={'id': group.pk}, tag='group')
        query.append(WorkChainNode, with_group='group', filters=filters_workchain, tag='relax', project='id')
        query.append(WorkChainNode, with_incoming='relax', project='id', tag='base')
        query.append(Data, with_outgoing='relax', edge_filters={'label': 'structure'}, filters=filters_structure)

        # Group the called sub work chain ids per relax work chain id.
        mapping = collections.defaultdict(list)
        for relax, called in query.iterall():
            mapping[relax].append(called)

        # Number of called sub work chains (iterations) for each relax work chain.
        counts = []
        for called in mapping.values():
            counts.append(len(called))

        table = []
        counter = collections.Counter(counts)
        total = sum(counter.values())
        cumulative = 0

        # Tabulate iteration-count frequencies, most common first, with cumulative percentage.
        for iterations, count in sorted(counter.items(), key=lambda item: item[1], reverse=True):
            percentage = (count / total) * 100
            cumulative += percentage
            table.append((count, percentage, cumulative, iterations))

        click.echo(tabulate.tabulate(table, headers=['Count', 'Percentage', 'Cumulative', 'Iterations']))
    else:
        # NOTE(review): placeholder — the detailed analysis branch is not implemented yet.
        print('test')
def test_load(clear_db):
    """Test that loading of a `SsspFamily` through `load_group` works."""
    family = SsspFamily(label='SSSP').store()
    assert isinstance(family, SsspFamily)

    loaded = orm.load_group(family.pk)
    # Bug fix: the assertion previously re-checked `family` (trivially true); the point of the
    # test is that the *loaded* instance comes back as the correct `SsspFamily` subclass.
    assert isinstance(loaded, SsspFamily)
    assert loaded.uuid == family.uuid
    assert loaded.elements == family.elements
def test_delete(self):
    """Test `verdi group delete` command.

    Covers the ``--dry-run`` mode (group survives), plain forced deletion of an
    empty and of a populated group (nodes survive), and ``--delete-nodes``
    (group and nodes are both removed).
    """
    orm.Group(label='group_test_delete_01').store()
    orm.Group(label='group_test_delete_02').store()
    orm.Group(label='group_test_delete_03').store()

    # dry run: the group must still be loadable afterwards
    result = self.cli_runner.invoke(cmd_group.group_delete, ['--dry-run', 'group_test_delete_01'])
    self.assertClickResultNoException(result)
    orm.load_group(label='group_test_delete_01')

    result = self.cli_runner.invoke(cmd_group.group_delete, ['--force', 'group_test_delete_01'])
    self.assertClickResultNoException(result)

    # Verify that removed group is not present in list
    result = self.cli_runner.invoke(cmd_group.group_list)
    self.assertClickResultNoException(result)
    self.assertNotIn('group_test_delete_01', result.output)

    node_01 = orm.CalculationNode().store()
    node_02 = orm.CalculationNode().store()
    node_pks = {node_01.pk, node_02.pk}

    # Add some nodes and then use `verdi group delete` to delete a group that contains nodes
    group = orm.load_group(label='group_test_delete_02')
    group.add_nodes([node_01, node_02])
    self.assertEqual(group.count(), 2)

    result = self.cli_runner.invoke(cmd_group.group_delete, ['--force', 'group_test_delete_02'])
    # Bug fix: the invocation result was previously assigned but never checked, so a
    # command failure would have gone unnoticed until the unrelated NotExistent assertion.
    self.assertClickResultNoException(result)

    with self.assertRaises(exceptions.NotExistent):
        orm.load_group(label='group_test_delete_02')

    # check nodes still exist
    for pk in node_pks:
        orm.load_node(pk)

    # delete the group and the nodes it contains
    group = orm.load_group(label='group_test_delete_03')
    group.add_nodes([node_01, node_02])
    result = self.cli_runner.invoke(cmd_group.group_delete, ['--force', '--delete-nodes', 'group_test_delete_03'])
    self.assertClickResultNoException(result)

    # check group and nodes no longer exist
    with self.assertRaises(exceptions.NotExistent):
        orm.load_group(label='group_test_delete_03')
    for pk in node_pks:
        with self.assertRaises(exceptions.NotExistent):
            orm.load_node(pk)
def group_constructor(loader, group):
    """Reconstruct an AiiDA group from its YAML scalar representation.

    The scalar is expected to contain the UUID of a stored group, which is used
    to reload the corresponding entity from the database.

    :param loader: the yaml loader
    :param group: the yaml representation
    :return: the aiida group
    :rtype: :class:`aiida.orm.Group`
    """
    group_uuid = loader.construct_scalar(group)
    return orm.load_group(uuid=group_uuid)
def cmd_stage_scf(max_atoms):
    """Commands to analyse the scf stage of the project.

    Tabulates how often each combination of calculation-job exit statuses occurred
    within the successfully finished scf work chains of the `workchain/scf` group.

    :param max_atoms: when given, restrict to structures with at most this many atoms.
    """
    import collections
    from aiida.orm import load_group, QueryBuilder, Group, CalcJobNode, WorkChainNode, Data

    group = load_group('workchain/scf')

    filters_structure = {}
    # Only consider work chains that finished successfully.
    filters_workchain = {'attributes.exit_status': 0}

    if max_atoms is not None:
        # `shorter` filters on the length of the `sites` attribute, i.e. the number of atoms.
        filters_structure['attributes.sites'] = {'shorter': max_atoms + 1}

    # Query the scf work chains in the group, the exit statuses of the calculation
    # jobs they launched, and their input structures (via the `pw__structure` link).
    query = QueryBuilder()
    query.append(Group, filters={'id': group.pk}, tag='group')
    query.append(WorkChainNode, with_group='group', filters=filters_workchain, tag='scf', project='id')
    query.append(CalcJobNode, with_incoming='scf', project='attributes.exit_status')
    query.append(Data, with_outgoing='scf', edge_filters={'label': 'pw__structure'}, filters=filters_structure)

    # Collect the exit statuses of all calculation jobs per scf work chain.
    mapping = collections.defaultdict(list)
    for scf, exit_status in query.iterall():
        mapping[scf].append(exit_status)

    # The ordered tuple of exit statuses identifies each work chain's "history".
    counts = []
    for exit_statuses in mapping.values():
        counts.append(tuple(exit_statuses))

    table = []
    counter = collections.Counter(counts)
    total = sum(counter.values())
    cumulative = 0

    # Tabulate the frequencies, most common first, with cumulative percentage.
    for exit_statuses, count in sorted(counter.items(), key=lambda item: item[1], reverse=True):
        percentage = (count / total) * 100
        cumulative += percentage
        table.append((count, percentage, cumulative, exit_statuses))

    click.echo(tabulate.tabulate(table, headers=['Count', 'Percentage', 'Cumulative', 'Exit statuses']))
def test_set_extra(self):
    """Test the `Group.set_extra` method."""
    # Keys containing a period are illegal and must be rejected up front.
    with self.assertRaises(exceptions.ValidationError):
        self.group.set_extra('illegal.key', 'value')

    self.group.set_extra('valid_key', 'value')
    self.group.store()

    # Overwriting an extra after storing must be persisted to the database.
    self.group.set_extra('valid_key', 'changed')
    reloaded = orm.load_group(self.group.pk)
    self.assertEqual(reloaded.get_extra('valid_key'), 'changed')
def cmd_uniques(
    group, databases, not_elements, elements, max_atoms, number_species, partial_occupancies, no_cod_hydrogen, verbose
):
    """Count or list the unique structures in GROUP that match the given filters.

    :param group: the group of structures to query; when absent, it is derived from the single given database.
    :param databases: database names used both to derive the group and to filter on duplicate provenance.
    :param not_elements: exclude structures containing any of these elements.
    :param elements: include only structures containing all of these elements.
    :param max_atoms: include only structures with at most this many atoms.
    :param number_species: include only structures with exactly this many species.
    :param partial_occupancies: filter on the partial-occupancies extra.
    :param no_cod_hydrogen: exclude COD structures containing hydrogen.
    :param verbose: print a detail table instead of just the count.
    """
    from tabulate import tabulate
    from aiida import orm

    # All filter clauses below are AND-ed together.
    filters = {'and': []}

    if not group and not databases:
        raise click.BadParameter('need at least a GROUP or `--databases` to be specified')

    if not group:
        # Bug fix: the condition was `len(databases) >= 1`, which always raised here (at least one
        # database is guaranteed by the check above) and made the fallback below unreachable.
        # Only *more* than one database is ambiguous when no GROUP is given.
        if len(databases) > 1:
            raise click.BadParameter('can only specify one database when not specifying a GROUP')
        group = orm.load_group('{}/structure/unique'.format(databases[0]))

    if no_cod_hydrogen:
        filters['and'].append({'id': {'!in': get_cod_hydrogen_structure_ids()}})

    if max_atoms is not None:
        # `shorter` filters on the length of the `sites` attribute, i.e. the number of atoms.
        filters['and'].append({'attributes.sites': {'shorter': max_atoms + 1}})

    if number_species is not None:
        filters['and'].append({'attributes.kinds': {'of_length': number_species}})

    if elements:
        # The chemical system extra is stored as `-El1-El2-...-`, so require all given elements.
        filters['and'].append({'extras.chemical_system': {'like': '%-{}-%'.format('-%-'.join(sorted(elements)))}})

    if not_elements:
        for element in not_elements:
            filters['and'].append({'extras.chemical_system': {'!like': '%-{}-%'.format(element)}})

    if partial_occupancies is not None:
        filters['and'].append({'extras.{}'.format(KEY_PARTIAL_OCCUPANCIES): partial_occupancies})

    if databases:
        # Require a duplicates entry for every selected database and its absence for the rest.
        for name in DATABASES:
            key = 'has_key' if name in databases else '!has_key'
            filters['and'].append({'extras.duplicates': {key: name}})

    builder = orm.QueryBuilder().append(
        orm.Group, filters={'id': group.id}, tag='group').append(
        orm.StructureData, with_group='group', filters=filters)

    if not verbose:
        echo.echo('{}'.format(builder.count()))
    else:
        rows = []
        for [structure] in builder.iterall():
            rows.append((
                structure.get_formula(), len(structure.kinds), len(structure.sites), structure.uuid,
                structure.get_extra('source')['id']
            ))
        echo.echo(tabulate(rows, headers=['Formula', '# species', '# atoms', 'UUID', 'Source identifier']))
def test_copy_existing_group(self):
    """Test user is prompted to continue if destination group exists and is not empty.

    A second copy onto the same non-empty destination must abort (no input can be
    given, and the default answer is abort) and leave the destination unchanged.
    """
    source_label = 'source_copy_existing_group'
    dest_label = 'dest_copy_existing_group'

    # Create source group with nodes
    calc_s1 = orm.CalculationNode().store()
    calc_s2 = orm.CalculationNode().store()
    nodes_source_group = {str(node.uuid) for node in [calc_s1, calc_s2]}
    source_group = orm.Group(label=source_label).store()
    source_group.add_nodes([calc_s1, calc_s2])

    # Copy using `verdi group copy` - making sure all is successful
    options = [source_label, dest_label]
    result = self.cli_runner.invoke(cmd_group.group_copy, options)
    self.assertClickResultNoException(result)
    self.assertIn(
        'Success: Nodes copied from group<{}> to group<{}>'.format(source_label, dest_label), result.output,
        result.exception
    )

    # Check destination group exists with source group's nodes
    dest_group = orm.load_group(label=dest_label)
    self.assertEqual(dest_group.count(), 2)
    nodes_dest_group = {str(node.uuid) for node in dest_group.nodes}
    self.assertSetEqual(nodes_source_group, nodes_dest_group)

    # Copy again, making sure an abort error is raised, since no user input can be made and default is abort
    result = self.cli_runner.invoke(cmd_group.group_copy, options)
    self.assertIsNotNone(result.exception, result.output)
    self.assertIn(
        'Warning: Destination group<{}> already exists and is not empty.'.format(dest_label), result.output,
        result.exception
    )

    # Check destination group is unchanged
    dest_group = orm.load_group(label=dest_label)
    self.assertEqual(dest_group.count(), 2)
    nodes_dest_group = {str(node.uuid) for node in dest_group.nodes}
    self.assertSetEqual(nodes_source_group, nodes_dest_group)
def test_loading_unregistered():
    """Test rules around loading `Group` subclasses without a registered entry point.

    Storing instances of unregistered subclasses is not allowed so we have to create one
    sneakily by instantiating a normal group and manipulating the type string directly on
    the database model.
    """
    group = orm.Group(label='group')
    group.backend_entity.dbmodel.type_string = 'unregistered.subclass'
    group.store()

    # An unknown type string should emit a warning and fall back to the base `Group` class.
    with pytest.warns(UserWarning):
        reloaded = orm.load_group(group.pk)

    assert isinstance(reloaded, orm.Group)
def test_clear_extras(self):
    """Test the `Group.clear_extras` method."""
    initial = {'extra_one': 'value', 'extra_two': 'value'}
    self.group.set_extra_many(initial)
    self.assertEqual(self.group.extras, initial)

    # Clearing an unstored group wipes the extras in memory.
    self.group.clear_extras()
    self.assertEqual(self.group.extras, {})

    # Repeat for stored group: clearing has to be persisted to the database.
    self.group.store()
    self.group.clear_extras()
    self.assertEqual(orm.load_group(self.group.pk).extras, {})
def convert(self, value, param, ctx):
    """Convert the value to actual pseudo family instance.

    :param value: the family identifier passed on the command line.
    :param param: the click parameter.
    :param ctx: the click context.
    :return: the pseudopotential family group.
    :raises click.BadParameter: if the group exists but is not of a supported family type, or
        its ``pseudo_type`` is not among the accepted types.
    :raises aiida.common.exceptions.NotExistent: if no group with the given identifier exists at all.
    """
    try:
        group = super().convert(value, param, ctx)
    except click.BadParameter:
        # Distinguish between "group does not exist at all" (re-raise `NotExistent`) and
        # "group exists but is not a supported family type" (raise a helpful `BadParameter`).
        try:
            from aiida.orm import load_group
            load_group(value)
        except exceptions.NotExistent:  # pylint: disable=try-except-raise
            raise
        else:
            raise click.BadParameter(  # pylint: disable=raise-missing-from
                f'`{value}` is not of a supported pseudopotential family type.\nTo install a supported '
                'pseudofamily, use the `aiida-pseudo` plugin. See the following link for detailed instructions:\n\n'
                ' https://github.com/aiidateam/aiida-quantumespresso#pseudopotentials'
            )

    # Even a supported family class may hold pseudopotentials of a type the caller does not accept.
    if self._pseudo_types is not None and group.pseudo_type not in self._pseudo_types:
        pseudo_types = ', '.join(self._pseudo_types)
        raise click.BadParameter(
            f'family `{group.label}` contains pseudopotentials of the wrong type `{group.pseudo_type}`.\nOnly the '
            f'following types are supported: {pseudo_types}'
        )

    return group
def test_description(self):
    """Test `verdi group description` command."""
    new_description = 'It is a new description'
    group = orm.load_group(label='dummygroup2')
    self.assertNotEqual(group.description, new_description)

    # Setting a description through the CLI should be reflected on the group.
    outcome = self.cli_runner.invoke(group_description, [group.label, new_description])
    self.assertClickResultNoException(outcome)
    self.assertEqual(group.description, new_description)

    # Without a description argument the command simply echoes the current one.
    outcome = self.cli_runner.invoke(group_description, [group.label])
    self.assertClickResultNoException(outcome)
    self.assertIn(new_description, outcome.output)
def test_delete_extra_many(self):
    """Test the `Group.delete_extra_many` method."""
    extras = {'extra_one': 'value', 'extra_two': 'value'}
    keys_present = ['extra_one', 'extra_two']
    keys_mixed = ['extra_one', 'invalid_key']

    self.group.set_extra_many(extras)
    self.assertEqual(self.group.extras, extras)

    # A batch that contains a non-existing key must be rejected as a whole.
    with self.assertRaises(AttributeError):
        self.group.delete_extra_many(keys_mixed)

    # After storing, deleting all existing keys empties the persisted extras.
    self.group.store()
    self.group.delete_extra_many(keys_present)
    self.assertEqual(orm.load_group(self.group.pk).extras, {})
def test_reset_extra(self):
    """Test the `Group.reset_extra` method."""
    original = {'extra_one': 'value', 'extra_two': 'value'}
    replacement = {'extra_three': 'value', 'extra_four': 'value'}
    invalid = {'extra.illegal': 'value', 'extra_four': 'value'}

    self.group.set_extra_many(original)
    self.assertEqual(self.group.extras, original)

    # Resetting replaces the full set of extras.
    self.group.reset_extras(replacement)
    self.assertEqual(self.group.extras, replacement)

    # A batch containing a key with a period is rejected wholesale.
    with self.assertRaises(exceptions.ValidationError):
        self.group.reset_extras(invalid)

    # After storing, the reset has to be persisted to the database.
    self.group.store()
    self.group.reset_extras(replacement)
    self.assertEqual(orm.load_group(self.group.pk).extras, replacement)
def query_nodes(self, uuid=None, proc_label=None, proc_status='finished', exit_status=0):
    """Query the node PKs of all calculations within the group.

    :param uuid: optional group UUID; when given, the instance is re-initialized with it.
    :param proc_label: process label forwarded when re-initializing.
    :param proc_status: process status forwarded when re-initializing (default 'finished').
    :param exit_status: exit status forwarded when re-initializing (default 0).
    :return: list of node PKs of the calculations within the group.
    :raises QueryCalculationsFromGroupError: if neither `uuid` nor `self.group_uuid` is set.
    """
    # Handle invalid and new inputs.
    if uuid is None and self.group_uuid is None:
        raise QueryCalculationsFromGroupError("Invalid input parameters")
    elif uuid is not None:
        self.__init__(group_uuid=uuid, process_label=proc_label, process_status_name=proc_status,
                      exit_status=exit_status)

    filters = self.filters
    group = orm.load_group(self.group_uuid)

    qb = orm.QueryBuilder()
    qb.append(orm.Node, tag="nodes", project=["id"], filters=filters)
    qb.append(orm.Group, tag="group", with_node="nodes", filters={"label": group.label})

    # Bug fix: the result dictionaries are now fetched once instead of re-running
    # `qb.dict()` on every loop iteration, and the unused duplicate `qb.all()` call
    # (`res_nodes`) was removed. Falsy ids are still filtered out, as before.
    entries = qb.distinct().dict()
    self.nodes = [entry["nodes"]["id"] for entry in entries if entry["nodes"]["id"]]
    return self.nodes
def test_add_remove_nodes(self):
    """Test `verdi group remove-nodes` command.

    Adds a single node, verifies it via `verdi group show`, removes it again,
    and finally removes all nodes at once with the ``--clear`` flag.
    """
    node_01 = orm.CalculationNode().store()
    node_02 = orm.CalculationNode().store()
    node_03 = orm.CalculationNode().store()

    result = self.cli_runner.invoke(cmd_group.group_add_nodes, ['--force', '--group=dummygroup1', node_01.uuid])
    self.assertClickResultNoException(result)

    # Check if node is added in group using group show command
    result = self.cli_runner.invoke(cmd_group.group_show, ['dummygroup1'])
    self.assertClickResultNoException(result)
    self.assertIn('CalculationNode', result.output)
    self.assertIn(str(node_01.pk), result.output)

    # Remove same node
    result = self.cli_runner.invoke(cmd_group.group_remove_nodes, ['--force', '--group=dummygroup1', node_01.uuid])
    self.assertIsNone(result.exception, result.output)

    # Check if node is added in group using group show command
    result = self.cli_runner.invoke(cmd_group.group_show, ['-r', 'dummygroup1'])
    self.assertClickResultNoException(result)
    self.assertNotIn('CalculationNode', result.output)
    self.assertNotIn(str(node_01.pk), result.output)

    # Add all three nodes and then use `verdi group remove-nodes --clear` to remove them all
    group = orm.load_group(label='dummygroup1')
    group.add_nodes([node_01, node_02, node_03])
    self.assertEqual(group.count(), 3)

    result = self.cli_runner.invoke(cmd_group.group_remove_nodes, ['--force', '--clear', '--group=dummygroup1'])
    self.assertClickResultNoException(result)
    self.assertEqual(group.count(), 0)
def main(code_string, resources, group_name, sleep_seconds=60):
    """Run a chain of VASP calculations with increasingly dense k-point meshes.

    Each calculation is added to the given group; the next step is launched only
    once the previous one has terminated successfully, using its relaxed structure.

    :param code_string: identifier of the VASP code to use.
    :param resources: scheduler resources for the calculations.
    :param group_name: label of the group to collect the calculation nodes in.
    :param sleep_seconds: polling interval while waiting for a calculation to terminate.
    """
    group = load_group(group_name)
    structure = get_structure()

    node_low_kpoints = launch_aiida_low_kpoints(structure, code_string, resources, "Si VASP calc on kpoints(441)")
    group.add_nodes(node_low_kpoints)

    # Poll until the low-kpoints calculation terminates.
    while True:
        if node_low_kpoints.is_terminated:
            break
        print("Waiting for relaxation calculation to be done.")
        sleep(sleep_seconds)

    if node_low_kpoints.is_finished_ok:
        # Bug fix: the original referenced an undefined `node_relax`; use the node launched above.
        structure = node_low_kpoints.outputs.structure_relaxed.clone()
        # Bug fix: the result was assigned to `node`, leaving `node_medium_kpoints` (used in the
        # polling loop below) undefined and raising NameError at runtime.
        node_medium_kpoints = launch_aiida_medium_kpoints(
            structure, code_string, resources, "Si VASP calc on kpoints(771)")
        group.add_nodes(node_medium_kpoints)
        print(node_medium_kpoints)
    else:
        # NOTE(review): message mentions kpoints(771) although this is the (441) step — confirm intent.
        print("Relaxation calculation on kpoints(771) failed.")
        # Bug fix: without a medium-kpoints node there is nothing left to wait for.
        return

    # Poll until the medium-kpoints calculation terminates.
    while True:
        if node_medium_kpoints.is_terminated:
            break
        print("Waiting for relaxation on kpoints(771) calculation to be done.")
        sleep(sleep_seconds)

    if node_medium_kpoints.is_finished_ok:
        # Bug fix: the original referenced an undefined `node_relax`; use the medium-kpoints node.
        structure = node_medium_kpoints.outputs.structure_relaxed.clone()
        node = launch_aiida_high_kpoints(structure, code_string, resources, "Si VASP calc on kpoints(13131)")
        group.add_nodes(node)
        print(node)
    else:
        print("Relaxation calculation failed.")
def test_import_to_group(self, temp_dir):
    """Test `group` parameter
    Make sure an unstored Group is stored by the import function, forwarding the Group object.
    Make sure the Group is correctly handled and used for imported nodes.
    """
    from aiida.orm import load_group
    from aiida.tools.importexport.common.exceptions import ImportValidationError

    # Create Nodes to export
    data1 = orm.Data().store()
    data2 = orm.Data().store()
    node_uuids = [node.uuid for node in [data1, data2]]

    # Export Nodes
    filename = os.path.join(temp_dir, 'export.aiida')
    export([data1, data2], filename=filename, silent=True)

    self.reset_database()

    # Create Group, do not store
    group_label = 'import_madness'
    group = orm.Group(label=group_label)
    group_uuid = group.uuid

    # Try to import to this Group, providing only label - this should fail
    with self.assertRaises(ImportValidationError) as exc:
        import_data(filename, group=group_label, silent=True)
    self.assertIn('group must be a Group entity', str(exc.exception))

    # Import properly now, providing the Group object
    import_data(filename, group=group, silent=True)

    # Check Group for content
    builder = orm.QueryBuilder().append(orm.Group, filters={'label': group_label}, project='uuid')
    self.assertEqual(
        builder.count(),
        1,
        msg=f'There should be exactly one Group with label {group_label}. Instead {builder.count()} was found.'
    )
    # The previously unstored Group must have been stored with its original UUID.
    imported_group = load_group(builder.all()[0][0])
    self.assertEqual(imported_group.uuid, group_uuid)
    self.assertEqual(
        imported_group.count(),
        len(node_uuids),
        msg='{} Nodes were found in the automatic import group, instead there should have been exactly {} '
        'Nodes'.format(imported_group.count(), len(node_uuids))
    )
    for node in imported_group.nodes:
        self.assertIn(node.uuid, node_uuids)

    # Import again, using a new Group, and make sure the automatic import Group also captures "existing" Nodes
    group_label = 'existing_import'
    group = orm.Group(label=group_label)
    group_uuid = group.uuid

    import_data(filename, group=group, silent=True)

    imported_group = load_group(label=group_label)
    self.assertEqual(imported_group.uuid, group_uuid)
    self.assertEqual(
        imported_group.count(),
        len(node_uuids),
        msg='{} Nodes were found in the automatic import group, instead there should have been exactly {} '
        'Nodes'.format(imported_group.count(), len(node_uuids))
    )
    for node in imported_group.nodes:
        self.assertIn(node.uuid, node_uuids)
def test_add_remove_nodes(self):
    """Test `verdi group remove-nodes` command.

    Covers adding/removing single nodes, clearing the whole group, the error
    cases (node not in group, neither nodes nor ``--clear``, both nodes and
    ``--clear``), and the interactive confirmation/abort prompts.
    """
    node_01 = orm.CalculationNode().store()
    node_02 = orm.CalculationNode().store()
    node_03 = orm.CalculationNode().store()

    result = self.cli_runner.invoke(cmd_group.group_add_nodes, ['--force', '--group=dummygroup1', node_01.uuid])
    self.assertClickResultNoException(result)

    # Check if node is added in group using group show command
    result = self.cli_runner.invoke(cmd_group.group_show, ['dummygroup1'])
    self.assertClickResultNoException(result)
    self.assertIn('CalculationNode', result.output)
    self.assertIn(str(node_01.pk), result.output)

    # Remove same node
    result = self.cli_runner.invoke(cmd_group.group_remove_nodes, ['--force', '--group=dummygroup1', node_01.uuid])
    self.assertIsNone(result.exception, result.output)

    # Check that the node is no longer in the group
    result = self.cli_runner.invoke(cmd_group.group_show, ['-r', 'dummygroup1'])
    self.assertClickResultNoException(result)
    self.assertNotIn('CalculationNode', result.output)
    self.assertNotIn(str(node_01.pk), result.output)

    # Add all three nodes and then use `verdi group remove-nodes --clear` to remove them all
    group = orm.load_group(label='dummygroup1')
    group.add_nodes([node_01, node_02, node_03])
    self.assertEqual(group.count(), 3)

    result = self.cli_runner.invoke(cmd_group.group_remove_nodes, ['--force', '--clear', '--group=dummygroup1'])
    self.assertClickResultNoException(result)
    self.assertEqual(group.count(), 0)

    # Try to remove node that isn't in the group
    result = self.cli_runner.invoke(cmd_group.group_remove_nodes, ['--group=dummygroup1', node_01.uuid])
    self.assertEqual(result.exit_code, ExitCode.CRITICAL)

    # Try to remove no nodes nor clear the group
    result = self.cli_runner.invoke(cmd_group.group_remove_nodes, ['--group=dummygroup1'])
    self.assertEqual(result.exit_code, ExitCode.CRITICAL)

    # Try to remove both nodes and clear the group
    result = self.cli_runner.invoke(cmd_group.group_remove_nodes, ['--group=dummygroup1', '--clear', node_01.uuid])
    self.assertEqual(result.exit_code, ExitCode.CRITICAL)

    # Add a node with confirmation
    result = self.cli_runner.invoke(cmd_group.group_add_nodes, ['--group=dummygroup1', node_01.uuid], input='y')
    self.assertEqual(group.count(), 1)

    # Try to remove two nodes, one that isn't in the group, but abort
    result = self.cli_runner.invoke(
        cmd_group.group_remove_nodes, ['--group=dummygroup1', node_01.uuid, node_02.uuid], input='N'
    )
    self.assertIn('Warning', result.output)
    self.assertEqual(group.count(), 1)

    # Try to clear all nodes from the group, but abort
    result = self.cli_runner.invoke(cmd_group.group_remove_nodes, ['--group=dummygroup1', '--clear'], input='N')
    self.assertIn('Are you sure you want to remove ALL', result.output)
    self.assertIn('Aborted', result.output)
    self.assertEqual(group.count(), 1)
case['pw_code'], case['upf_family'], mu_plus=case['charged'], code_runtime_options=case['runtime_options'], input_namelists=input_namelists ) elif 'restart_sites' in args.task: t, uuid = args.task.split('->') input_namelists = {} if args.inputfile: input_namelists = read_qe_inputs(args.inputfile) calc=load_node(uuid) # group has no process type if calc.process_type is None or calc.process_type == "": g = load_group(uuid) process_status_name = case['process_status'] Q = QueryCalculationsFromGroup(group_uuid=uuid, process_label="PwRelaxWorkChain", process_status_name=process_status_name ) r_uuid = Q.query_pw_nodes_with_exit_status(case['exit_status']) Q.query_nodes_tabulate() print("\n uuid found : {}".format(len(r_uuid))) if yes_or_no(" Run calculations?"): restart_supercells(uuid_list = r_uuid, group_name = g.label, input_namelists = input_namelists ) #
def test_group_name_and_type_change(self, temp_dir):
    """
    Group's name and type columns have changed
    Change for columns:
    “name” --> “label”
    "type" --> "type_string"
    Furthermore, type_strings have been updated to:
    "" --> "user"
    "data.upf.family" --> "data.upf"
    "aiida.import" --> "auto.import"
    "autogroup.run" --> "auto.run"

    The new columns are called on group instances, and will fail if not present.
    A user created Group is validated to have the "user" value as a type_string.
    A UPF file is created and imported/uploaded as a UPF family,
    in order to create a Group with type_string="data.upf".
    Any import will create a Group with type_string "auto.import", which is checked.
    The type_string="auto.run" is not directly checked, but if the three checks above
    succeed, it is understood that "auto.run" is also correctly ex-/imported as the
    type_string content for the relevant Groups.
    """
    from aiida.orm.nodes.data.upf import upload_upf_family

    # To be saved
    groups_label = ['Users', 'UpfData']
    upf_filename = 'Al.test_file.UPF'
    # regular upf file version 2 header
    upf_contents = '\n'.join([
        "<UPF version=\"2.0.1\">",
        'Human readable section is completely irrelevant for parsing!',
        '<PP_HEADER',
        'contents before element tag',
        "element=\"Al\"",
        'contents following element tag',
        '>',
    ])
    path_to_upf = os.path.join(temp_dir, upf_filename)
    with open(path_to_upf, 'w') as upf_file:
        upf_file.write(upf_contents)

    # Create Groups
    node1 = orm.CalculationNode().store()
    node2 = orm.CalculationNode().store()
    node1.seal()
    node2.seal()
    group_user = orm.Group(label=groups_label[0]).store()
    group_user.add_nodes([node1, node2])

    # Uploading the UPF family creates a group with the UPF type string.
    upload_upf_family(temp_dir, groups_label[1], '')
    group_upf = orm.load_group(groups_label[1])

    # Save uuids and type
    groups_uuid = [str(g.uuid) for g in [group_user, group_upf]]
    groups_type_string = [g.type_string for g in [group_user, group_upf]]

    # Assert correct type strings exists prior to export
    self.assertListEqual(groups_type_string, ['core', 'core.upf'])

    # Export node
    filename = os.path.join(temp_dir, 'export.aiida')
    export([group_user, group_upf], filename=filename)

    # Clean the database and reimport
    self.clean_db()
    import_data(filename)

    # Retrieve Groups and make sure exactly 3 are retrieved (including the "import group")
    builder = orm.QueryBuilder()
    builder.append(orm.Group, project=['uuid'])
    imported_groups = builder.all()

    self.assertEqual(builder.count(), 3)

    # Check uuids are the same after import
    imported_groups_uuid = [str(g[0]) for g in imported_groups]

    # We do not know the "import group"'s uuid, so go through known uuids
    for group_uuid in groups_uuid:
        self.assertIn(group_uuid, imported_groups_uuid)

        # Pop known uuid from imported_groups_uuid, eventually leaving
        # only the "import group"
        imported_groups_uuid.remove(group_uuid)

        # Load group
        imported_group = orm.load_group(group_uuid)

        # Check whether types are correctly imported
        self.assertIn(imported_group.type_string, groups_type_string)

        # Assert labels are imported correctly
        self.assertIn(imported_group.label, groups_label)

    # Check type_string content of "import group"
    import_group = orm.load_group(imported_groups_uuid[0])
    self.assertEqual(import_group.type_string, 'core.import')