def cmd_show(sssp_family, structure, raw):
    """Show details of a particular SSSP_FAMILY."""
    from tabulate import tabulate

    # Either restrict to the pseudos matching the given structure, or list the whole family.
    if structure:
        pseudos = sssp_family.get_pseudos(structure).values()
    else:
        pseudos = sssp_family.nodes

    # The family needs an associated parameters node to be able to report cutoffs.
    try:
        sssp_family.get_parameters_node()
    except exceptions.NotExistent:
        echo.echo_critical('{} does not have an associated `SsspParameters` node'.format(sssp_family))

    rows = [
        [pseudo.element, pseudo.filename] + list(sssp_family.get_cutoffs(elements=(pseudo.element,)))
        for pseudo in pseudos
    ]

    if raw:
        echo.echo(tabulate(sorted(rows), tablefmt='plain'))
    else:
        echo.echo(tabulate(sorted(rows), headers=['Element', 'Pseudo', 'Cutoff wfc', 'Cutoff rho']))
def quicksetup(
    ctx, non_interactive, profile, email, first_name, last_name, institution, db_engine, db_backend, db_host,
    db_port, db_name, db_username, db_password, su_db_name, su_db_username, su_db_password, repository
):
    """Setup a new profile in a fully automated fashion.

    Connects to PostgreSQL as the superuser, creates the database user and the database if they do
    not already exist, then delegates the rest of the profile creation to ``verdi setup``.
    """
    # pylint: disable=too-many-arguments,too-many-locals
    from aiida.manage.external.postgres import Postgres, manual_setup_instructions

    dbinfo_su = {
        'host': db_host,
        'port': db_port,
        'user': su_db_username,
        'password': su_db_password,
    }
    postgres = Postgres(interactive=not non_interactive, quiet=False, dbinfo=dbinfo_su)

    if not postgres.is_connected:
        echo.echo_critical('failed to determine the PostgreSQL setup')

    try:
        create = True
        if not postgres.dbuser_exists(db_username):
            postgres.create_dbuser(db_username, db_password)
        else:
            # User already exists: check whether the database name is free, possibly mangling it.
            db_name, create = postgres.check_db_name(db_name)
        if create:
            postgres.create_db(db_username, db_name)
    except Exception as exception:
        echo.echo_error(
            '\n'.join([
                'Oops! quicksetup was unable to create the AiiDA database for you.',
                'For AiiDA to work, please either create the database yourself as follows:',
                manual_setup_instructions(dbuser=su_db_username, dbname=su_db_name), '',
                # BUGFIX: a separating space was missing between the two concatenated fragments,
                # which produced "...databasesand run quicksetup again."
                'Alternatively, give your (operating system) user permission to create postgresql databases ' +
                'and run quicksetup again.', ''
            ])
        )
        raise exception

    # The contextual defaults or `verdi setup` are not being called when `invoking`, so we have to explicitly define
    # them here, even though the `verdi setup` command would populate those when called from the command line.
    setup_parameters = {
        'non_interactive': non_interactive,
        'profile': profile,
        'email': email,
        'first_name': first_name,
        'last_name': last_name,
        'institution': institution,
        'db_engine': db_engine,
        'db_backend': db_backend,
        'db_name': db_name,
        # from now on we connect as the AiiDA DB user, which may be forbidden when going via sockets
        'db_host': db_host or 'localhost',
        'db_port': db_port,
        'db_username': db_username,
        'db_password': db_password,
        'repository': repository,
    }
    ctx.invoke(setup, **setup_parameters)
def match_comp_transport(ctx, param, computer, transport_type):
    """Validate that COMPUTER uses the given TRANSPORT_TYPE, aborting the command otherwise."""
    actual = computer.transport_type
    if actual != transport_type:
        echo.echo_critical(f'Computer {computer.label} has transport of type "{actual}", not {transport_type}!')
    return computer
def _show_vesta(exec_name, structure_list):
    """Plugin for VESTA.

    This VESTA plugin was added by Yue-Wen FANG and Abel Carreras at Kyoto University
    in the group of Prof. Isao Tanaka's lab
    """
    import subprocess
    import tempfile

    # pylint: disable=protected-access
    with tempfile.NamedTemporaryFile(mode='w+b', suffix='.cif') as handle:
        # Concatenate the CIF export of every structure into one temporary file.
        for structure in structure_list:
            handle.write(structure._exportcontent('cif')[0])
        handle.flush()

        try:
            subprocess.check_output([exec_name, handle.name])
        except subprocess.CalledProcessError:
            # The program died: just print a message.
            echo.echo_info('the call to {} ended with an error.'.format(exec_name))
        except OSError as err:
            if err.errno == 2:
                echo.echo_critical(
                    "No executable '{}' found. Add to the path, "
                    'or try with an absolute path.'.format(exec_name)
                )
            else:
                raise
def _show_xcrysden(exec_name, object_list, **kwargs):
    """Plugin for xcrysden."""
    import subprocess
    import tempfile

    # xcrysden can only visualize a single trajectory at a time.
    if len(object_list) > 1:
        raise MultipleObjectsError('Visualization of multiple trajectories is not implemented')
    obj = object_list[0]

    # pylint: disable=protected-access
    with tempfile.NamedTemporaryFile(mode='w+b', suffix='.xsf') as handle:
        handle.write(obj._exportcontent('xsf', **kwargs)[0])
        handle.flush()

        try:
            subprocess.check_output([exec_name, '--xsf', handle.name])
        except subprocess.CalledProcessError:
            # The program died: just print a message.
            echo.echo_info('the call to {} ended with an error.'.format(exec_name))
        except OSError as err:
            if err.errno == 2:
                echo.echo_critical(
                    "No executable '{}' found. Add to the path, "
                    'or try with an absolute path.'.format(exec_name)
                )
            else:
                raise
def detect_duplicate_uuid(table, apply_patch):
    """Detect and fix entities with duplicate UUIDs.

    Before aiida-core v1.0.0 the UUID column of the node table (and a few other tables) carried no
    uniqueness constraint, so multiple rows could be stored with identical UUIDs. aiida-core==1.0.0
    added an explicit database-level uniqueness constraint, which leaves databases created before
    that release in an inconsistent state. This command analyses the given table for duplicate
    UUIDs and resolves them by generating fresh UUIDs; it never deletes or merges any rows.
    """
    from aiida.manage.database.integrity.duplicate_uuid import deduplicate_uuids
    from aiida.manage.manager import get_manager

    # Skip the schema check: this command must run on databases that are out of date.
    get_manager()._load_backend(schema_check=False)  # pylint: disable=protected-access

    try:
        messages = deduplicate_uuids(table=table, dry_run=not apply_patch)
    except Exception as exception:  # pylint: disable=broad-except
        echo.echo_critical('integrity check failed: {}'.format(str(exception)))
    else:
        for message in messages:
            echo.echo_info(message)

        outcome = 'integrity patch completed' if apply_patch else 'dry-run of integrity patch completed'
        echo.echo_success(outcome)
def _try_import(migration_performed, file_to_import, archive, group, migration, non_interactive, **kwargs):
    """Utility function for `verdi import` to try to import archive

    :param migration_performed: Boolean to determine the exception message to throw for
        `~aiida.tools.importexport.common.exceptions.IncompatibleArchiveVersionError`
    :param file_to_import: Absolute path, including filename, of file to be migrated.
    :param archive: Filename of archive to be migrated, and later attempted imported.
    :param group: AiiDA Group into which the import will be associated.
    :param migration: Whether or not to force migration of archive, if needed.
    :param non_interactive: Whether or not the user should be asked for input for any reason.
    :param kwargs: Key-word-arguments that _must_ contain:
        * `'extras_mode_existing'`: `import_data`'s `'extras_mode_existing'` keyword, determining import rules for
        Extras.
        * `'extras_mode_new'`: `import_data`'s `'extras_mode_new'` keyword, determining import rules for Extras.
        * `'comment_mode'`: `import_data`'s `'comment_mode'` keyword, determining import rules for Comments.
    :return: whether the archive should (still) be migrated and the import retried afterwards.
    """
    from aiida.tools.importexport import import_data, IncompatibleArchiveVersionError

    # Checks: fail early if any of the mandatory import-rule keywords is missing.
    expected_keys = ['extras_mode_existing', 'extras_mode_new', 'comment_mode']
    for key in expected_keys:
        if key not in kwargs:
            raise ValueError("{} needed for utility function '{}' to use in 'import_data'".format(key, '_try_import'))

    # Initialization
    migrate_archive = False

    try:
        import_data(file_to_import, group, **kwargs)
    except IncompatibleArchiveVersionError as exception:
        if migration_performed:
            # Migration has been performed, something is still wrong
            crit_message = '{} has been migrated, but it still cannot be imported.\n{}'.format(archive, exception)
            echo.echo_critical(crit_message)
        else:
            # Migration has not yet been tried.
            if migration:
                # Confirm migration: automatically in non-interactive mode, otherwise ask the user
                # (aborting the whole command if they decline).
                echo.echo_warning(str(exception).splitlines()[0])
                if non_interactive:
                    migrate_archive = True
                else:
                    migrate_archive = click.confirm(
                        'Do you want to try and migrate {} to the newest export file version?\n'
                        'Note: This will not change your current file.'.format(archive),
                        default=True,
                        abort=True
                    )
            else:
                # Abort
                echo.echo_critical(str(exception))
    except Exception:
        # Any other failure: show the traceback and, when interactive, let the user decide
        # whether to continue with the remaining archives.
        echo.echo_error('an exception occurred while importing the archive {}'.format(archive))
        echo.echo(traceback.format_exc())
        if not non_interactive:
            click.confirm('do you want to continue?', abort=True)
    else:
        echo.echo_success('imported archive {}'.format(archive))

    return migrate_archive
def _import_ase(filename, **kwargs):
    """Imports a structure in a number of formats using the ASE routines.

    :param filename: path of the file holding the structure
    :param kwargs: must contain the booleans ``store`` (store the node) and ``view`` (open in ASE viewer)
    """
    from os.path import abspath

    from aiida.orm.data.structure import StructureData

    try:
        import ase.io
    except ImportError:
        echo.echo_critical("You have not installed the package ase. \n"
                           "You can install it with: pip install ase")

    store = kwargs.pop('store')
    view_in_ase = kwargs.pop('view')

    echo.echo('importing structure from: \n {}'.format(abspath(filename)))
    filepath = abspath(filename)

    try:
        asecell = ase.io.read(filepath)
        new_structure = StructureData(ase=asecell)

        if store:
            new_structure.store()
        if view_in_ase:
            from ase.visualize import view
            view(new_structure.get_ase())
        # BUGFIX: corrected the typo 'Succesfully' in the user-facing message.
        echo.echo(' Successfully imported structure {}, '
                  '(PK = {})'.format(new_structure.get_formula(), new_structure.pk))
    except ValueError as err:
        # BUGFIX: pass a string, not the exception instance, to echo_critical.
        echo.echo_critical(str(err))
def show(nodes, given_format):
    """Visualize StructureData objects."""
    from aiida.cmdline.commands.cmd_data.cmd_show import _show_jmol
    from aiida.cmdline.commands.cmd_data.cmd_show import _show_ase
    from aiida.cmdline.commands.cmd_data.cmd_show import _show_vesta
    from aiida.cmdline.commands.cmd_data.cmd_show import _show_vmd
    from aiida.cmdline.commands.cmd_data.cmd_show import _show_xcrysden
    from aiida.orm.data.structure import StructureData

    # All nodes must be StructureData instances.
    for node in nodes:
        if not isinstance(node, StructureData):
            echo.echo_critical("Node {} is of class {} instead "
                               "of {}".format(node, type(node), StructureData))

    # Dispatch to the visualization plugin matching the requested format.
    handlers = {
        "ase": _show_ase,
        "jmol": _show_jmol,
        "vesta": _show_vesta,
        "vmd": _show_vmd,
        "xcrysden": _show_xcrysden,
    }
    handler = handlers.get(given_format)
    if handler is None:
        raise NotImplementedError("The format {} is not yet implemented".format(given_format))
    handler(given_format, nodes)
def detect_invalid_nodes():
    """Scan the database for invalid nodes."""
    from tabulate import tabulate

    from aiida.manage.database.integrity.sql.nodes import INVALID_NODE_SELECT_STATEMENTS
    from aiida.manage.manager import get_manager

    backend = get_manager().get_backend()
    violations_found = False

    # Run every prepared integrity statement; any returned rows are violations.
    for check in INVALID_NODE_SELECT_STATEMENTS:
        result = backend.execute_prepared_statement(check.sql, check.parameters)
        if not result:
            continue
        violations_found = True
        echo.echo_warning(f'{check.message}:\n')
        echo.echo(tabulate(result, headers=check.headers))

    if violations_found:
        echo.echo_critical('one or more integrity violations detected')
    else:
        echo.echo_success('no integrity violations detected')
def _import_pwi(filename, **kwargs):
    """Imports a structure from a quantumespresso input file.

    :param filename: path of the pw.x input file holding the structure
    :param kwargs: must contain the booleans ``store`` (store the node) and ``view`` (open in ASE viewer)
    """
    from os.path import abspath

    try:
        from qe_tools.parsers.pwinputparser import PwInputFile
    except ImportError:
        echo.echo_critical("You have not installed the package qe-tools. \n"
                           "You can install it with: pip install qe-tools")

    store = kwargs.pop('store')
    view_in_ase = kwargs.pop('view')

    echo.echo('importing structure from: \n {}'.format(abspath(filename)))
    filepath = abspath(filename)

    try:
        inputparser = PwInputFile(filepath)
        new_structure = inputparser.get_structuredata()

        if store:
            new_structure.store()
        if view_in_ase:
            from ase.visualize import view
            view(new_structure.get_ase())
        # BUGFIX: corrected the typo 'Succesfully' in the user-facing message.
        echo.echo(' Successfully imported structure {}, '
                  '(PK = {})'.format(new_structure.get_formula(), new_structure.pk))
    except ValueError as err:
        # BUGFIX: pass a string, not the exception instance, to echo_critical.
        echo.echo_critical(str(err))
def code_duplicate(ctx, code, non_interactive, **kwargs):
    """Duplicate a code allowing to change some parameters."""
    from aiida.common.exceptions import ValidationError
    from aiida.orm.utils.builders.code import CodeBuilder

    on_computer = kwargs.pop('on_computer')
    kwargs['code_type'] = CodeBuilder.CodeType.ON_COMPUTER if on_computer else CodeBuilder.CodeType.STORE_AND_UPLOAD

    # Optionally hide the code being duplicated.
    if kwargs.pop('hide_original'):
        code.hide()

    # Start from the builder pre-populated from the original code and overlay the overrides.
    code_builder = ctx.code_builder
    for key, value in kwargs.items():
        if value is not None:
            setattr(code_builder, key, value)
    new_code = code_builder.new()

    try:
        new_code.store()
        new_code.reveal()
    except ValidationError as exception:
        echo.echo_critical('Unable to store the Code: {}'.format(exception))

    echo.echo_success('Code<{}> {} created'.format(new_code.pk, new_code.full_label))
def rehash(nodes, entry_point):
    """Recompute the hash for nodes in the database.

    The set of nodes that will be rehashed can be filtered by their identifier and/or based on their class.
    """
    from aiida.orm.querybuilder import QueryBuilder

    if nodes:
        # Explicit nodes given: keep only those of the requested class, as 1-tuples.
        to_hash = [(node,) for node in nodes if isinstance(node, entry_point)]
    else:
        # No identifiers given: select every node of the requested class.
        query = QueryBuilder()
        query.append(entry_point, tag='node')
        to_hash = query.all()

    if not to_hash:
        echo.echo_critical('no matching nodes found')

    for index, (node,) in enumerate(to_hash):
        # Emit a progress dot every hundred nodes.
        if index % 100 == 0:
            echo.echo('.', nl=False)
        node.rehash()

    echo.echo('')
    echo.echo_success('{} nodes re-hashed'.format(len(to_hash)))
def _gather_imports(archives, webpages) -> List[Tuple[str, bool]]:
    """Gather archives to import and sort into local files and URLs.

    :returns: list of (archive path, whether it is web based)
    """
    from aiida.tools.importexport.common.utils import get_valid_import_links

    # Anything carrying an explicit HTTP(S) scheme is treated as web based.
    final_archives = [(archive, archive.startswith(('http://', 'https://'))) for archive in archives]

    # Discover and retrieve *.aiida files at URL(s)
    if webpages is not None:
        for webpage in webpages:
            try:
                echo.echo_info(f'retrieving archive URLS from {webpage}')
                urls = get_valid_import_links(webpage)
            except Exception as error:
                echo.echo_critical(
                    f'an exception occurred while trying to discover archives at URL {webpage}:\n{error}'
                )
            else:
                echo.echo_success(f'{len(urls)} archive URLs discovered and added')
                final_archives.extend((url, True) for url in urls)

    return final_archives
def generate_worker(computer, mpinp, name, output_file, category):
    """Generate a worker file for a particular computer."""
    # Only computers driven by a fireworks-family scheduler are supported.
    supported_schedulers = ("fireworks", "fireworks_scheduler.default", "fireworks_scheduler.keepenv")
    if computer.scheduler_type not in supported_schedulers:
        echo.echo_critical("Can only generate worker for computer using 'fireworks' scheduler.")
        return

    hostname = computer.hostname
    username = computer.get_configuration().get('username', DEFAULT_USERNAME)

    if name is None:
        name = f"Worker on {hostname} for {username} with mpinp: {mpinp}"

    worker = AiiDAFWorker(computer_id=hostname, mpinp=mpinp, username=username, name=name, category=category)
    worker.to_file(output_file)
def calculation_plugins(entry_point):
    """Print a list of registered calculation plugins or details of a specific calculation plugin."""
    import json

    from aiida.common.exceptions import LoadingPluginFailed, MissingPluginError
    from aiida.plugins.entry_point import get_entry_point_names, load_entry_point

    if not entry_point:
        # No entry point requested: list everything that is registered.
        names = get_entry_point_names('aiida.calculations')
        if not names:
            echo.echo_error('No calculation plugins found')
            return
        echo.echo('Registered calculation entry points:')
        for name in names:
            echo.echo("* {}".format(name))
        echo.echo('')
        echo.echo_info('Pass the entry point as an argument to display detailed information')
        return

    # A specific entry point was requested: load it and print its details.
    try:
        plugin = load_entry_point('aiida.calculations', entry_point)
    except (LoadingPluginFailed, MissingPluginError) as exception:
        echo.echo_critical(exception)
    else:
        echo.echo_info(entry_point)
        echo.echo_info(plugin.__doc__ if plugin.__doc__ else 'no docstring available')
        echo.echo(json.dumps(plugin.process().get_description(), indent=4))
def setup_code(non_interactive, **kwargs):
    """Add a Code."""
    from aiida.common.exceptions import ValidationError

    # In interactive mode, let the user edit the prepend/append scripts.
    if not non_interactive:
        pre, post = ensure_scripts(kwargs.pop('prepend_text', ''), kwargs.pop('append_text', ''), kwargs)
        kwargs['prepend_text'] = pre
        kwargs['append_text'] = post

    on_computer = kwargs.pop('on_computer')
    kwargs['code_type'] = CodeBuilder.CodeType.ON_COMPUTER if on_computer else CodeBuilder.CodeType.STORE_AND_UPLOAD

    code = CodeBuilder(**kwargs).new()

    try:
        code.store()
        code.reveal()  # newly setup code shall not be hidden
    except ValidationError as err:
        echo.echo_critical('unable to store the code: {}. Exiting...'.format(err))

    echo.echo_success('code "{}" stored in DB.'.format(code.label))
    echo.echo_info('pk: {}, uuid: {}'.format(code.pk, code.uuid))
def calculation_inputcat(calculation, path):
    """
    Show the contents of a file with relative PATH in the raw input folder of the CALCULATION.

    If PATH is not specified, the default input file path will be used, if defined by the calculation plugin class.
    """
    from aiida.cmdline.utils.repository import cat_repo_files
    from aiida.plugins.entry_point import get_entry_point_from_class

    if path is None:
        # Fall back to the plugin's default input file, aborting if it defines none.
        path = calculation._DEFAULT_INPUT_FILE

        if path is None:
            cls = calculation.__class__
            _, entry_point = get_entry_point_from_class(cls.__module__, cls.__name__)
            echo.echo_critical(
                '{} does not define a default input file. Please specify a path explicitly'.format(entry_point.name)
            )

    try:
        cat_repo_files(calculation, os.path.join('raw_input', path))
    except ValueError as exception:
        # BUGFIX: exceptions have no `.message` attribute in Python 3; use str() instead.
        echo.echo_critical(str(exception))
    except IOError as exception:
        import errno
        # Ignore Broken pipe errors, re-raise everything else
        if exception.errno == errno.EPIPE:
            pass
        else:
            raise
def start(foreground, number):
    """Start the daemon with NUMBER workers.

    If the NUMBER of desired workers is not specified, the default is used, which is determined by the
    configuration option `daemon.default_workers`, which if not explicitly changed defaults to 1.
    """
    from aiida.engine.daemon.client import get_daemon_client

    client = get_daemon_client()

    echo.echo('Starting the daemon... ', nl=False)

    # Build the circus start command, optionally keeping the daemon in the foreground.
    command = ['verdi', '-p', client.profile.name, 'daemon', _START_CIRCUS_COMMAND]
    if foreground:
        command.append('--foreground')
    command.append(str(number))

    try:
        currenv = get_env_with_venv_bin()
        subprocess.check_output(command, env=currenv, stderr=subprocess.STDOUT)  # pylint: disable=unexpected-keyword-arg
    except subprocess.CalledProcessError as exception:
        click.secho('FAILED', fg='red', bold=True)
        echo.echo_critical(str(exception))

    # We add a small timeout to give the pid-file a chance to be created
    with spinner():
        time.sleep(1)
        response = client.get_status()

    print_client_response_status(response)
def calculation_outputls(calculation, path, color):
    """
    Show the list of files in the directory with relative PATH in the raw input folder of the CALCULATION.

    If PATH is not specified, the base path of the retrieved folder will be used.
    Content can only be shown after the daemon has retrieved the remote files.
    """
    from aiida.cmdline.utils.repository import list_repo_files

    if path is not None:
        fullpath = os.path.join(calculation._path_subfolder_name, path)
    else:
        fullpath = calculation._path_subfolder_name

    try:
        retrieved = calculation.out.retrieved
    except AttributeError:
        echo.echo_critical(
            "No 'retrieved' node found. Have the calculation files already been retrieved?"
        )

    try:
        list_repo_files(retrieved, fullpath, color)
    except ValueError as exception:
        # BUGFIX: exceptions have no `.message` attribute in Python 3; use str() instead.
        echo.echo_critical(str(exception))
def exportfamily(folder, group_name):
    """Export a pseudopotential family into a folder.

    Call without parameters to get some help.
    """
    import os

    from aiida.common.exceptions import NotExistent
    from aiida.orm import DataFactory

    # pylint: disable=invalid-name
    UpfData = DataFactory('upf')
    try:
        group = UpfData.get_upf_group(group_name)
    except NotExistent:
        echo.echo_critical("upf family {} not found".format(group_name))

    # pylint: disable=protected-access
    for node in group.nodes:
        dest_path = os.path.join(folder, node.filename)
        # Never overwrite files already present in the destination folder.
        if os.path.isfile(dest_path):
            echo.echo_warning("File {} is already present in the "
                              "destination folder".format(node.filename))
            continue
        with open(dest_path, 'w') as dest:
            with node._get_folder_pathsubfolder.open(node.filename) as source:
                dest.write(source.read())
def group_delete(group, clear, force):
    """Delete a group.

    A group that still contains nodes cannot be deleted; pass the `-c/--clear` flag to empty it
    first. In any case, the nodes themselves will not actually be deleted from the database.
    """
    from aiida import orm

    label = group.label

    if group.count() > 0 and not clear:
        echo.echo_critical((
            'Group<{}> contains {} nodes. Pass `--clear` if you want to empty it before deleting the group'
            .format(label, group.count())
        ))

    # Unless forced, ask for confirmation (aborts the command on decline).
    if not force:
        click.confirm('Are you sure to delete Group<{}>?'.format(label), abort=True)

    if clear:
        group.clear()

    orm.Group.objects.delete(group.pk)
    echo.echo_success('Group<{}> deleted.'.format(label))
def _show_jmol(exec_name, trajectory_list, **kwargs):
    """Plugin for jmol."""
    import subprocess
    import tempfile

    # pylint: disable=protected-access
    with tempfile.NamedTemporaryFile(mode='w+b') as handle:
        # Concatenate the CIF export of every trajectory into one temporary file.
        for trajectory in trajectory_list:
            handle.write(trajectory._exportcontent('cif', **kwargs)[0])
        handle.flush()

        try:
            subprocess.check_output([exec_name, handle.name])
        except subprocess.CalledProcessError:
            # The program died: just print a message.
            echo.echo_info('the call to {} ended with an error.'.format(exec_name))
        except OSError as err:
            if err.errno == 2:
                echo.echo_critical(
                    "No executable '{}' found. Add to the path, "
                    'or try with an absolute path.'.format(exec_name)
                )
            else:
                raise
def _show_vmd(exec_name, structure_list):
    """Plugin for vmd."""
    import subprocess
    import tempfile

    # vmd can only visualize a single object at a time.
    if len(structure_list) > 1:
        raise MultipleObjectsError("Visualization of multiple objects "
                                   "is not implemented")
    structure = structure_list[0]

    # pylint: disable=protected-access
    with tempfile.NamedTemporaryFile(suffix='.xsf') as handle:
        # NOTE(review): this calls `_exportstring` while the sibling plugins use `_exportcontent`;
        # presumably this targets an older Data API -- confirm before unifying.
        handle.write(structure._exportstring('xsf')[0])
        handle.flush()

        try:
            subprocess.check_output([exec_name, handle.name])
        except subprocess.CalledProcessError:
            # The program died: just print a message.
            echo.echo_info("the call to {} ended with an error.".format(exec_name))
        except OSError as err:
            if err.errno == 2:
                echo.echo_critical(
                    "No executable '{}' found. Add to the path, "
                    "or try with an absolute path.".format(exec_name)
                )
            else:
                raise
def import_aiida_xyz(filename, vacuum_factor, vacuum_addition, pbc, dry_run):
    """
    Import structure in XYZ format using AiiDA's internal importer
    """
    from aiida.orm import StructureData

    with open(filename, encoding='utf8') as fobj:
        xyz_txt = fobj.read()

    new_structure = StructureData()

    # Translate the 0/1 periodic-boundary flags into booleans, rejecting anything else.
    pbc_bools = []
    for flag in pbc:
        if flag not in (0, 1):
            raise click.BadParameter('values for pbc must be either 0 or 1', param_hint='pbc')
        pbc_bools.append(bool(flag))

    try:
        new_structure._parse_xyz(xyz_txt)  # pylint: disable=protected-access
        new_structure._adjust_default_cell(  # pylint: disable=protected-access
            vacuum_addition=vacuum_addition,
            vacuum_factor=vacuum_factor,
            pbc=pbc_bools)
    except (ValueError, TypeError) as err:
        echo.echo_critical(str(err))

    _store_structure(new_structure, dry_run)
def match_comp_transport(ctx, param, computer, transport_type):
    """Check the computer argument against the transport type."""
    actual = computer.get_transport_type()
    if actual != transport_type:
        echo.echo_critical(
            'Computer {} has transport of type "{}", not {}!'.format(computer.name, actual, transport_type)
        )
    return computer
def setup_code(non_interactive, **kwargs):
    """Setup a new code."""
    from aiida.common.exceptions import ValidationError
    from aiida.orm.utils.builders.code import CodeBuilder

    # In interactive mode, let the user edit the prepend/append scripts.
    if not non_interactive:
        try:
            pre, post = ensure_scripts(kwargs.pop('prepend_text', ''), kwargs.pop('append_text', ''), kwargs)
        except InputValidationError as exception:
            raise click.BadParameter('invalid prepend and or append text: {}'.format(exception))
        kwargs['prepend_text'] = pre
        kwargs['append_text'] = post

    on_computer = kwargs.pop('on_computer')
    kwargs['code_type'] = CodeBuilder.CodeType.ON_COMPUTER if on_computer else CodeBuilder.CodeType.STORE_AND_UPLOAD

    code = CodeBuilder(**kwargs).new()

    try:
        code.store()
        code.reveal()
    except ValidationError as exception:
        echo.echo_critical('Unable to store the Code: {}'.format(exception))

    echo.echo_success('Code<{}> {} created'.format(code.pk, code.full_label))
def calcjob_inputcat(calcjob, path):
    """
    Show the contents of one of the calcjob input files.

    You can specify the relative PATH in the raw input folder of the CalcJob.

    If PATH is not specified, the default input file path will be used, if defined by the calcjob plugin class.
    """
    # Resolution order for the default path: node option first, then the
    # default declared on the current process class, then give up.
    if path is None:
        path = calcjob.get_option('input_filename')

    if path is None:
        option = calcjob.process_class.spec_options.get('input_filename')
        if option and option.has_default():
            path = option.default

    if path is None:
        echo.echo_critical(
            '"{}" and its process class "{}" do not define a default input file '
            '(option "input_filename" not found).\n'
            'Please specify a path explicitly.'.format(
                calcjob.__class__.__name__, calcjob.process_class.__name__))

    try:
        content = calcjob.get_object_content(path)
    except (IOError, OSError) as exception:
        # Incorrect path or file not readable
        echo.echo_critical(
            'Could not open input path "{}". Exception: {}'.format(
                path, exception))
    else:
        echo.echo(content)
def profile_list():
    """Display a list of all available profiles."""
    try:
        config = get_config()
    except (exceptions.MissingConfigurationError, exceptions.ConfigurationError) as exception:
        # This can happen for a fresh install and the `verdi setup` has not yet been run. In this case it is still nice
        # to be able to see the configuration directory, for instance for those who have set `AIIDA_PATH`. This way
        # they can at least verify that it is correctly set.
        from aiida.manage.configuration.settings import AIIDA_CONFIG_FOLDER
        echo.echo_info('configuration folder: {}'.format(AIIDA_CONFIG_FOLDER))
        echo.echo_critical(str(exception))
    else:
        echo.echo_info('configuration folder: {}'.format(config.dirpath))

        if not config.profiles:
            echo.echo_warning('no profiles configured: run `verdi setup` to create one')
            return

        default_name = config.default_profile_name
        echo.echo_formatted_list(
            config.profiles, ['name'],
            sort=lambda profile: profile.name,
            highlight=lambda profile: profile.name == default_name
        )
def inspect(archive, version, data, meta_data):
    """Inspect contents of an exported archive without importing it.

    By default a summary of the archive contents will be printed. The various options can be used to
    change exactly what information is displayed.
    """
    from aiida.tools.importexport import Archive, CorruptArchive

    with Archive(archive) as archive_object:
        try:
            if version:
                echo.echo(archive_object.version_format)
            elif data:
                echo.echo_dictionary(archive_object.data)
            elif meta_data:
                echo.echo_dictionary(archive_object.meta_data)
            else:
                # Summary: general info followed by per-entity statistics, both sorted.
                info = archive_object.get_info()
                summary = sorted((key.capitalize(), value) for key, value in info.items())
                statistics = archive_object.get_data_statistics()
                summary.extend(sorted((key.capitalize(), value) for key, value in statistics.items()))
                echo.echo(tabulate.tabulate(summary))
        except CorruptArchive as exception:
            echo.echo_critical('corrupt archive: {}'.format(exception))