def setUpClass(cls):
    """
    Besides the standard setup we need to add a few more objects in the
    database to be able to explore different requests/filters/orderings etc.
    """
    # call parent setUpClass method
    super(RESTApiTestCase, cls).setUpClass()

    # connect the app and the api
    # Init the api by connecting it to the app (N.B. respect the following
    # order, api.__init__)
    kwargs = dict(PREFIX=cls._url_prefix,
                  PERPAGE_DEFAULT=cls._PERPAGE_DEFAULT,
                  LIMIT_DEFAULT=cls._LIMIT_DEFAULT)

    cls.app = App(__name__)
    cls.app.config['TESTING'] = True
    # The Api registers itself on the app during construction; the return
    # value is not needed afterwards (previously bound to an unused local).
    AiidaApi(cls.app, **kwargs)

    # create test inputs
    cell = ((2., 0., 0.), (0., 2., 0.), (0., 0., 2.))
    structure = StructureData(cell=cell)
    structure.append_atom(position=(0., 0., 0.), symbols=['Ba'])
    structure.store()

    cif = CifData(ase=structure.get_ase())
    cif.store()

    parameter1 = ParameterData(dict={"a": 1, "b": 2})
    parameter1.store()

    parameter2 = ParameterData(dict={"c": 3, "d": 4})
    parameter2.store()

    kpoint = KpointsData()
    kpoint.set_kpoints_mesh([4, 4, 4])
    kpoint.store()

    calc = Calculation()
    calc._set_attr("attr1", "OK")
    calc._set_attr("attr2", "OK")
    calc.store()

    # Link inputs to the calculation, and the calculation to its output
    calc.add_link_from(structure)
    calc.add_link_from(parameter1)
    kpoint.add_link_from(calc, link_type=LinkType.CREATE)

    # A second, bare calculation with no links
    calc1 = Calculation()
    calc1.store()

    from aiida.orm.computer import Computer

    # Dummy computers covering different transport/scheduler combinations,
    # used to exercise filters and orderings in the REST API tests.
    dummy_computers = [{
        "name": "test1",
        "hostname": "test1.epfl.ch",
        "transport_type": "ssh",
        "scheduler_type": "pbspro",
    }, {
        "name": "test2",
        "hostname": "test2.epfl.ch",
        "transport_type": "ssh",
        "scheduler_type": "torque",
    }, {
        "name": "test3",
        "hostname": "test3.epfl.ch",
        "transport_type": "local",
        "scheduler_type": "slurm",
    }, {
        "name": "test4",
        "hostname": "test4.epfl.ch",
        "transport_type": "ssh",
        "scheduler_type": "slurm",
    }]

    for dummy_computer in dummy_computers:
        computer = Computer(**dummy_computer)
        computer.store()

    # Prepare typical REST responses
    cls.process_dummy_data()
def run_api(App, Api, *args, **kwargs):
    """
    Takes a flask.Flask instance and runs it. Parses
    command-line flags to configure the app.

    App: Class inheriting from Flask app class
    Api: flask_restful API class to be used to wrap the app

    *args: required by argparse

    **kwargs: list of valid parameters:
        prog_name: name of the command before arguments are parsed. Useful
            when the api is embedded in a command, such as verdi restapi
        default_host: self-explanatory
        default_port: self-explanatory
        default_config_dir: directory containing the config.py file used to
            configure the RESTapi
        parse_aiida_profile: if True, parses an option to specify the AiiDA
            profile
        catch_internal_server: if True, the app will catch internal server
            errors instead of letting them propagate
        hookup: if True, runs the app; otherwise the (app, api) pair is
            returned for an external server to handle

    All other passed parameters are ignored.
    """
    import aiida  # Mainly needed to locate the correct aiida path

    # Unpack parameters and assign defaults if needed
    prog_name = kwargs.get('prog_name', "")
    default_host = kwargs.get('default_host', "127.0.0.1")
    default_port = kwargs.get('default_port', "5000")
    default_config_dir = kwargs.get(
        'default_config_dir',
        os.path.join(os.path.split(os.path.abspath(aiida.restapi.__file__))[0],
                     'common'))
    parse_aiida_profile = kwargs.get('parse_aiida_profile', False)
    catch_internal_server = kwargs.get('catch_internal_server', False)
    hookup = kwargs.get('hookup', False)

    # Set up the command-line options
    parser = argparse.ArgumentParser(prog=prog_name,
                                     description='Hook up the AiiDA '
                                                 'RESTful API')

    parser.add_argument("-H", "--host",
                        help="Hostname of the Flask app " +
                             "[default %s]" % default_host,
                        dest='host',
                        default=default_host)
    parser.add_argument("-P", "--port",
                        help="Port for the Flask app " +
                             "[default %s]" % default_port,
                        dest='port',
                        default=default_port)
    parser.add_argument("-c", "--config-dir",
                        help="Directory with config.py for Flask app " +
                             "[default {}]".format(default_config_dir),
                        dest='config_dir',
                        default=default_config_dir)

    # This one is included only if necessary
    if parse_aiida_profile:
        parser.add_argument("-p", "--aiida-profile",
                            help="AiiDA profile to expose through the RESTful "
                                 "API [default: the default AiiDA profile]",
                            dest="aiida_profile",
                            default=None)

    # Two options useful for debugging purposes, but
    # a bit dangerous so not exposed in the help message.
    parser.add_argument("-d", "--debug",
                        action="store_true", dest="debug",
                        help=argparse.SUPPRESS)
    parser.add_argument("-w", "--wsgi-profile",
                        action="store_true", dest="wsgi_profile",
                        help=argparse.SUPPRESS)

    parsed_args = parser.parse_args(args)

    # Import the right configuration file
    confs = imp.load_source(os.path.join(parsed_args.config_dir, 'config'),
                            os.path.join(parsed_args.config_dir, 'config.py'))

    import aiida.backends.settings as settings

    # Set the AiiDA profile.
    #
    # General logic: if aiida_profile is parsed the following cases exist:
    #   aiida_profile:
    #     "default"  --> default profile set in .aiida/config.json
    #     <profile>  --> corresponding profile in .aiida/config.json
    #     None       --> default restapi profile set in <config_dir>/config.py
    # If aiida_profile is not parsed we assume the default restapi profile set
    # in <config_dir>/config.py
    if parse_aiida_profile and parsed_args.aiida_profile is not None:
        aiida_profile = parsed_args.aiida_profile
    elif confs.default_aiida_profile is not None:
        aiida_profile = confs.default_aiida_profile
    else:
        aiida_profile = "default"

    # If "default", leave settings untouched so the default of
    # .aiida/config.json will be used
    if aiida_profile != "default":
        settings.AIIDADB_PROFILE = aiida_profile

    # Set the AiiDA environment. If already loaded, load_dbenv would raise an
    # exception
    if not is_dbenv_loaded():
        load_dbenv()

    # Instantiate an app
    app_kwargs = dict(catch_internal_server=catch_internal_server)
    app = App(__name__, **app_kwargs)

    # Config the app
    app.config.update(**confs.APP_CONFIG)

    # Enable CORS for all origins under the API prefix; the extension
    # registers itself on the app, no reference needs to be kept
    cors_prefix = os.path.join(confs.PREFIX, "*")
    CORS(app, resources={r"" + cors_prefix: {"origins": "*"}})

    # Config the serializer used by the app
    if confs.SERIALIZER_CONFIG:
        from aiida.restapi.common.utils import CustomJSONEncoder
        app.json_encoder = CustomJSONEncoder

    # If the user selects the profiling option, then we need
    # to do a little extra setup
    if parsed_args.wsgi_profile:
        from werkzeug.contrib.profiler import ProfilerMiddleware

        app.config['PROFILE'] = True
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app,
                                          restrictions=[30])

    # Instantiate an Api by associating its app
    api_kwargs = dict(PREFIX=confs.PREFIX,
                      PERPAGE_DEFAULT=confs.PERPAGE_DEFAULT,
                      LIMIT_DEFAULT=confs.LIMIT_DEFAULT,
                      custom_schema=confs.custom_schema)
    api = Api(app, **api_kwargs)

    # Check if the app has to be hooked-up or just returned
    if hookup:
        api.app.run(debug=parsed_args.debug,
                    host=parsed_args.host,
                    port=int(parsed_args.port),
                    threaded=True)
    else:
        # here we return the app, and the api with no specifications on debug
        # mode, port and host. This can be handled by an external server,
        # e.g. apache2, which will set the host and port. This implies that
        # the user-defined configuration of the app is ineffective (it only
        # affects the internal werkzeug server used by Flask).
        return (app, api)
def setUpClass(cls, *args, **kwargs):  # pylint: disable=too-many-locals, too-many-statements
    """
    Besides the standard setup we need to add a few more objects in the
    database to be able to explore different requests/filters/orderings etc.
    """
    # call parent setUpClass method
    super(RESTApiTestCase, cls).setUpClass()

    # connect the app and the api
    # Init the api by connecting it the the app (N.B. respect the following
    # order, api.__init__)
    kwargs = dict(PREFIX=cls._url_prefix,
                  PERPAGE_DEFAULT=cls._PERPAGE_DEFAULT,
                  LIMIT_DEFAULT=cls._LIMIT_DEFAULT)

    cls.app = App(__name__)
    cls.app.config['TESTING'] = True
    # The Api registers its resources on the app; the instance itself is not
    # needed afterwards
    AiidaApi(cls.app, **kwargs)

    # create test inputs
    cell = ((2., 0., 0.), (0., 2., 0.), (0., 0., 2.))
    structure = orm.StructureData(cell=cell)
    structure.append_atom(position=(0., 0., 0.), symbols=['Ba'])
    structure.store()
    structure.add_comment('This is test comment.')
    structure.add_comment('Add another comment.')

    cif = orm.CifData(ase=structure.get_ase())
    cif.store()

    parameter1 = orm.Dict(dict={'a': 1, 'b': 2})
    parameter1.store()

    parameter2 = orm.Dict(dict={'c': 3, 'd': 4})
    parameter2.store()

    kpoint = orm.KpointsData()
    kpoint.set_kpoints_mesh([4, 4, 4])
    kpoint.store()

    resources = {'num_machines': 1, 'num_mpiprocs_per_machine': 1}

    calcfunc = orm.CalcFunctionNode(computer=cls.computer)
    calcfunc.store()

    # A calculation job with attributes, extras and two incoming inputs;
    # inputs must be linked before the node is stored
    calc = orm.CalcJobNode(computer=cls.computer)
    calc.set_option('resources', resources)
    calc.set_attribute('attr1', 'OK')
    calc.set_attribute('attr2', 'OK')
    calc.set_extra('extra1', False)
    calc.set_extra('extra2', 'extra_info')

    calc.add_incoming(structure, link_type=LinkType.INPUT_CALC, link_label='link_structure')
    calc.add_incoming(parameter1, link_type=LinkType.INPUT_CALC, link_label='link_parameter')

    aiida_in = 'The input file\nof the CalcJob node'
    # Add the calcjob_inputs folder with the aiida.in file to the CalcJobNode repository
    with tempfile.NamedTemporaryFile(mode='w+') as handle:
        handle.write(aiida_in)
        handle.flush()
        handle.seek(0)
        calc.put_object_from_filelike(handle, key='calcjob_inputs/aiida.in', force=True)
    calc.store()

    # create log message for calcjob
    import logging
    from aiida.common.log import LOG_LEVEL_REPORT
    from aiida.common.timezone import now
    from aiida.orm import Log

    log_record = {
        'time': now(),
        'loggername': 'loggername',
        'levelname': logging.getLevelName(LOG_LEVEL_REPORT),
        'dbnode_id': calc.id,
        'message': 'This is a template record message',
        'metadata': {
            'content': 'test'
        },
    }
    # NOTE(review): constructing the Log appears to persist it as a side
    # effect — the instance is deliberately not kept
    Log(**log_record)

    aiida_out = 'The output file\nof the CalcJob node'
    retrieved_outputs = orm.FolderData()
    # Add the calcjob_outputs folder with the aiida.out file to the FolderData node
    with tempfile.NamedTemporaryFile(mode='w+') as handle:
        handle.write(aiida_out)
        handle.flush()
        handle.seek(0)
        retrieved_outputs.put_object_from_filelike(
            handle, key='calcjob_outputs/aiida.out', force=True)
    retrieved_outputs.store()
    retrieved_outputs.add_incoming(calc, link_type=LinkType.CREATE, link_label='retrieved')

    kpoint.add_incoming(calc, link_type=LinkType.CREATE, link_label='create')

    # A second, bare calculation job with no links
    calc1 = orm.CalcJobNode(computer=cls.computer)
    calc1.set_option('resources', resources)
    calc1.store()

    # Dummy computers covering different transport/scheduler combinations,
    # used to exercise filters and orderings in the REST API tests
    dummy_computers = [{
        'name': 'test1',
        'hostname': 'test1.epfl.ch',
        'transport_type': 'ssh',
        'scheduler_type': 'pbspro',
    }, {
        'name': 'test2',
        'hostname': 'test2.epfl.ch',
        'transport_type': 'ssh',
        'scheduler_type': 'torque',
    }, {
        'name': 'test3',
        'hostname': 'test3.epfl.ch',
        'transport_type': 'local',
        'scheduler_type': 'slurm',
    }, {
        'name': 'test4',
        'hostname': 'test4.epfl.ch',
        'transport_type': 'ssh',
        'scheduler_type': 'slurm',
    }]

    for dummy_computer in dummy_computers:
        computer = orm.Computer(**dummy_computer)
        computer.store()

    # Prepare typical REST responses
    cls.process_dummy_data()