def __read_plugin_config_file(self):
    """
    Reads the plugin config file and returns its parsed yaml content.

    Raises:
        UserError: If the file cannot be read or is not valid yaml.
    """
    logger.info('Reading plugin config file %s', self.__plugin_config)
    try:
        with open(self.__plugin_config, 'rb') as f:
            try:
                return yaml.safe_load(f)
            except yaml.YAMLError as err:
                #
                # Always raise on a parse failure. Previously a YAMLError
                # without a 'problem_mark' was silently swallowed and the
                # function returned None; now the position is simply
                # omitted when the parser doesn't provide one.
                #
                position = ''
                if hasattr(err, 'problem_mark'):
                    mark = err.problem_mark
                    position = 'Error position: {}:{}'.format(
                        mark.line + 1, mark.column + 1)
                raise exceptions.UserError(
                    'Command failed because the plugin config file '
                    'provided as input \'{}\' was not valid yaml. '
                    'Verify the file contents. '
                    '{}'.format(self.__plugin_config, position))
    except (IOError, OSError) as err:
        raise exceptions.UserError(
            'Unable to read plugin config file \'{}\''
            '\nError code: {}. Error message: {}'.format(
                self.__plugin_config, err.errno, os.strerror(err.errno)))
def __validate_plugin_config_content(self):
    """
    Validates the plugin configuration against the plugin config schema.

    The config is expected to carry keys such as id, name, version,
    hostTypes, entryPoint, srcDir, schemaFile, manualDiscovery, pluginType
    and language; the exact contract lives in the json schema file.

    Raises:
        UserError: If the schema file cannot be read or parsed.
        SchemaValidationError: If the configuration fails validation.
    """
    # Load the json schema that describes a valid plugin config.
    try:
        with open(self.__plugin_config_schema, 'r') as f:
            try:
                plugin_schema = json.load(f)
            except ValueError as err:
                raise exceptions.UserError(
                    'Failed to load schemas because {!r} is not a '
                    'valid json file. Error: {}'.format(
                        self.__plugin_config_schema, err))
    except (IOError, OSError) as err:
        raise exceptions.UserError(
            'Unable to read plugin config schema file {!r}'
            '\nError code: {}. Error message: {}'.format(
                self.__plugin_config_schema, err.errno,
                os.strerror(err.errno)))

    # Round-trip through json so the config content is plain json types.
    plugin_config_json = json.loads(json.dumps(self.__plugin_config_content))

    validator = Draft7Validator(plugin_schema)
    #
    # iter_errors performs lazy validation, which lets us consolidate every
    # problem with the config and report them all at once rather than
    # failing on the first.
    #
    validation_errors = sorted(validator.iter_errors(plugin_config_json),
                               key=str)
    if validation_errors:
        raise exceptions.SchemaValidationError(self.__plugin_config,
                                               validation_errors)
def __validate_plugin_entry_point(self):
    """
    Parses the configured entry point and verifies that both the module
    and the object halves are non-empty.

    Raises:
        UserError: If either half of the entry point is missing.
    """
    module_name, object_name = self.split_entry_point(
        self.__plugin_config_content['entryPoint'])

    if not module_name:
        raise exceptions.UserError('Plugin module is invalid')
    if not object_name:
        raise exceptions.UserError('Plugin object is invalid')
def test_build_generate_artifact_fail(
        mock_generate_python,
        mock_plugin_manifest,
        mock_gen_artifact,
        plugin_config_file,
        artifact_file,
        codegen_gen_py_inputs,
):
    """Build should fail cleanly when artifact generation raises."""
    inputs = codegen_gen_py_inputs

    # Sanity check: the artifact must not exist before the build runs.
    assert not os.path.exists(artifact_file)

    # Make artifact generation blow up with a UserError.
    mock_gen_artifact.side_effect = exceptions.UserError(
        "generate_artifact_error")

    with pytest.raises(exceptions.BuildFailedError) as err_info:
        build.build(plugin_config_file, artifact_file, False, False)

    message = err_info.value.message

    # Everything up to artifact generation should still have run.
    mock_generate_python.assert_called_once_with(
        inputs.name,
        inputs.source_dir,
        inputs.plugin_content_dir,
        inputs.schema_dict,
    )
    mock_plugin_manifest.assert_called()
    mock_gen_artifact.assert_called()

    # The failed build must not have produced an artifact file.
    assert not os.path.exists(artifact_file)
    assert "generate_artifact_error" in message
    assert "BUILD FAILED" in message
def __validate_plugin_entry_point(self, src_dir):
    """
    Validates the plugin entry point by importing the plugin module and
    looking up the entry point object, storing the resulting manifest and
    any import warnings on the instance.

    Args:
        src_dir: Path the plugin module is imported from.

    Raises:
        UserError: If the module cannot be imported.
    """
    entry_point_field = self.__plugin_config_content['entryPoint']
    # Config schema validation guarantees the 'module:object' format here.
    entry_point_strings = entry_point_field.split(':')

    # Get the module and entry point name to import
    entry_point_module = entry_point_strings[0]
    entry_point_object = entry_point_strings[1]
    plugin_type = self.__plugin_config_content['pluginType']
    try:
        self.__plugin_manifest, self.__warnings = (
            PluginValidator.__import_plugin(src_dir, entry_point_module,
                                            entry_point_object, plugin_type))
    except ImportError as err:
        #
        # Report the full entryPoint value after 'pluginEntryPoint'; the
        # old message formatted only the object half there, which did not
        # match what the user actually wrote in the config.
        #
        raise exceptions.UserError(
            'Unable to load module \'{}\' specified in '
            'pluginEntryPoint \'{}\' from path \'{}\'. '
            'Error message: {}'.format(entry_point_module,
                                       entry_point_field, src_dir, err))
    logger.debug("Got manifest %s", self.__plugin_manifest)
def __check_for_undefined_names(self):
    """
    Flags undefined names in the plugin module (missing imports,
    references to nonexistent variables, etc.) via flake8 check F821.

    ..note::
        We are using the legacy flake8 api, because there is currently
        no public, stable api for flake8 >= 3.0.0. For more info, see
        https://flake8.pycqa.org/en/latest/user/python-api.html

    Raises:
        ValidationFailedError: If any undefined name is found.
    """
    undefined_warnings = defaultdict(list)
    src_dir = self.__plugin_config_content['srcDir']
    exclude_dir = os.path.sep.join([src_dir, CODEGEN_PACKAGE])

    style_guide = flake8.get_style_guide(select=["F821"],
                                         exclude=[exclude_dir],
                                         quiet=1)
    style_guide.check_files(paths=[src_dir])

    # NOTE(review): _application is a private flake8 attribute; the legacy
    # api offers no public way to reach individual check results.
    checkers = style_guide._application.file_checker_manager.checkers
    for checker in checkers:
        #
        # Per the flake8 api code, each result tuple is:
        # (error_code, line_number, column, text, physical_line)
        #
        for code, line, _, text, _ in checker.results:
            if code == 'F821':
                msg = "{} on line {} in {}".format(text, line,
                                                   checker.filename)
                undefined_warnings['exception'].append(
                    exceptions.UserError(msg))

    if undefined_warnings:
        raise exceptions.ValidationFailedError(undefined_warnings)
def zip_and_encode_source_files(source_code_dir): """ Given a path, returns a zip file of all non .py files as a base64 encoded string *.py files are skipped to imitate the SDK's build script. We skip them because they cannot be imported in the secure context. Jython creates a class loader to import .py files which the security manager prohibits. """ # # The contents of the zip should have relative and not absolute paths or # else the imports won't work as expected. # cwd = os.getcwd() try: os.chdir(source_code_dir) ret_val = compileall.compile_dir(source_code_dir, ddir=".", force=True, quiet=True) if ret_val == 0: raise exceptions.UserError( "Failed to compile source code in the directory {}.".format( source_code_dir)) out_file = StringIO.StringIO() with zipfile.ZipFile(out_file, "w", zipfile.ZIP_DEFLATED) as zip_file: for root, _, files in os.walk("."): for filename in files: if not filename.endswith(".py"): logger.debug("Adding %s to zip.", os.path.join(root, filename)) zip_file.write(os.path.join(root, filename)) encoded_bytes = base64.b64encode(out_file.getvalue()) out_file.close() return encoded_bytes except OSError as os_err: raise exceptions.UserError( "Failed to read source code directory {}. Error code: {}." " Error message: {}".format(source_code_dir, os_err.errno, os.strerror(os_err.errno))) except UnicodeError as uni_err: raise exceptions.UserError( "Failed to base64 encode source code in the directory {}. " "Error message: {}".format(source_code_dir, uni_err.reason)) finally: os.chdir(cwd)
def upload(engine, user, upload_artifact, password, wait):
    """
    Logs on to the given engine and uploads the build artifact.

    The upload artifact should have been generated via the build command
    from a plugin. The file is expected to contain the delphix api version
    as well as be in the json format. During the process, print any errors
    that occur cleanly.

    Raises specifically:
        UserError
        InvalidArtifactError
        HttpError
        UnexpectedError
        PluginUploadJobFailed
        PluginUploadWaitTimedOut
    """
    logger.debug('Upload parameters include'
                 ' engine: {},'
                 ' user: {},'
                 ' upload_artifact: {},'
                 ' wait: {}'.format(engine, user, upload_artifact, wait))
    logger.info('Uploading plugin artifact {} ...'.format(upload_artifact))

    # Parse the artifact up front so a bad file fails before any login.
    try:
        with open(upload_artifact, 'rb') as f:
            try:
                content = json.load(f)
            except ValueError:
                raise exceptions.UserError(
                    'Upload failed because the upload artifact was not a valid'
                    ' json file. Verify the file was built using the delphix'
                    ' build command.')
    except IOError as err:
        raise exceptions.UserError(
            'Unable to read upload artifact file \'{}\''
            '\nError code: {}. Error message: {}'.format(
                upload_artifact, err.errno,
                errno.errorcode.get(err.errno, UNKNOWN_ERR)))

    # Start a fresh delphix session and push the plugin up.
    client = delphix_client.DelphixClient(engine)
    engine_api = client.get_engine_api(content)
    client.login(engine_api, user, password)
    client.upload_plugin(os.path.basename(upload_artifact), content, wait)
def __read_schema_file(self):
    """
    Loads the plugin schema file and returns its parsed json content.

    Raises:
        UserError: If the file cannot be read or is not valid json.
    """
    try:
        with open(self.__schema_file, "r") as f:
            try:
                return json.load(f)
            except ValueError as err:
                raise exceptions.UserError(
                    "Failed to load schemas because '{}' is not a "
                    "valid json file. Error: {}".format(
                        self.__schema_file, err))
    except (IOError, OSError) as err:
        raise exceptions.UserError(
            "Unable to load schemas from '{}'"
            "\nError code: {}. Error message: {}".format(
                self.__schema_file, err.errno, os.strerror(err.errno)))
def __post(self, resource, content_type='application/json', data=None):
    """
    Issues an http post for the given resource and returns the parsed
    json response.

    The content type defaults to json. Raises HttpError when the engine
    reports an ErrorResult and UnexpectedError for anything else that
    isn't a successful OKResult/DataResult.
    """
    # Form the HTTP header, attaching the session cookie when we have one.
    headers = {'Content-type': content_type}
    if self.__cookie is None:
        logger.debug('No cookie being used')
    else:
        logger.debug('Cookie being used: {}'.format(self.__cookie))
        headers['Cookie'] = self.__cookie

    url = 'http://{}/resources/json/{}'.format(self.__engine, resource)

    # Dict payloads are serialized to a json string before posting.
    if data is not None and not isinstance(data, (str, bytes, unicode)):
        data = json.dumps(data)

    try:
        response = requests.post(url=url, data=data, headers=headers)
    except requests.exceptions.RequestException as err:
        raise exceptions.UserError('Encountered a http request failure.'
                                   '\n{}'.format(err))

    #
    # Remember the newest cookie that comes back; subsequent requests fail
    # unless the most recently returned cookie is used.
    #
    if 'set-cookie' in response.headers:
        self.__cookie = response.headers['set-cookie']
        logger.debug('New cookie found: {}'.format(self.__cookie))

    try:
        response_json = response.json()
    except ValueError:
        raise exceptions.UnexpectedError(response.status_code,
                                         response.text)

    logger.debug('Response body: {}'.format(json.dumps(response_json)))

    result_type = response_json.get('type')
    if response.status_code == 200 and result_type in ('OKResult',
                                                       'DataResult'):
        return response_json
    if result_type == 'ErrorResult':
        raise exceptions.HttpError(response.status_code,
                                   response_json['error'])
    raise exceptions.UnexpectedError(response.status_code,
                                     json.dumps(response_json, indent=2))
def generate_upload_artifact(upload_artifact, plugin_output):
    """
    Serializes 'plugin_output' as json into the 'upload_artifact' file.

    Raises:
        UserError: If the artifact file cannot be written.
    """
    logger.info("Generating upload_artifact file at %s", upload_artifact)
    try:
        with open(upload_artifact, "w") as artifact_file:
            json.dump(plugin_output, artifact_file, indent=4)
    except IOError as err:
        raise exceptions.UserError(
            "Failed to write upload_artifact file to {}. Error code: {}."
            " Error message: {}".format(upload_artifact, err.errno,
                                        os.strerror(err.errno)))
def get_src_dir_path(config_file_path, src_dir):
    """
    Validates 4 requirements of src_dir:
        - src_dir must be a relative path
        - src_dir must exist
        - src_dir must be a directory
        - src_dir must be a subdirectory of the plugin root

    Args:
        config_file_path: A path to the plugin's config file. The plugin's
            root is the directory containing the config file. No
            pre-processing is needed.
        src_dir: The path to the plugin's src directory. This is the path
            to be validated.

    Returns:
        str: A normalized, absolute path to the plugin's source directory.

    Raises:
        PathIsAbsoluteError, PathDoesNotExistError, PathTypeError,
        UserError: When the corresponding requirement is violated.
    """
    # Validate that the src directory is not an absolute path. Paths with
    # ~ in them are not considered absolute by os.path.isabs.
    src_dir = os.path.expanduser(src_dir)
    if os.path.isabs(src_dir):
        raise exceptions.PathIsAbsoluteError(src_dir)

    # The plugin root is the directory containing the plugin config file.
    # This is passed in by the CLI so it needs to be standardized and made
    # absolute for comparison later.
    plugin_root_dir = os.path.dirname(config_file_path)
    plugin_root_dir = standardize_path(plugin_root_dir)

    # The plugin's src directory is relative to the plugin root, not to
    # the current working directory, so join it to the root explicitly.
    src_dir_absolute = standardize_path(os.path.join(plugin_root_dir,
                                                     src_dir))
    if not os.path.exists(src_dir_absolute):
        raise exceptions.PathDoesNotExistError(src_dir_absolute)
    if not os.path.isdir(src_dir_absolute):
        raise exceptions.PathTypeError(src_dir_absolute, 'directory')

    normcase_src_dir = os.path.normcase(src_dir_absolute)
    normcase_plugin_root = os.path.normcase(plugin_root_dir)
    #
    # Require src to be strictly inside the root by checking against the
    # root plus a trailing separator. A bare startswith() check wrongly
    # accepted sibling directories sharing a name prefix (e.g. '/root-x'
    # for root '/root'); this also rejects src_dir == plugin_root.
    #
    root_prefix = os.path.join(normcase_plugin_root, '')
    if not normcase_src_dir.startswith(root_prefix):
        raise exceptions.UserError(
            "The src directory {} is not a subdirectory "
            "of the plugin root at {}".format(src_dir_absolute,
                                              plugin_root_dir))

    return src_dir_absolute
def test_command_user_error(mock_init, plugin_name):
    """A UserError from init should print the message and exit with 1."""
    mock_init.side_effect = exceptions.UserError("codegen_error")

    runner = click_testing.CliRunner()
    result = runner.invoke(cli.delphix_sdk, ['init', '-n', plugin_name])

    assert result.exit_code == 1
    assert result.output == 'codegen_error\n'
    # 'DIRECT' and os.getcwd() are the expected defaults
    mock_init.assert_called_once_with(os.getcwd(), const.DIRECT_TYPE,
                                      plugin_name, const.UNIX_HOST_TYPE)
def __check_for_lua_name_and_min_version(self):
    """
    Checks that 'luaName' and 'minimumLuaVersion' are either both set or
    both absent in the plugin config.

    Raises:
        ValidationFailedError: If only one of the two properties is set.
    """
    warnings = defaultdict(list)
    # Each property requires its counterpart; check both directions.
    for present, required in (('luaName', 'minimumLuaVersion'),
                              ('minimumLuaVersion', 'luaName')):
        if (self.__plugin_config_content.get(present)
                and not self.__plugin_config_content.get(required)):
            msg = ('Failed to process property "{}" without '
                   '"{}" set in the plugin config.'.format(present, required))
            warnings['exception'].append(exceptions.UserError(msg))

    if warnings:
        raise exceptions.ValidationFailedError(warnings)
def __report_warnings_and_exceptions(warnings):
    """
    Raises a UserError summarizing all errors and warnings found in the
    plugin code, but only when the warnings dict contains the 'exception'
    key.
    """
    # Nothing to report unless at least one error was recorded.
    if not warnings or 'exception' not in warnings:
        return
    exception_msg = MessageUtils.exception_msg(warnings)
    exception_msg += '\n{}'.format(MessageUtils.warning_msg(warnings))
    raise exceptions.UserError(
        '{}\n{} Warning(s). {} Error(s).'.format(
            exception_msg, len(warnings['warning']),
            len(warnings['exception'])))
def _import_helper(queue, src_dir, module):
    """Helper method to import the module and handle any import time
    exceptions.

    Failures are reported through 'queue' as a dict keyed 'exception' for
    user-attributable errors or 'sdk exception' for tooling errors —
    presumably because this runs in a child process; confirm with the
    caller.

    Args:
        queue: Queue used to report exceptions back to the caller.
        src_dir: Directory temporarily added to sys.path for the import.
        module: Dotted name of the module to import.

    Returns:
        The imported module object.

    Raises:
        UserError: If no module content was produced by the import.
    """
    module_content = None
    sys.path.append(src_dir)
    try:
        module_content = importlib.import_module(module)
    except (ImportError, TypeError) as err:
        queue.put({'exception': err})
    except Exception as err:
        #
        # We need to figure out if this is an error that was raised inside the
        # wrappers which would mean that it is a user error. Otherwise we
        # should still queue the error but specify that it's not a user error.
        #
        parent_class_list = [base.__name__ for base in err.__class__.__bases__]
        if 'PlatformError' in parent_class_list:
            # This is a user error
            error = exceptions.UserError(err.message)
            queue.put({'exception': error})
        else:
            #
            # Because we don't know if the output of the err is actually in the
            # message, we just cast the exception to a string and hope to get
            # the most information possible.
            #
            error = exceptions.SDKToolingError(str(err))
            queue.put({'sdk exception': error})
    finally:
        # Undo the sys.path mutation regardless of the import outcome.
        sys.path.remove(src_dir)
    if not module_content:
        raise exceptions.UserError("Plugin module content is None")
    return module_content
def __validate_schemas(self):
    """
    Validates the plugin schemas against the plugin meta schema.

    Raises:
        UserError: If the meta schema file cannot be read or parsed.
        SchemaValidationError: If the plugin schemas are not valid.
    """
    # Load the meta schema that the plugin schemas must conform to.
    try:
        with open(self.__plugin_meta_schema, "r") as f:
            try:
                plugin_meta_schema = json.load(f)
            except ValueError as err:
                raise exceptions.UserError(
                    "Failed to load schemas because '{}' is not a "
                    "valid json file. Error: {}".format(
                        self.__plugin_meta_schema, err))
    except (IOError, OSError) as err:
        raise exceptions.UserError(
            "Unable to read plugin schema file '{}'"
            "\nError code: {}. Error message: {}".format(
                self.__plugin_meta_schema, err.errno,
                os.strerror(err.errno)))

    validator = Draft7Validator(plugin_meta_schema)
    #
    # iter_errors performs lazy validation, which lets us consolidate all
    # the validation errors and report everything wrong at once.
    #
    validation_errors = sorted(validator.iter_errors(self.__plugin_schemas),
                               key=lambda e: e.path)
    if validation_errors:
        raise exceptions.SchemaValidationError(self.__schema_file,
                                               validation_errors)
def __read_plugin_config_file(self):
    """
    Reads the plugin config file and returns its parsed yaml content.

    Raises:
        UserError: If the file cannot be read or is not valid yaml.
    """
    try:
        with open(self.__plugin_config, "rb") as f:
            try:
                return yaml.safe_load(f)
            except yaml.YAMLError as err:
                #
                # Always raise on a parse failure. Previously a YAMLError
                # without a "problem_mark" was silently swallowed and the
                # function returned None; now the position is simply
                # omitted when the parser doesn't provide one.
                #
                position = ""
                if hasattr(err, "problem_mark"):
                    mark = err.problem_mark
                    position = "Error position: {}:{}".format(
                        mark.line + 1, mark.column + 1)
                raise exceptions.UserError(
                    "Command failed because the plugin config file "
                    "provided as input '{}' was not valid yaml. "
                    "Verify the file contents. "
                    "{}".format(self.__plugin_config, position))
    except (IOError, OSError) as err:
        raise exceptions.UserError(
            "Unable to read plugin config file '{}'"
            "\nError code: {}. Error message: {}".format(
                self.__plugin_config, err.errno, os.strerror(err.errno)))
def delphix_sdk(verbose, quiet):
    """
    The tools of the Delphix Virtualization SDK that help develop, build,
    and upload a plugin.

    Raises:
        UserError: If the interpreter is not Python 2.7.
    """
    console_logging_level = get_console_logging_level(verbose, quiet)
    #
    # Now that we know the desired level, add in the console handler. Nothing
    # will be printed to the console until this is executed.
    #
    logging_util.add_console_handler(console_logging_level)

    if sys.version_info[:2] != (2, 7):
        # The old concatenated message rendered as 'failed.Supported';
        # add the missing separator between the two sentences.
        raise exceptions.UserError(
            'Python version check failed. '
            'Supported version is 2.7.x, found {}'.format(sys.version_info))
def make_dir(path, force_remove):
    """
    Creates a directory at 'path', optionally clearing out whatever is
    already there first.

    Args:
        path: Location of the directory to create.
        force_remove: When truthy, delete any existing tree at 'path'.

    Raises:
        UserError: If the directory cannot be created.
    """
    #
    # Delete the folder if it is there to clear the location. Errors are
    # ignored because the folder may simply not exist; any real problem
    # will surface when the directory is created below.
    #
    if force_remove:
        shutil.rmtree(path, ignore_errors=True)
    try:
        os.mkdir(path)
    except OSError as err:
        raise exceptions.UserError(
            'Unable to create new directory {!r}'
            '\nError code: {}. Error message: {}'.format(
                path, err.errno, os.strerror(err.errno)))
    logger.debug('Successfully created directory {!r}'.format(path))
def __get(self, resource, content_type='application/json', stream=False):
    """
    Issues an http get for the given resource and returns the raw
    response.

    The content type defaults to application/json. Raises HttpError with
    the engine-reported error (when one can be parsed from the body) for
    any non-200 status.
    """
    # Form the HTTP header, attaching the session cookie when we have one.
    headers = {'Content-type': content_type}
    if self.__cookie is None:
        logger.debug('No cookie being used')
    else:
        logger.debug('Cookie being used: {}'.format(self.__cookie))
        headers['Cookie'] = self.__cookie

    url = 'http://{}/resources/json/{}'.format(self.__engine, resource)

    try:
        response = requests.get(url=url, headers=headers, stream=stream)
    except requests.exceptions.RequestException as err:
        raise exceptions.UserError('Encountered a http request failure.'
                                   '\n{}'.format(err))

    #
    # Remember the newest cookie that comes back; subsequent requests fail
    # unless the most recently returned cookie is used.
    #
    if 'set-cookie' in response.headers:
        self.__cookie = response.headers['set-cookie']
        logger.debug('New cookie found: {}'.format(self.__cookie))

    if response.status_code != 200:
        # A non-json body simply means we have no engine error to attach.
        try:
            response_error = response.json()['error']
        except ValueError:
            response_error = None
        raise exceptions.HttpError(response.status_code, response_error)
    return response
def _validate_and_get_manifest(module, module_content, entry_point):
    """
    Builds the plugin manifest indicating which plugin operations have
    been implemented by the plugin developer.

    Args:
        module: name of the module imported
        module_content: plugin module content from import
        entry_point: name of entry point to the above plugin module

    Returns:
        dict: dictionary that represents plugin's manifest

    Raises:
        RuntimeError: If module_content or entry_point is None — earlier
            schema validation should make both impossible.
        UserError: If the entry point symbol is missing or None.
    """
    # These two should never happen; flag a run time error if they do.
    if module_content is None:
        raise RuntimeError('Plugin module content is None.')
    if entry_point is None:
        raise RuntimeError('Plugin entry point object is None.')

    if not hasattr(module_content, entry_point):
        raise exceptions.UserError(
            'Entry point \'{}:{}\' does not exist. \'{}\' is not a symbol'
            ' in module \'{}\'.'.format(module, entry_point, entry_point,
                                        module))
    plugin_object = getattr(module_content, entry_point)
    if plugin_object is None:
        raise exceptions.UserError('Plugin object retrieved from the entry'
                                   ' point {} is None'.format(entry_point))

    # Record which operations the plugin object has implementations for.
    discovery = plugin_object.discovery
    linked = plugin_object.linked
    virtual = plugin_object.virtual
    return {
        'type': 'PluginManifest',
        'hasRepositoryDiscovery': bool(discovery.repository_impl),
        'hasSourceConfigDiscovery': bool(discovery.source_config_impl),
        'hasLinkedPreSnapshot': bool(linked.pre_snapshot_impl),
        'hasLinkedPostSnapshot': bool(linked.post_snapshot_impl),
        'hasLinkedStartStaging': bool(linked.start_staging_impl),
        'hasLinkedStopStaging': bool(linked.stop_staging_impl),
        'hasLinkedStatus': bool(linked.status_impl),
        'hasLinkedWorker': bool(linked.worker_impl),
        'hasLinkedMountSpecification': bool(linked.mount_specification_impl),
        'hasVirtualConfigure': bool(virtual.configure_impl),
        'hasVirtualUnconfigure': bool(virtual.unconfigure_impl),
        'hasVirtualReconfigure': bool(virtual.reconfigure_impl),
        'hasVirtualStart': bool(virtual.start_impl),
        'hasVirtualStop': bool(virtual.stop_impl),
        'hasVirtualPreSnapshot': bool(virtual.pre_snapshot_impl),
        'hasVirtualPostSnapshot': bool(virtual.post_snapshot_impl),
        'hasVirtualMountSpecification': bool(virtual.mount_specification_impl),
        'hasVirtualStatus': bool(virtual.status_impl),
        'hasInitialize': bool(virtual.initialize_impl)
    }
def init(root, ingestion_strategy, name, host_type):
    """
    Creates a valid plugin in a given directory. The plugin created will be
    able to be built and uploaded immediately.

    This command is designed to help novice plugin developers. There are
    decisions made, such as what the entry point file looks like, in order
    to make it easier for authors to get started. The command is expected to
    be used by experienced developers, but they are not the primary
    audience.

    Args:
        root (str): The path of the plugin's root directory
        ingestion_strategy (str): The plugin type. Either DIRECT or STAGED
        name (str): The name of the plugin to display.
        host_type (list of str): The host type supported by the plugin

    Raises:
        UserError: If any step of the initialization fails; partially
            created files are removed before the error is raised.
    """
    logger.info('Initializing directory: %s', root)
    logger.debug(
        'init parameters: %s', {
            'Root': root,
            'Ingestion Strategy': ingestion_strategy,
            'Name': name,
            'Host Types': host_type
        })

    # Files paths based on 'root' to be used throughout
    src_dir_path = os.path.join(root, DEFAULT_SRC_DIRECTORY)
    config_file_path = os.path.join(root, DEFAULT_PLUGIN_CONFIG_FILE)
    schema_file_path = os.path.join(root, DEFAULT_SCHEMA_FILE)
    entry_point_file_path = os.path.join(src_dir_path,
                                         DEFAULT_ENTRY_POINT_FILE)

    # Make sure nothing is overwritten
    file_util.validate_paths_do_not_exist(config_file_path, schema_file_path,
                                          src_dir_path)

    # Make an UUID for the plugin
    plugin_id = str(uuid.uuid4())
    logger.debug("Using %s as the plugin id.", plugin_id)

    # if name is not provided the name will be equal to plugin_id.
    if not name:
        name = plugin_id

    #
    # Some magic to get the yaml module to maintain the order when dumping
    # an OrderedDict
    #
    yaml.add_representer(
        OrderedDict, lambda dumper, data: dumper.represent_mapping(
            'tag:yaml.org,2002:map', data.items()))

    logger.debug("Using %s as the plugin's entry point.", DEFAULT_ENTRY_POINT)

    try:
        #
        # Create the source directory. We've already validated that this
        # directory doesn't exist.
        #
        logger.info('Creating source directory at %r.', src_dir_path)
        os.mkdir(src_dir_path)

        #
        # Copy the schema file template into the root directory. The schema
        # file is static and doesn't depend on any input so it can just be
        # copied. By copying we can also avoid dealing with ordering issues.
        #
        logger.info('Writing schema file at %s.', schema_file_path)
        shutil.copyfile(SCHEMA_TEMPLATE_PATH, schema_file_path)

        # Read and validate the schema file
        result = plugin_util.read_and_validate_schema_file(
            schema_file_path, False)

        # Generate the definitions based on the schema file
        codegen.generate_python(name, src_dir_path,
                                os.path.dirname(config_file_path),
                                result.plugin_schemas)

        #
        # Create the plugin config file. The config file relies on input from
        # the user, so it's easier to deal with a dictionary than a file. This
        # must be done only after both the schema file and src dir have been
        # created since the paths need to exist.
        #
        logger.info('Writing config file at %s.', config_file_path)
        with open(config_file_path, 'w+') as f:
            config = _get_default_plugin_config(plugin_id, ingestion_strategy,
                                                name, DEFAULT_ENTRY_POINT,
                                                DEFAULT_SRC_DIRECTORY,
                                                DEFAULT_SCHEMA_FILE, host_type)
            yaml.dump(config, f, default_flow_style=False)

        #
        # Copy the entry point template into the root directory. The entry
        # point file is static and doesn't depend on any input so it can just
        # be copied.
        #
        logger.info('Writing entry file at %s.', entry_point_file_path)
        with open(entry_point_file_path, 'w+') as f:
            entry_point_content = _get_entry_point_contents(
                plugin_id, ingestion_strategy, host_type)
            f.write(entry_point_content)
    except Exception as e:
        # Best-effort cleanup so a failed init doesn't leave partial state.
        logger.debug('Attempting to cleanup after failure. %s', e)
        file_util.delete_paths(config_file_path, schema_file_path,
                               src_dir_path)
        raise exceptions.UserError(
            'Failed to initialize plugin directory {}: {}.'.format(root, e))
def _execute_swagger_codegen(swagger_file, output_dir):
    """
    Runs the swagger codegen jar to generate python model classes from
    'swagger_file' into 'output_dir'.

    Raises:
        UserError: If java is not on the PATH, the jar cannot be run, or
            code generation fails.
    """
    jar = os.path.join(os.path.dirname(__file__), SWAGGER_JAR)
    codegen_config = os.path.join(os.path.dirname(__file__), CODEGEN_CONFIG)
    codegen_template = os.path.join(os.path.dirname(__file__),
                                    CODEGEN_TEMPLATE_DIR)
    #
    # Arguments passed to the jar:
    # -DsupportPython2=true    - generate classes runnable with python 2.
    # -i <swagger_file>        - file swagger uses to find the schema
    #                            definitions.
    # -l python-flask          - target language; unlike plain python it
    #                            has both to_dict and from_dict methods.
    # -c <codegen_config>      - config file that sets the outer package
    #                            to 'generated'.
    # -t <codegen_template>    - folder of mustache templates used to
    #                            generate the classes; falls back to
    #                            swagger's defaults when a file is absent.
    # --model-package <module> - module name, 'definitions'; with -c this
    #                            yields 'generated/definitions'.
    # -o <output_dir>          - where the generated files are placed.
    #
    try:
        process_inputs = [
            'java', '-jar', jar, 'generate',
            '-DsupportPython2=true',
            '-i', swagger_file,
            '-l', 'python-flask',
            '-c', codegen_config,
            '-t', codegen_template,
            '--model-package', CODEGEN_MODULE,
            '-o', output_dir
        ]

        logger.info('Running process with arguments: {!r}'.format(
            ' '.join(process_inputs)))

        # Pipe stdout/stderr so they can be logged below.
        process = subprocess.Popen(process_inputs,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)
    except OSError as err:
        if err.errno == errno.ENOENT:
            raise exceptions.UserError('Swagger python code generation failed.'
                                       ' Make sure java is on the PATH.')
        raise exceptions.UserError(
            'Unable to run {!r} to generate python code.'
            '\nError code: {}. Error message: {}'.format(
                jar, err.errno, os.strerror(err.errno)))

    # communicate() drains both pipes and waits for the process to finish.
    stdout, stderr = process.communicate()

    # A non-zero exit status means code generation failed.
    if process.wait():
        logger.error('stdout: {}'.format(stdout))
        logger.error('stderr: {}'.format(stderr))
        # The old concatenated message rendered as 'failed.See logs'; add
        # the missing separator between the two sentences.
        raise exceptions.UserError('Swagger python code generation failed. '
                                   'See logs for more information.')

    # Print the stdout and err into the logs.
    logger.info('stdout: {}'.format(stdout))
    logger.info('stderr: {}'.format(stderr))