def fix_configuration(configuration, context=None, fixer_classes=None):
    """
    Run fixers against the given configuration.

    @param configuration: The configuration to fix.
    @param context: The ValidationContext if it has already been created.
        With value None the ValidationContext will be created and
        validated first.
    @param fixer_classes: The fixer classes to use for the fixing. If
        None, all fixer classes will be used.
    @return: A ValidationContext object, which contains a list of messages
        of fixes that were executed.
    """
    # Leftover debug code removed here: the original read
    # 'duplicate_settings1.confml' into unused locals (data1/data2) inside
    # try/except-pass blocks, silently swallowing all exceptions.
    if context is None:
        context = validate_configuration(configuration)
    if fixer_classes is None:
        fixer_classes = get_fixer_classes()
    for fixer in fixer_classes:
        try:
            fixer().fix(context)
        except Exception as e:
            # A failing fixer is logged and skipped so the remaining
            # fixers still run.
            from cone.public import utils
            utils.log_exception(logging.getLogger('cone'),
                                "Error fixing configuration: %s: %s" \
                                % (e.__class__.__name__, e))
    # Return the context as documented; callers expect the
    # ValidationContext carrying the executed-fix messages.
    return context
def validate_impl_set(impl_set, configuration, validator_classes=None):
    """
    Validate the given implementation set.

    @param impl_set: The implementations to validate.
    @param configuration: The configuration used in the validation.
    @param validator_classes: The validator classes to use for the validation.
        If None, all validator classes will be used.
    @return: A list of Problem objects.
    """
    context = ValidationContext(configuration)
    # Validate a flattened list so that implementations nested inside
    # containers are included as well.
    context.all_impls = _get_flat_impl_list(impl_set)
    if validator_classes is None:
        validator_classes = get_validator_classes()

    # Run global validation first
    for vc in validator_classes:
        if issubclass(vc, GlobalValidatorBase):
            try:
                validator = vc(context)
                validator.validate()
            # A failing validator is logged and skipped so the remaining
            # validators still run.
            except Exception, e:
                utils.log_exception(logging.getLogger('cone'),
                                    "Error while validating: %s: %s" \
                                    % (e.__class__.__name__, e))
def __getattr__(self, attrname): if attrname in self._data_providers: try: return self._data_providers[attrname].get_data() except Exception, e: utils.log_exception(logging.getLogger('cone'), "Exception getting %s: %s" % (attrname, e))
def generate_report(template_file, report_file, report_data, template_paths=None, extra_filters=None):
    """
    Generate a report based on the given template file, report file and data
    dictionary.

    @param template_file: Path to the template file to use.
    @param report_file: Path to the output report file.
    @param report_data: The report data dictionary used when rendering the
        report from the template.
    @param template_paths: The additional search paths for templates. The
        default location cone.report is always included.
    @param extra_filters: Optional extra template filters passed to
        set_filters().
    @return: True if successful, False if not.
    @raise ValueError: If report_data is not a dictionary.
    """
    # Validate the input before doing any work.
    if not isinstance(report_data, dict):
        raise ValueError("report_data must be a dictionary!")

    # Bug fix: the original used mutable default arguments ([] and {}) and
    # mutated template_paths with insert(), leaking state across calls and
    # into callers' lists. Work on a private copy instead.
    paths = list(template_paths) if template_paths is not None else []
    paths.insert(0, ROOT_PATH)
    paths.insert(0, os.path.dirname(template_file))
    paths = utils.distinct_array(paths)
    if extra_filters is None:
        extra_filters = {}

    log.debug(
        'generate_report(template_file=%r, report_file=%r, <data>, template_paths=%s)'
        % (template_file, report_file, paths))
    try:
        template_file = os.path.abspath(template_file)
        loader = FileSystemLoader(paths)
        env = Environment(loader=loader)
        set_filters(env, extra_filters)
        template = env.get_template(os.path.basename(template_file))
        file_string = template.render(report_data)

        # Create directories for the report
        report_dir = os.path.dirname(report_file)
        if report_dir != '' and not os.path.exists(report_dir):
            os.makedirs(report_dir)

        # Write the rendered report to file
        f = open(report_file, 'wb')
        try:
            f.write(file_string.encode('utf-8'))
        finally:
            f.close()

        # print(...) with a single argument works identically on
        # Python 2 and 3.
        print("Generated report to '%s'" % report_file)
        return True
    except Exception as e:
        utils.log_exception(log, "Failed to generate report: %s %s" % (type(e), e))
        return False
def loads(self, jsonstr):
    """
    Parse the first object from the given JSON string.

    @param jsonstr: The JSON string to read; only the first top-level
        object is read.
    @return: The object produced by the element reader for the first key,
        or None if the parsed dictionary is empty.
    @raise exceptions.ParseError: If the string cannot be parsed as JSON.
    """
    try:
        datadict = simplejson.loads(jsonstr)
        # Delegate to the reader registered for the first top-level key;
        # the return inside the loop means only one entry is processed.
        for key in datadict:
            reader = get_reader_for_elem(key)
            return reader.loads(datadict[key])
    except (SyntaxError, ValueError),e:
        utils.log_exception(logging.getLogger('cone'), "Json string parse raised exception: %s!" % (e))
        raise exceptions.ParseError("Json string %s parse raised exception: %s!" % (jsonstr,e))
def get_refs(self):
    """
    Get the references used on the left side of this relation.

    @return: A list of references collected from the left-hand expression,
        or an empty list if evaluating the expression fails.
    """
    try:
        refs = []
        tempast = ASTInterpreter()
        tempast.create_ast("%s" % self.left)
        for exp in tempast.expression_list:
            refs += exp.get_refs()
        # Bug fix: the original never returned the collected refs on the
        # success path (only 'return []' was present), so the gathered
        # references were discarded.
        return refs
    except Exception as e:
        utils.log_exception(logging.getLogger('cone.rules'),
                            "Exception in get_refs() of relation %r: %s" % (self, e))
        return []
def get_flatconfig(self):
    """
    Create a flat configuration from the current configuration with the
    given setting refs. Take the last configuration element, which will
    contain the data elements.

    @raise exceptions.ConeException: If flattening the configuration fails.
    """
    # Lazily build and cache the flattened configuration.
    if not self.flatconfig:
        try:
            cf = confflattener.ConfigurationFlattener()
            self.flatconfig = api.Configuration()
            cf.flat(self.configuration, self.reader.settings, self.flatconfig)
        # NOTE(review): listing Exception here makes ConeException and
        # TypeError redundant — effectively every exception is caught.
        except (exceptions.ConeException, TypeError, Exception), e:
            utils.log_exception(
                self.logger,
                'Failed to flat configuration with settings %s. Exception: %s'
                % (self.reader.settings, e))
            raise exceptions.ConeException(
                'Failed to flat configuration. Exception: %s' % e)
    # NOTE(review): no return statement is visible in this chunk; the
    # method presumably returns self.flatconfig — confirm against the
    # full source.
def validate_configuration(configuration, validator_classes=None):
    """
    Validate the given configuration.

    @param configuration: The configuration to validate.
    @param validator_classes: The validator classes to use for the validation.
        If None, all validator classes will be used.
    @return: A ValidationContext object, which contains a list of Problem
        objects in member variable problems.
    """
    if validator_classes is None:
        validator_classes = get_validator_classes()
    context = ValidationContext(configuration)
    # Instantiate all validators up front so constructor errors surface
    # before any validation runs.
    validators = [vc(context) for vc in validator_classes]
    for validator in validators:
        try:
            validator.validate()
        except Exception as e:
            # A failing validator is logged and skipped so the remaining
            # validators still run.
            from cone.public import utils
            utils.log_exception(logging.getLogger('cone'),
                                "Error validating configuration: %s: %s" \
                                % (e.__class__.__name__, e))
    # Bug fix: the docstring promises the context is returned (and
    # fix_configuration() assigns this function's result), but no return
    # statement was present.
    return context
#Waiting for process to complete retcode = pid.wait() #Storing stream information for possible further processing. self.set_streams(pid.stdin, pid.stdout, pid.stderr) if retcode < 0: self.logger.error("Child was terminated by signal %s" % (-retcode)) else: self.logger.info("Child returned: %s" % retcode) except OSError, e: self.logger.error("Execution failed: %s", repr(e)) self.handle_filters() except Exception, e: utils.log_exception(self.logger, "Failed to execute command: %s" % e) def set_logger(self, logger): self.logger = logger def __replace_helper_variables(self, inputstr, dictionary): retstr = inputstr for key in dictionary.keys(): retstr = retstr.replace(key, dictionary[key]) return retstr def solve_refs(self): """ Function to solve references just before generation. """
class ImplReader(object):
    """
    Internal reader class for reading implementations from a file in a
    configuration.
    """

    # The reader class list loaded using ImplFactory
    __loaded_reader_classes = None
    # Mapping: ImplML XML namespace -> reader class registered for it
    __reader_classes = None
    # Lower-cased file extensions supported by any reader
    __supported_file_extensions = None
    # Namespaces the readers have declared as ignorable
    __ignored_namespaces = None

    def __init__(self, resource_ref, configuration):
        """
        @param resource_ref: Reference of the resource (file) to read.
        @param configuration: The configuration the resource belongs to.
        """
        self.resource_ref = resource_ref
        self.configuration = configuration

    @classmethod
    def _load_data_from_plugins(cls):
        """
        Load all data needed for implementation parsing from the plug-ins.

        The actual loading is only done the first time this method is
        called, or again when the reader class list has changed.
        """
        # Load the data only if the reader class list has not been loaded
        # yet or it has changed. Identity comparison against the list
        # object returned by ImplFactory is intentional here.
        loaded_reader_classes = plugin.ImplFactory.get_reader_classes()
        if cls.__loaded_reader_classes is loaded_reader_classes:
            return

        reader_classes = [plugin.ReaderBase]
        reader_classes.extend(loaded_reader_classes)

        cls.__reader_classes = {}
        cls.__ignored_namespaces = []
        cls.__supported_file_extensions = []
        for rc in reader_classes:
            # Reader class: registered under its ImplML namespace
            ns = rc.NAMESPACE
            if ns is not None:
                if ns in cls.__reader_classes:
                    raise RuntimeError("Multiple reader classes registered for ImplML namespace '%s': at least %s and %s"\
                        % (ns, rc, cls.__reader_classes[ns]))
                cls.__reader_classes[ns] = rc

            # Ignored namespaces
            for ns in rc.IGNORED_NAMESPACES:
                if ns not in cls.__ignored_namespaces:
                    cls.__ignored_namespaces.append(ns)

            # Supported file extensions (stored lower-cased)
            for fe in rc.FILE_EXTENSIONS:
                fe = fe.lower()
                if fe not in cls.__supported_file_extensions:
                    cls.__supported_file_extensions.append(fe)

        cls.__loaded_reader_classes = loaded_reader_classes

    @classmethod
    def _get_namespaces(cls, etree):
        """
        Return a list of XML namespaces in the given element tree.

        Collects the namespace of the root tag and of each direct child
        element; entries without a namespace (None) are filtered out.
        """
        namespaces = []
        namespaces.append(utils.xml.split_tag_namespace(etree.tag)[0])
        for elem in etree:
            ns = utils.xml.split_tag_namespace(elem.tag)[0]
            if ns not in namespaces:
                namespaces.append(ns)
        return filter(lambda ns: ns is not None, namespaces)

    def _read_impls_from_file_root_element(self, root, namespaces):
        """
        Read implementations where the file root element itself is the
        implementation element.

        @param root: Root element of the parsed file.
        @param namespaces: The XML namespaces encountered in the file.
        @return: List of read implementation instances.
        """
        impls = []
        reader_classes = self.get_reader_classes()

        # Go through the list of XML namespaces encountered in the
        # file and read an implementation using the corresponding
        # reader for each namespace
        impl_count = 0
        common_data = CommonImplmlDataReader.read_data(root)
        for ns in namespaces:
            if ns not in reader_classes:
                continue
            rc = reader_classes[ns]
            impl = self._read_impl(rc, root)
            if impl:
                impl.index = impl_count
                impl_count += 1
                if common_data:
                    common_data.apply(impl)
                impls.append(impl)

        # Add temp feature definitions to the first implementation
        if common_data and impls:
            impls[0]._tempvar_defs.extend(common_data.tempvar_defs)

        return impls

    def _read_impls_from_file_sub_elements(self, root):
        """
        Read implementations from the sub-elements of the file root
        element.

        @param root: Root element of the parsed file.
        @return: List of read implementation instances.
        """
        impls = []

        # Collect common ImplML namespace data
        common_data = CommonImplmlData()
        for elem in root:
            ns = utils.xml.split_tag_namespace(elem.tag)[0]
            if ns == COMMON_IMPLML_NAMESPACE:
                cd = CommonImplmlDataReader.read_data(elem)
                if cd:
                    common_data.extend(cd)

        # Go through all sub-elements and read an implementation instance
        # from each if possible
        impl_count = 0
        reader_classes = self.get_reader_classes()
        for elem in root:
            ns = utils.xml.split_tag_namespace(elem.tag)[0]
            if ns != COMMON_IMPLML_NAMESPACE and ns in reader_classes:
                reader_class = reader_classes[ns]
                impl = self._read_impl(reader_class, elem)
                if impl:
                    cd = CommonImplmlDataReader.read_data(elem)
                    if cd is not None:
                        # Element-local common data is layered on top of
                        # the file-level common data.
                        impl._tempvar_defs.extend(cd.tempvar_defs)
                        data = common_data.copy()
                        data.extend(cd)
                        data.apply(impl)
                    else:
                        common_data.apply(impl)
                    impl.index = impl_count
                    impl_count += 1
                    impls.append(impl)

        # Add temporary feature definitions to the first implementation instance
        if impls:
            impls[0]._tempvar_defs = common_data.tempvar_defs + impls[0]._tempvar_defs

        return impls

    def _read_impl(self, reader_class, elem):
        """
        Read an implementation with the given reader class from the given
        element.

        If an exception is raised during reading, the exception is logged
        and None returned.

        @return: The read implementation or None.
        """
        try:
            return reader_class.read_impl(self.resource_ref, self.configuration, elem)
        except exceptions.ParseError as e:
            # Bug fix: the two arguments were previously passed as a single
            # tuple, which breaks %-style formatting against the two '%s'
            # placeholders inside logging.
            log.error("Error reading implementation '%s': %s", self.resource_ref, e)
        except Exception as e:
            utils.log_exception(log, e)
            # Continuation of validate_impl_set(): failures in global
            # validators are logged and skipped.
            except Exception, e:
                utils.log_exception(logging.getLogger('cone'),
                                    "Error while validating: %s: %s" \
                                    % (e.__class__.__name__, e))

    # Then run validation for individual implementations
    for impl in context.all_impls:
        for vc in validator_classes:
            # An implementation validator is applied only to implementation
            # types it declares support for.
            if issubclass(vc, ImplValidatorBase) and isinstance(impl, vc.SUPPORTED_IMPL_CLASSES):
                try:
                    validator = vc(context, impl)
                    validator.validate()
                except Exception, e:
                    utils.log_exception(logging.getLogger('cone'),
                                        "Error validating '%s': %s: %s" \
                                        % (impl, e.__class__.__name__, e))
    return context.problems

def _get_flat_impl_list(impl_set):
    """
    Return a flat list of all implementations in the given set.
    """
    result = []
    # Recursively collect an implementation and, when it is a container,
    # everything nested inside it.
    def add_to_result(impl):
        result.append(impl)
        if isinstance(impl, plugin.ImplContainer):
            for sub_impl in impl.impls:
                add_to_result(sub_impl)
    # NOTE(review): the chunk ends here; the loop feeding impl_set into
    # add_to_result() and the final 'return result' are not visible in
    # this view.
def generate(self, generation_context, ref):
    """
    Generates output based on templates.

    @param generation_context: The generation context used for creating
        the output files.
    @param ref: The implementation resource reference, used to resolve
        template and filter files relative to it.
    """
    if self.outputs is not None:
        for output in self.outputs:
            try:
                out_path = output.path
                out_filepath = os.path.join(out_path, output.filename)
                logging.getLogger('cone.templateml').debug(
                    "Output file '%s', encoding '%s'" % (out_filepath, output.encoding))
                out_file = generation_context.create_file(
                    out_filepath, implementation=self.implementation)

                # An external template file overrides the inline template
                # text.
                if output.template.path:
                    output.template.template = _read_relative_file(
                        generation_context.configuration, output.template.path, ref)

                dict_loader = DictLoader({'template': output.template.template})
                if output.newline == OutputFile.NEWLINE_WIN:
                    env = Environment(loader=dict_loader, newline_sequence='\r\n')
                else:
                    env = Environment(loader=dict_loader)

                # Register common filters first, then output file specific
                # filters (the duplicated registration code of the original
                # is factored into _add_filters below).
                self._add_filters(env, self.filters, generation_context, ref)
                self._add_filters(env, output.filters, generation_context, ref)

                template = env.get_template('template')
                file_string = template.render(self.context)
                out_file.write(
                    self._encode_data(file_string, output.encoding, output.bom))
                out_file.close()
            except Exception as e:
                utils.log_exception(
                    logging.getLogger('cone.templateml'),
                    '%r: Failed to generate output: %s: %s'
                    % (self.implementation, type(e).__name__, e))

def _add_filters(self, env, filters, generation_context, ref):
    """
    Register the given filter definitions into the template environment.

    @param env: The template Environment to register filters into.
    @param filters: Iterable of filter definition objects (name, code,
        path attributes).
    @param generation_context: Used to resolve filter files from the
        configuration.
    @param ref: The implementation resource reference for relative paths.
    """
    for filter_def in filters:
        if filter_def.path:
            filter_def.code = _read_relative_file(
                generation_context.configuration, filter_def.path, ref)
        if not filter_def.code:
            logging.getLogger('cone.templateml').warning(
                "Skipping empty filter definition.")
        else:
            # SECURITY NOTE: filter code from the ImplML file is executed
            # with eval/exec by design; templates must come from trusted
            # configurations only.
            # filter elements (lambda functions) have names
            if filter_def.name:
                env.filters[str(filter_def.name)] = eval(
                    filter_def.code.replace('\r', ''))
            # filters elements (any python functions) do not have names
            else:
                funcs = {}
                exec(filter_def.code.strip().replace('\r', ''), funcs)
                for k, v in funcs.items():
                    env.filters[k] = v