Exemplo n.º 1
0
    def remove_data_provider(self, dsd_model_id):
        """ Removes the data provider whose DSD model id matches.

        Parameters
        ----------
        dsd_model_id : str
            Unique model id of the data source definition to remove

        Raises
        ------
        AkashicError
            If no provider with that model id exists, or if facts of
            that template are still present in the CLIPS environment
        """
        to_remove = None
        for dp in self.data_providers:
            if dp.dsd.model_id == dsd_model_id:
                to_remove = dp
                # Model ids are unique (enforced on insert), so the
                # first match is the only match.
                break

        if to_remove is None:
            message = "Data provider with model id '{0}' " \
                      "cannot be found. Therefore it cannot " \
                      "be removed." \
                      .format(dsd_model_id)
            raise AkashicError(message, 0, 0, ErrType.SYSTEM)

        self.data_providers.remove(to_remove)
        self.refresh_data_proviers_in_bridges()

        try:
            self.undefine_template(dsd_model_id)
        except CLIPSError as ce:
            print(ce)
            message = "Facts of type '{0}' are still present " \
                      "in environment. Please remove all facts " \
                      "related to that model, then try again." \
                      .format(dsd_model_id)
            raise AkashicError(message, 0, 0, ErrType.SYSTEM)
Exemplo n.º 2
0
    def remove_rule(self, rule_name):
        """ Removes CLIPS rule from the environment

        Parameters
        ----------
        rule_name : str
            CLIPS rule name

        Raises
        ------
        AkashicError
            If the rule cannot be found or is not deletable
        """

        try:
            rule = self.env.find_rule(rule_name)
        except Exception:
            # find_rule may either raise or return a falsy value for a
            # missing rule; normalize both failure modes to the single
            # "not found" error below (previously a bare 'except:' with
            # a duplicated message block).
            rule = None

        if not rule:
            message = "Rule with name '{0}' is not found. " \
                      .format(rule_name)
            raise AkashicError(message, 0, 0, ErrType.SYSTEM)
        if not rule.deletable:
            message = "Rule with name '{0}' is not deletable. " \
                      .format(rule_name)
            raise AkashicError(message, 0, 0, ErrType.SYSTEM)

        rule.undefine()
Exemplo n.º 3
0
    def load(self, dsd_string):
        """ Loads data-provider specification from given string

        Parameters
        ----------
        dsd_string : str
            String containing data source definition

        Raises
        ------
        AkashicError
            On syntactic or semantic errors in the definition
        """

        try:
            self.dsd = self.meta_model.model_from_str(dsd_string)
            return
        except RecursionError:
            # NOTE: previously bound as 'as re', which shadowed the
            # 're' module; the exception object was never used.
            message = "Infinite left recursion is detected. " \
                      "There was unknown syntactic error."
            raise AkashicError(message, 0, 0, ErrType.SYNTACTIC)
        except TextXSyntaxError as syntax_error:
            raise AkashicError(syntax_error.message, syntax_error.line,
                               syntax_error.col, ErrType.SYNTACTIC)
        except TextXSemanticError as semantic_error:
            raise AkashicError(semantic_error.message, semantic_error.line,
                               semantic_error.col, ErrType.SEMANTIC)
Exemplo n.º 4
0
    def check_field_types(self, use_json_as, operation, json_object):
        """ Checks JSON object field types against fields defined in DSD

        Parameters
        ----------
        use_json_as : str
            Value which defines if JSON originates from web server
            'request' or 'response'
        operation : str
            Web service operation / method,
            possible values: "create", "read_one", "read_multiple",
                             "update", "delete"
            (currently unused; kept for interface compatibility)
        json_object : object
            Parsed JSON object

        Raises
        ------
        AkashicError
            If JSON object field types mismatch fields defined in DSD,
            or if use_json_as is neither 'request' nor 'response'
        """

        if use_json_as == "response":
            json_path = lambda field : field.response_one_json_path
        elif use_json_as == "request":
            json_path = lambda field : "$." + field.field_name
        else:
            # BUG FIX: an unknown value previously left json_path as None
            # and crashed below with "'NoneType' object is not callable".
            message = "Unknown 'use_json_as' value '{0}'. " \
                      "Expected 'request' or 'response'." \
                      .format(use_json_as)
            raise AkashicError(message, 0, 0, ErrType.SYSTEM)

        for field in self.dsd.fields:

            jsonpath_expr = parse(json_path(field))
            result = [match.value for match in jsonpath_expr.find(json_object)]

            if len(result) == 0:
                line, col = self.dsd._tx_parser \
                            .pos_to_linecol(field._tx_position)
                message = "Field '{0}' is not present in json object." \
                          .format(field.field_name)
                raise AkashicError(message, line, col, ErrType.SEMANTIC)

            if len(result) > 1:
                line, col = self.dsd._tx_parser \
                            .pos_to_linecol(field._tx_position)
                message = "More than one field with same name '{0}' " \
                          "is present in json object." \
                          .format(field.field_name)
                raise AkashicError(message, line, col, ErrType.SEMANTIC)

            expected_type = clips_to_py_type(field.type)

            if not isinstance(result[0], expected_type):
                line, col = self.dsd._tx_parser \
                            .pos_to_linecol(field._tx_position)
                message = "Type of field '{0}' does not match type from " \
                          "provided data. Expected '{1}', " \
                          "but received '{2}'." \
                          .format(field.field_name,
                                  str(field.type),
                                  py_to_clips_type(result[0].__class__))
                raise AkashicError(message, line, col, ErrType.SEMANTIC)
Exemplo n.º 5
0
    def check_if_dsd_provides_web_op(self, operation):
        """ Verifies that the loaded DSD declares web APIs and the
            requested operation.

        Parameters
        ----------
        operation : str
            Name of the web operation attribute to look up

        Raises
        ------
        AkashicError
            If the DSD has no 'apis' section or lacks the operation
        """
        if not hasattr(self.dsd, 'apis'):
            err_line, err_col = self.dsd._tx_parser \
                                .pos_to_linecol(self.dsd._tx_position)
            raise AkashicError(
                "Data source '{0}' does not provide web operations."
                .format(self.dsd.model_id),
                err_line, err_col, ErrType.SEMANTIC)

        if not hasattr(self.dsd.apis, operation):
            err_line, err_col = self.dsd._tx_parser \
                                .pos_to_linecol(self.dsd.apis._tx_position)
            raise AkashicError(
                "Data source '{0}' does not provide '{1}' operation."
                .format(self.dsd.model_id, operation),
                err_line, err_col, ErrType.SEMANTIC)
Exemplo n.º 6
0
 def check_web_reflection_data(self):
     """ Ensures the DSD defines at least one api when web reflection
         is enabled.

     Raises
     ------
     AkashicError
         If the DSD has no 'apis' section
     """
     if hasattr(self.dsd, 'apis'):
         return
     err_line, err_col = self.dsd._tx_parser \
                         .pos_to_linecol(self.dsd._tx_position)
     raise AkashicError("Web reflection is turned on. There must "
                        "be at least one defined api in DSD.",
                        err_line, err_col, ErrType.SEMANTIC)
Exemplo n.º 7
0
 def check_rule_name(self, rule_name):
     """ Raises if a CLIPS rule with the given name already exists.

     Parameters
     ----------
     rule_name : str
         Candidate rule name to check for uniqueness

     Raises
     ------
     AkashicError
         If the environment already contains a rule with that name
     """
     # any() short-circuits at the first match, like the original loop.
     if any(existing.name == rule_name for existing in self.env.rules()):
         raise AkashicError(
             "Rule with name '{0}' already exists. "
             "Please change rule name and try again."
             .format(rule_name),
             0, 0, ErrType.SYSTEM)
Exemplo n.º 8
0
    def check_url_mapping(self, operation):
        """ Checks single URL mapping

        Parameters
        ----------
        operation : object
            Web service operation / method

        Raises
        ------
        AkashicError
            If URL map fields mismatch fields specified in DSD
        """

        # TODO: Add check if model-ids are ok
        d_line, d_col = self.dsd._tx_parser \
                            .pos_to_linecol(operation._tx_position)
        url_map = operation.url_map

        # Collect every '{placeholder}' token from the url-map string.
        url_fields = []
        for m in re.finditer(r"\{(((?!\{|\}).)*)\}", url_map):
            url_fields.append(m.group(1))

        # Every referenced model must name a placeholder that exists.
        for ref_obj in operation.ref_models:
            ref = ref_obj.url_placement
            if ref in url_fields:
                url_fields.remove(ref)
            else:
                line, col = self.dsd._tx_parser \
                                .pos_to_linecol(ref_obj._tx_position)
                message = "Url placement '{0}' in operation " \
                          "cannot be found in url-map setting." \
                          .format(ref)
                raise AkashicError(message, 
                                   line, 
                                   col, 
                                   ErrType.SEMANTIC)

        # Placeholders left over were never referenced by any setting.
        if len(url_fields) > 0:
            fields_left_string = ", ".join(url_fields)
            # BUG FIX: message previously read "url palcements".
            message = "Following url placements '{0}' inside " \
                      "of url-map setting '{1}' in operation " \
                      "are not referenced in settings." \
                      .format(fields_left_string, url_map)
            raise AkashicError(message, 
                               d_line, 
                               d_col, 
                               ErrType.SEMANTIC)
Exemplo n.º 9
0
    def import_bridge(self, bridge):
        """ Registers a bridge object and its exposed functions with
            the CLIPS environment.

        Parameters
        ----------
        bridge : object
            Bridge instance exposing an 'exposed_functions' list of
            dicts, each with 'function', 'num_of_args' and
            'return_type' keys

        Raises
        ------
        AkashicError
            If the bridge is already imported, is malformed, or exposes
            a function whose name is already taken
        """
        if bridge.__class__.__name__ in self.bridges:
            message = "Bridge with class name '{0}' " \
                      "already exists." \
                      .format(bridge.__class__.__name__)
            raise AkashicError(message, 0, 0, ErrType.SYSTEM)

        if not hasattr(bridge, "exposed_functions"):
            message = "Bridge with class name '{0}' is malformed. " \
                      "'exposed_functions' array is not found." \
                      .format(bridge.__class__.__name__)
            # BUG FIX: this error was constructed but never raised, so a
            # malformed bridge crashed below with AttributeError instead.
            raise AkashicError(message, 0, 0, ErrType.SYSTEM)

        for f in bridge.exposed_functions:
            if (not "function" in f) or (not "num_of_args" in f) or \
            (not "return_type" in f):
                message = "Function entry is malformed. " \
                          "'exposed_functions' array must contain" \
                          "dictionary with 'function' and 'num_of_args' " \
                          "and 'return_type' fields."
                raise AkashicError(message, 0, 0, ErrType.SYSTEM)

            if f["function"].__name__ in self.functions:
                message = "Function with name '{0}' is already " \
                          "defined in bridge with class name '{1}'." \
                          .format(f["function"].__name__,
                                  self.functions[f["function"].__name__] \
                                  ["bridge"].__class__.__name__)
                raise AkashicError(message, 0, 0, ErrType.SYSTEM)

            if f["function"].__name__ in self.built_in_functions:
                message = "Function with name '{0}' is already " \
                          "defined as built-in function." \
                          .format(f["function"].__name__)
                raise AkashicError(message, 0, 0, ErrType.SYSTEM)

            self.functions[f["function"].__name__] = {
                "bridge": bridge,
                "num_of_args": f["num_of_args"],
                "return_type": f["return_type"]
            }
            # Re-assigned each iteration (idempotent); kept inside the
            # loop so a bridge with zero functions is never registered,
            # matching the original behavior.
            self.bridges[bridge.__class__.__name__] = bridge
            self.env.define_function(f["function"])
Exemplo n.º 10
0
    def construct_query(self, **kwargs):
        """ Constructs a search query dictionary for 'read_multiple'.

        Parameters
        ----------
        **kwargs: dict
            Pairs 'query_field: value' overriding the defaults

        Details
        -------
        Default values come from the DSD's read_multiple api settings;
        any given keyword argument overrides the matching default.

        Returns
        -------
        dict
            Constructed search query

        Raises
        ------
        AkashicError
            If a query field is not defined in the data source definition
        """

        self.check_if_dsd_provides_web_op("read_multiple")

        rm = self.dsd.apis.read_multiple
        query = {
            rm.page_index_url_placement: rm.default_page_index,
            rm.page_row_count_url_placement: rm.default_page_row_count,
            rm.search_fields_url_placement: "",
            rm.search_strings_url_placement: "",
            rm.sort_field_url_placement:
                self.get_primary_key_field().field_name,
            rm.sort_order_url_placement: "ASC"
        }

        # Reject any field whose url placement is not declared in the DSD.
        for field_name, field_value in kwargs.items():
            if field_name not in query:
                line, col = \
                    self.dsd._tx_parser \
                    .pos_to_linecol(rm._tx_position)
                message = "Query field {0} is not defined " \
                          "in data source definition." \
                          .format(field_name)
                raise AkashicError(message, line, col, ErrType.SEMANTIC)
            query[field_name] = field_value

        return query
Exemplo n.º 11
0
 def check_field_list(self):
     """ Validates that exactly one DSD field is marked as primary-key.

     Raises
     ------
     AkashicError
         If zero, or more than one, primary-key field is defined
     """
     # textX keeps the surrounding quotes in the 'use_as' value.
     pk_count = sum(
         1 for f in self.dsd.fields if f.use_as == "\"primary-key\"")

     if pk_count == 1:
         return
     line, col = self.dsd._tx_parser \
                 .pos_to_linecol(self.dsd._tx_position)
     message = "There must be one and only one primary-key field " \
               "in DSD field list, but {0} found." \
               .format(str(pk_count))
     raise AkashicError(message, line, col, ErrType.SEMANTIC)
Exemplo n.º 12
0
    def insert_data_provider(self, data_provider):
        """ Registers a new data provider, enforcing unique model ids.

        Parameters
        ----------
        data_provider : object
            Data provider whose DSD carries a unique model_id

        Raises
        ------
        AkashicError
            If a provider with the same model id is already registered
        """
        new_id = data_provider.dsd.model_id
        # Reject duplicates before mutating any state.
        for existing in self.data_providers:
            if existing.dsd.model_id == new_id:
                message = "Data provider with model id '{0}' " \
                          "already exists. Please change data " \
                          "provider model id and try again." \
                          .format(new_id)
                raise AkashicError(message, 0, 0, ErrType.SYSTEM)

        self.data_providers.append(data_provider)

        self.define_templates_of_dsds([data_provider])
        self.refresh_data_proviers_in_bridges()
    def build_special_pattern(self, data_locator_table, used_vars, expression,
                              expression_object):
        """ Builds a CLIPS conditional-element pattern embedding the
            given special expression into the template's last field.

        Parameters
        ----------
        data_locator_table : object
            Table mapping template names to their located fields
        used_vars : list
            Variable names referenced by data locators in the expression
        used by this pattern
        expression : str
            CLIPS expression attached to the final field as a '&:'
            predicate constraint
        expression_object : object
            textX model object, used only for error positioning

        Returns
        -------
        str
            CLIPS pattern statement for the single referenced template,
            or a bare '( template )' pattern when no field matched

        Raises
        ------
        AkashicError
            If the expression references more than one template
        """
        diff_temp_count = self.count_different_templates(
            data_locator_table, used_vars)

        print("NUM of used vars for DLs: " + str(len(used_vars)))
        print("NUM of diff templates: " + str(diff_temp_count))
        print("NUM of reg tempaltes: " + \
            str(len(data_locator_table.table.items())))

        if diff_temp_count > 1:
            line, col = self.dsd._tx_parser \
                        .pos_to_linecol(expression_object._tx_position)
            message = "Total number of different templates referenced " \
                      "inside of single Special Expression must be 1, " \
                      "but {0} found.".format(diff_temp_count)
            raise AkashicError(message, line, col, ErrType.SEMANTIC)

        last_template_name = "Error$5"
        for template_name, template in data_locator_table.table.items():
            last_template_name = template_name
            clips_field_list = []

            field_list = [(k, v) for k, v in template.fields.items()]
            # All fields except the last become plain variable bindings.
            for i in range(0, len(field_list) - 1):
                field_name = field_list[i][0]
                field = field_list[i][1]
                if field.var_name in used_vars:
                    clips_field_list.append("(" + field_name + " " + \
                                            field.var_name  + \
                                            ")")

            # Build special field: the last field carries the expression
            # as a '&:' predicate constraint.
            field_name = field_list[len(field_list) - 1][0]
            field = field_list[len(field_list) - 1][1]
            if field.var_name in used_vars:
                clips_field_list.append("(" + field_name + " " + \
                                        field.var_name + "&:" + \
                                        expression  + ")")

            if len(clips_field_list) > 0:
                clips_statement = "(" + template_name + " " + \
                                  " ".join(clips_field_list) + ")"
                # BUG FIX: an unreachable append to the undefined name
                # 'clips_statement_list' used to follow this return.
                return clips_statement

        return "( " + last_template_name + " )"
Exemplo n.º 14
0
 def insert_rule(self, rule_name, rule):
     """ Inserts new CLIPS rule into the environment

     Parameters
     ----------
     rule_name : str
         CLIPS rule name
     rule : str
         CLIPS rule in string form

     Raises
     ------
     AkashicError
         If building the rule in the CLIPS environment fails
     """
     try:
         self.check_rule_name(rule_name)
         self.env.build(rule)
     except CLIPSError as clips_err:
         print(clips_err)
         raise AkashicError(
             "Error occured while adding rule '{0}', "
             "Rule with same name MAY be already present."
             .format(rule_name),
             0, 0, ErrType.SYSTEM)
Exemplo n.º 15
0
    def fill_data_map(self, url_map, **kwargs):
        """ Generates real URL by filling given URL map with provided data

        Parameters
        ----------
        url_map : str
            URL map is regular URL string containing '{variable_name}'
            in places of real key data
        **kwargs: dict
            Dictionary of pairs 'variable_name: value'

        Details
        -------
        A regular expression finds all '{variable_name}' occurrences;
        each is then replaced with its provided value.

        Returns
        -------
        url_map : str
            Built real url

        Raises
        ------
        AkashicError
            If number of provided variables does not match number of
            variables in url_map
        """

        # Collect every '{placeholder}' token (no nested braces).
        url_fields = []
        for m in re.finditer(r"\{(((?!\{|\}).)*)\}", url_map):
            url_fields.append(m.group(1))

        if len(url_fields) != len(kwargs):
            message = "Failed to fill url map '{0}'. " \
                      "Insufficient number of arguments. " \
                      "Expected {1}, but found {2}" \
                      .format(url_map,
                              len(url_fields),
                              len(kwargs))
            raise AkashicError(message, 0, 0, ErrType.SYSTEM)

        for key, value in kwargs.items():
            # BUG FIX: plain string replace instead of re.sub — the old
            # code built a pattern from an unescaped key (regex
            # metacharacters would break it) and passed str(value) as a
            # regex replacement template, mis-expanding backslashes.
            url_map = url_map.replace("{" + key + "}", str(value))

        return url_map
Exemplo n.º 16
0
    def generate_clips_fact(self, json_object, json_path_func):
        """ Builds a CLIPS fact assertion string from a parsed JSON object

        Parameters
        ----------
        json_object : object
            Parsed JSON object
        json_path_func: function
            Callable mapping a DSD field to the JSON path expression used
            to locate that field's value inside json_object

        Details
        -------
        The DSD model_id serves as the CLIPS fact (template) name.
        CLIPS has no BOOLEAN type, so booleans become INTEGER 0/1 and
        STRING values are wrapped in double quotes.

        Returns
        -------
        clips_fact: str
            Generated CLIPS fact definition statement

        Raises
        ------
        AkashicError
            If a DSD field cannot be located in json_object
        """

        print("JSON")
        print(json.dumps(json_object))
        print("\n")

        rendered_fields = []
        for field in self.dsd.fields:
            path_expr = parse(str(json_path_func(field)))
            matches = [m.value for m in path_expr.find(json_object)]

            if not matches or len(matches) < 1:
                line, col = self.dsd._tx_parser \
                            .pos_to_linecol(field._tx_position)
                message = "Field '{0}' in DSD is not " \
                          "matched in provided JSON data." \
                          .format(field.field_name)
                raise AkashicError(message, line, col, ErrType.SEMANTIC)

            raw_value = matches[0]

            # Map the JSON value onto its CLIPS representation.
            clips_value = None
            if field.type in ("INTEGER", "FLOAT"):
                clips_value = raw_value
            elif field.type == "BOOLEAN":
                clips_value = 1 if raw_value == True else 0
            elif field.type == "STRING":
                clips_value = "\"{0}\"".format(raw_value)

            rendered_fields.append("\t(" + str(field.field_name) + " " +
                                   str(clips_value) + ")")

        return "(" + str(self.dsd.model_id) + \
               "\n".join(rendered_fields) + ")"