def _process_prop(self, obj):
    """Processes a single property for saving.

    Args:
        obj: dict with keys propUri, prop, processedData and _pre_save_data
    """
    # !!!!!!! the __merge_prop function will need to be revisited for
    # instances where we have multiple property entries i.e. a fieldList
    debug = False  # per-method debug output is currently disabled
    if debug: print("START RdfClass._process_prop -------------------\n")
    if len(make_list(obj['prop'])) > 1:
        obj = self.__merge_prop(obj)
    processors = obj['prop'].get("processors", [])
    _prop_uri = obj['propUri']
    # process properties that are not in the form
    if isinstance(obj['prop'].get("new"), NotInFormClass) and \
            not is_not_null(obj['prop'].get("old")):
        # process required properties
        if obj['prop'].get("required"):
            # run all processors: the processor determines how to
            # handle any old data
            if len(processors) > 0:
                for processor in processors.values():
                    obj = run_processor(processor, obj)
            # if the processors did not calculate a value for the
            # property, attempt to calculate one from the default
            # property settings
            if not obj['prop'].get('calcValue', False):
                obj_value = calculate_default_value(obj['prop'])
                obj['processedData'][obj['propUri']] = obj_value
        #else:
            # need to decide if you want to calculate properties
            # that are not required and not in the form
    # if the property is editable process the data
    elif obj['prop'].get("editable"):
        # if the old and new data are different
        if clean_iri(obj['prop'].get("new")) != \
                clean_iri(obj['prop'].get("old")):
            # if the new data is null and the property is not
            # required, mark the property for deletion
            if not is_not_null(obj['prop'].get("new")) and not \
                    obj['prop'].get("required"):
                obj['processedData'][_prop_uri] = DeleteProperty()
            # if the property has new data
            elif is_not_null(obj['prop'].get("new")):
                if len(processors) > 0:
                    for processor in processors.values():
                        obj = run_processor(processor, obj)
                    if not obj['prop'].get('calcValue', False):
                        obj['processedData'][_prop_uri] = \
                                obj['prop'].get("new")
                else:
                    obj['processedData'][_prop_uri] = obj['prop'].get("new")
    if debug: print("END RdfClass._process_prop -------------------\n")
    return obj
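# Illustrative sketch (hypothetical property URI and values): the shape of the
# ``obj`` dict that _process_prop expects, based on the keys it reads above.
_example_process_prop_obj = {
    "propUri": "schema_name",                 # hypothetical property URI
    "prop": {"new": "New value",              # value submitted in the form
             "old": "Old value",              # value currently stored
             "required": True,
             "editable": True,
             "processors": {}},
    "processedData": {},                      # filled in by _process_prop
    "_pre_save_data": {}
}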
def clean_iri(uri_string):
    '''Removes the <> signs from the start and end of an IRI string.

    Args:
        uri_string: the IRI string to clean
    '''
    if isinstance(uri_string, str):
        uri_string = uri_string.strip()
        if uri_string.startswith("<") and uri_string.endswith(">"):
            uri_string = uri_string[1:-1]
    return uri_string
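# Illustrative sanity check for clean_iri; a minimal sketch, not part of the
# framework's test suite, using hypothetical example IRIs.  Run the module
# directly to exercise it.
if __name__ == "__main__":
    assert clean_iri("<http://example.org/item/1>") == "http://example.org/item/1"
    assert clean_iri("http://example.org/item/1") == "http://example.org/item/1"
    assert clean_iri(None) is None  # non-string values pass through unchanged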
def rdf_api(api_name, id_value=None, ext=None):
    """View for processing RDF framework API calls

    Args:
        api_name -- url path of the api (new, edit)
        ext -- url extension for the api i.e. (.json, .html)

    params:
        id -- the item to look up
    """
    debug = False  # per-view debug output is currently disabled
    if debug: print("START rdf_api blueprint.py ---------------------------\n")
    api_responder = falcon.API()  # note: currently unused
    _api_path = "|".join(remove_null([api_name, ext]))
    _api_exists = rdfw().api_exists(_api_path)
    if _api_exists is False:
        return render_template(
            "error_page_template.html",
            error_message="The web address is invalid")
    api_uri = _api_exists.get("api_uri")
    # generate the api class
    base_url = "%s%s" % (request.url_root[:-1],
                         url_for("open_badge.base_path"))
    current_url = request.url
    base_api_url = "%s%sapi/" % (request.url_root[:-1],
                                 url_for("open_badge.base_path"))
    api_url = request.base_url
    api_class = rdf_framework_api_factory(_api_path,
                                          base_url=base_url,
                                          current_url=current_url,
                                          base_api_url=base_api_url,
                                          api_url=api_url)
    # if the request method is POST
    if request.method == "POST":
        # let the api load with the post data
        api = api_class(id_value=id_value)
        # validate the form
        if api.validate():
            # if validated, save the form
            api.save()
            if api.save_state == "success":
                if debug: print("END rdf_api blueprint.py ---POST--------\n")
                return api.return_message
    # if not POST, check the args and api instance/extension
    else:
        api = api_class()
        api_data = rdfw().get_obj_data(api, id_value=id_value)
        if not (len(api_data['query_data']) > 0):
            '''return render_template(
                "error_page_template.html",
                error_message="The item does not exist") '''
            return abort(400)
        else:
            return_type = api.rdf_instructions.get("kds_returnType")
            if return_type == "file":
                repo_uri = clean_iri(list(api_data['obj_json'].values())[0])
                repo_link = urlopen(repo_uri)
                repo_file = repo_link.read()
                # The File wrapper is causing issues in the live environment
                # and needs to be deleted before sending the byte stream
                if debug: print("\t wsgi.file_wrapper pre: ",
                                request.environ.get('wsgi.file_wrapper'))
                if request.environ.get('wsgi.file_wrapper') is not None:
                    del request.environ['wsgi.file_wrapper']
                if debug: print("\t wsgi.file_wrapper post: ",
                                request.environ.get('wsgi.file_wrapper'))
                if debug: print("END rdf_api blueprint.py --- file send ---\n")
                return send_file(
                    io.BytesIO(repo_file),
                    attachment_filename="%s.%s" % (id_value, ext),
                    mimetype=api.rdf_instructions.get("kds_mimeType"))
            else:
                if debug: print("END rdf_api blueprint.py --- json --------\n")
                return jsonify(api_data['obj_json'])
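# Illustrative sketch only: one way the rdf_api view could be attached to a
# Flask blueprint.  The helper name and URL rules below are hypothetical; only
# the "open_badge" endpoint prefix is taken from the url_for() calls in
# rdf_api above.
def _example_register_rdf_api(app):
    from flask import Blueprint
    open_badge = Blueprint("open_badge", __name__)
    # GET/POST an api by name, optionally with an item id and extension
    open_badge.add_url_rule("/api/<api_name>",
                            view_func=rdf_api, methods=["GET", "POST"])
    open_badge.add_url_rule("/api/<api_name>/<id_value>.<ext>",
                            view_func=rdf_api, methods=["GET", "POST"])
    app.register_blueprint(open_badge)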
def password_processor(processor, obj, prop, mode="save"):
    """Handles application password actions

    Returns:
        the modified passed-in obj
    """
    debug = bool(DEBUG)
    if debug: print("START password_processor --------------------------\n")
    salt_url = "kdr_SaltProcessor"
    if mode == "save":
        # find the salt property
        _class_uri = obj['prop'].get("classUri")
        _class_properties = getattr(get_framework(), _class_uri).kds_properties
        salt_property = None
        # find the property Uri that stores the salt value
        for _class_prop in _class_properties.values():
            _processors = clean_processors([make_list(
                    _class_prop.get("kds_propertyProcessing", {}))])
            for _processor in _processors.values():
                if _processor.get("rdf_type") == salt_url:
                    salt_property = _class_prop.get("kds_propUri")
                    salt_processor_dict = _processor
    # if in save mode create a hashed password
    if mode == "save":
        # only hash the password when a new value was supplied
        if is_not_null(obj['prop']['new']) or obj['prop']['new'] != 'None':
            # if a salt has not been created call the salt processor
            if not obj['processedData'].get(salt_property):
                obj = salt_processor(salt_processor_dict,
                                     obj,
                                     mode,
                                     salt_property=salt_property)
            # create the hash
            salt = obj['processedData'].get(salt_property)
            _hash_value = sha256_crypt.encrypt(obj['prop']['new'] + salt)
            # assign the hashed password to the processedData
            obj['processedData'][obj['propUri']] = _hash_value
            obj['prop']['calcValue'] = True
        if debug: print("END password_processor mode = save -------\n")
        return obj
    elif mode == "verify":
        # verify the supplied password matches the saved password
        if not len(obj.query_data) > 0:
            setattr(prop, "password_verified", False)
            return obj
        _class_uri = prop.kds_classUri
        _class_properties = getattr(get_framework(), _class_uri).kds_properties
        salt_property = None
        # find the property Uri that stores the salt value
        for _class_prop in _class_properties.values():
            _processors = clean_processors([make_list(
                    _class_prop.get("kds_propertyProcessing", {}))])
            for _processor in _processors.values():
                if _processor.get("rdf_type") == salt_url:
                    salt_property = _class_prop.get("kds_propUri")
                    salt_processor_dict = _processor
        # find the salt value in the query_data
        salt_value = None
        for subject, props in obj.query_data.items():
            if clean_iri(props.get("rdf_type")) == _class_uri:
                salt_value = props.get(salt_property)
                hashed_password = props.get(prop.kds_propUri)
                break
        if debug: print(salt_value, " - ", hashed_password, " - ", prop.data)
        setattr(prop, "password_verified",
                sha256_crypt.verify(prop.data + salt_value, hashed_password))
        if debug: print("END password_processor mode = verify -------\n")
        return obj
    if mode == "load":
        if debug: print("END password_processor mode = load -------\n")
        return obj
    return obj
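# Illustrative sketch of the hashing scheme password_processor relies on: the
# application salt is appended to the clear-text password before it is handed
# to passlib's sha256_crypt.  The password and salt values below are
# hypothetical placeholders.
def _example_password_round_trip():
    from passlib.hash import sha256_crypt
    salt = "app-generated-salt"  # stands in for the stored salt property value
    hashed = sha256_crypt.encrypt("secret" + salt)  # .hash() in newer passlib
    return sha256_crypt.verify("secret" + salt, hashed)  # -> True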
def _run_save_query(self, save_query_obj, subject_uri=None):
    """Runs the generated save query against the class's configured save
    location (repository, triplestore, elasticsearch or sql_database).

    Args:
        save_query_obj: dict with the query, subjectUri and new_status keys
        subject_uri: optional subject URI; defaults to the one in
            save_query_obj
    """
    debug = bool(DEBUG)
    _save_query = save_query_obj.get("query")
    if debug: print("START RdfClass._run_save_query -------------------\n")
    if debug: print("triplestore: ", self.triplestore_url)
    if debug: print("repository: ", self.repository_url)
    if not subject_uri:
        subject_uri = save_query_obj.get("subjectUri")
    if debug: print("_save_query:\n", _save_query)
    if _save_query:
        # a "[" at the start of the save query denotes a blanknode;
        # return the blanknode as the query result
        if _save_query[:1] == "[":
            object_value = _save_query
        else:
            # if there is no subject_uri create a new entry in the
            # repository
            if subject_uri == "<>" or save_query_obj.get("new_status"):
                if debug: print("Enter New")
                if self.kds_saveLocation == "repository":
                    repository_result = requests.post(
                        self.repository_url,
                        data=_save_query,
                        headers={"Content-type": "text/turtle"})
                    if debug: print("repository_result: ",
                                    repository_result,
                                    " ",
                                    repository_result.text)
                    object_value = repository_result.text
                elif self.kds_saveLocation == "triplestore":
                    triplestore_result = requests.post(
                        url=self.triplestore_url,
                        headers={"Content-Type": "text/turtle"},
                        data=_save_query)
                    if debug: print("triplestore_result: ",
                                    triplestore_result,
                                    " ",
                                    triplestore_result.text)
                    object_value = subject_uri
                elif self.kds_saveLocation == "elasticsearch":
                    print("**************** ES Connection NOT Built")
                elif self.kds_saveLocation == "sql_database":
                    print("**************** SQL Connection NOT Built")
            # if the subject uri exists send an update query to the
            # specified datastore
            else:
                if debug: print("Enter Update")
                if self.kds_saveLocation == "repository":
                    _headers = {"Content-type": "application/sparql-update"}
                    _url = clean_iri(subject_uri)
                    repository_result = requests.patch(_url,
                                                       data=_save_query,
                                                       headers=_headers)
                    if debug: print("repository_result: ",
                                    repository_result,
                                    " ",
                                    repository_result.text)
                    object_value = iri(subject_uri)
                elif self.kds_saveLocation == "triplestore":
                    _url = self.triplestore_url
                    triplestore_result = requests.post(
                        _url,
                        data={"update": _save_query})
                    if debug: print("triplestore_result: ",
                                    triplestore_result,
                                    " ",
                                    triplestore_result.text)
                    object_value = iri(subject_uri)
                elif self.kds_saveLocation == "elasticsearch":
                    print("**************** ES Connection NOT Built")
                elif self.kds_saveLocation == "sql_database":
                    print("**************** SQL Connection NOT Built")
        if debug: print("END RdfClass._run_save_query ----------------\n")
        return {"status": "success",
                "lastSave": {"objectValue": object_value}}
    else:
        if debug: print("Enter No Data to Save")
        if debug: print("END RdfClass._run_save_query ---NO DATA-------\n")
        return {"status": "success",
                "lastSave": {"objectValue": iri(subject_uri),
                             "comment": "No data to Save"}}
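# Illustrative sketch (hypothetical values): the input and result shapes that
# _run_save_query works with, inferred from the .get() calls and return
# statements above.
_example_save_query_obj = {
    "query": "<> a <http://example.org/Thing> .",   # turtle or SPARQL update
    "subjectUri": "<http://example.org/item/1>",
    "new_status": False
}
_example_save_result = {
    "status": "success",
    "lastSave": {"objectValue": "<http://example.org/item/1>"}
}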