def _reset(self, username=None):
    """Reset a user's password and e-mail the new credentials.

    Falls back to the authenticated ``REMOTE_USER`` when no username is
    given. Aborts 401 when no user can be determined and 400 when the
    user lookup fails. Redirects to the reset_result page on success.
    """
    username = username or request.environ.get('REMOTE_USER', False)
    if not username:
        abort(401)

    try:
        user = h.get_user(username)
    except Exception:
        # IMPROVED: narrowed from a bare except, which also swallowed
        # SystemExit/KeyboardInterrupt. Any lookup failure still maps
        # to a 400 Bad Request.
        abort(400)

    new_password = user.reset_password()

    msg = Message("*****@*****.**", user.email,
                  "InPhO password reset")
    msg.plain = """
%(name)s, your password at the Indiana Philosophy Ontology (InPhO) has been changed to:

Username: %(uname)s
Password: %(passwd)s

The Indiana Philosophy Ontology (InPhO) Team
[email protected]
""" % {'passwd': new_password, 'uname': user.username,
       'name': user.fullname or user.username or ''}
    msg.send()

    # persist the new password hash
    Session.commit()

    h.redirect(h.url(controller='account', action='reset_result'))
def _reset(self, username=None):
    """Reset a user's password and e-mail the new credentials.

    Falls back to the authenticated ``REMOTE_USER`` when no username is
    given. Aborts 401 when no user can be determined and 400 when the
    user lookup fails. Redirects to the reset_result page on success.
    """
    username = username or request.environ.get('REMOTE_USER', False)
    if not username:
        abort(401)

    try:
        user = h.get_user(username)
    except Exception:
        # IMPROVED: narrowed from a bare except, which also swallowed
        # SystemExit/KeyboardInterrupt. Any lookup failure still maps
        # to a 400 Bad Request.
        abort(400)

    new_password = user.reset_password()

    msg = Message("*****@*****.**", user.email,
                  "InPhO password reset")
    msg.plain = """
%(name)s, your password at the Indiana Philosophy Ontology (InPhO) has been changed to:

Username: %(uname)s
Password: %(passwd)s

The Indiana Philosophy Ontology (InPhO) Team
[email protected]
""" % {'passwd' : new_password, 'uname' : user.username,
       'name' : user.fullname or user.username or ''}
    msg.send()

    # persist the new password hash
    Session.commit()

    h.redirect(h.url(controller='account', action='reset_result'))
def _delete_evaluation(self, evaltype, id, id2, uid=None, username=None):
    """Clear one evaluation dimension (``evaltype``) by setting it to -1.

    Requires a logged-in user; only the evaluation's owner or an admin
    may delete. Aborts 401 on auth failure, 404 when no evaluation
    exists for the given ids/user.
    """
    if not h.auth.is_logged_in():
        abort(401)

    id2 = request.params.get('id2', id2)
    uid = request.params.get('uid', uid)
    username = request.params.get('username', username)

    evaluation = self._get_evaluation(id, id2, uid, username,
                                      autoCreate=False)
    if not evaluation:
        abort(404)

    current_uid = h.get_user(request.environ['REMOTE_USER']).ID
    # BUG FIX: was "or", which rejected every non-admin -- including the
    # evaluation's own author. Owners OR admins may delete; reject only
    # when the caller is neither.
    if evaluation.uid != current_uid and not h.auth.is_admin():
        abort(401)

    setattr(evaluation, evaltype, -1)

    # Delete evaluation if this eliminates both settings, new db schema
    # will eliminate this need
    #if evaluation.generality == -1 and evaluation.relatedness == -1:
    #    h.delete_obj(evaluation)

    Session.flush()
    Session.commit()

    response.status_int = 200
    return "OK"
def submit_changes(self):
    '''
    Validates the submitted profile edit form and commits the changes.
    Restricted to ``POST`` requests. On success, redirects to the
    result action to prevent resubmission.
    '''
    if not h.auth.is_logged_in():
        abort(401)

    c.user = h.get_user(request.environ['REMOTE_USER'])

    new_password = self.form_result['password']
    if new_password != '':
        c.user.set_password(new_password)

    # TODO: Enable area editing
    #c.user.first_area_id=self.form_result['first_area'],
    #user.first_area_level=self.form_result['first_area_level'],
    #if self.form_result['second_area']:
    #    c.user.second_area_id=self.form_result['second_area'],
    #    c.user.second_area_level=self.form_result['second_area_level']

    c.user.fullname = self.form_result['fullname']

    Session.flush()
    Session.commit()

    h.redirect(h.url(controller='account', action='profile',
                     message='edited'))
def update_partial_graph(entity_type, occurrences):
    """
    Takes an entity type and a SQL filename and only updates part of the
    graph. For use with single article statistical information.
    """
    # Unfinished stub: everything below the raise is a design sketch and
    # is never executed.
    raise NotImplementedError

    # Import SQL statements
    # Map the entity type to its edge table and ORM edge class.
    if entity_type == Idea:
        table = "idea_graph_edges"
        type = IdeaGraphEdge
    elif entity_type == Thinker:
        table = "thinker_graph_edges"
        type = ThinkerGraphEdge
    else:
        table = "idea_thinker_graph_edges"
        type = IdeaThinkerGraphEdge

    edges = Session.query(type)
    # filter edges query to only the key term
    for ante, occurs in occurrences.iteritems():
        for cons, occurs_in in occurs.iteritems():
            # select the proper edge from result set
            # if edge does not exist, create it and add to session
            #update edge
            # NOTE(review): `edge` is undefined here -- placeholder code.
            edge.occurs_in = occurs_in

    # commit changes
    Session.commit()
def submit_changes(self):
    '''
    Applies a validated profile-edit form to the signed-in user and
    commits. ``POST`` only; redirects afterwards so a refresh cannot
    resubmit the form.
    '''
    if not h.auth.is_logged_in():
        abort(401)

    form = self.form_result
    c.user = h.get_user(request.environ['REMOTE_USER'])

    if form['password'] != '':
        c.user.set_password(form['password'])

    # TODO: Enable area editing
    #c.user.first_area_id=self.form_result['first_area'],
    #user.first_area_level=self.form_result['first_area_level'],
    #if self.form_result['second_area']:
    #    c.user.second_area_id=self.form_result['second_area'],
    #    c.user.second_area_level=self.form_result['second_area_level']

    c.user.fullname = form['fullname']

    Session.flush()
    Session.commit()
    h.redirect(
        h.url(controller='account', action='profile', message='edited'))
def _evaluate(self, evaltype, id, id2=None, uid=None, username=None, degree=-1, maxdegree=4, errors=0): """ Function to submit an evaluation. Takes a POST request containing the consequesnt id and all or none of: generality, relatedness, hyperrank, hyporank. """ id2 = request.params.get('id2', id2) uid = request.params.get('uid', uid) try: username = h.auth.get_username_from_cookie( request.params.get('cookieAuth', '')) except ValueError: # invalid IP, abort username = None print "grabbing eval for", username, uid if request.environ.get('REMOTE_USER', False): username = request.environ.get('REMOTE_USER', username) evaluation = self._get_evaluation(id, id2, None, username) elif username: evaluation = self._get_evaluation(id, id2, None, username) else: evaluation = self._get_anon_evaluation( id, id2, request.environ.get('REMOTE_ADDR', '0.0.0.0')) # Populate proper generality, relatedness, hyperrank and hyporank values evaluation.time = time.time() # Attempt to convert to integers, if unable, throw HTTP 400 try: setattr( evaluation, evaltype, int(request.params.get('degree', getattr(evaluation, evaltype)))) except TypeError: abort(400) # Create and commit evaluation try: Session.flush() Session.commit() except IntegrityError: Session.rollback() if not errors: self._evaluate(evaltype, id, id2, username, degree, maxdegree, errors + 1) # Issue an HTTP success response.status_int = 200 return "OK"
def complete_mining(entity_type=Idea, filename='graph.txt', root='./', corpus_root='corpus/', update_entropy=False, update_occurrences=False, update_db=False): occur_filename = os.path.abspath(root + "occurrences.txt") graph_filename = os.path.abspath(root + "graph-" + filename) edge_filename = os.path.abspath(root + "edge-" + filename) sql_filename = os.path.abspath(root + "sql-" + filename) doc_terms = doc_terms_list() if update_occurrences: print "processing articles..." process_articles(entity_type, occur_filename, corpus_root=corpus_root) print "filtering occurrences..." filter_apriori_input(occur_filename, graph_filename, entity_type, doc_terms) print "running apriori miner..." dm.apriori(graph_filename, edge_filename) print "processing edges..." edges = dm.process_edges(graph_filename, edge_filename, occur_filename, doc_terms) ents = dm.calculate_node_entropy(edges) edges = dm.calculate_edge_weight(edges, ents) print "creating sql files..." with open(sql_filename, 'w') as f: for edge, props in edges.iteritems(): ante, cons = edge row = "%s::%s" % edge row += ("::%(confidence)s::%(jweight)s::%(weight)s" "::%(occurs_in)s\n" % props) f.write(row) if update_entropy: print "updating term entropy..." for term_id, entropy in ents.iteritems(): term = Session.query(Idea).get(term_id) if term: term.entropy = entropy Session.flush() Session.commit() Session.close() if update_db: print "updating the database..." update_graph(entity_type, sql_filename)
def delete_obj(obj):
    """
    Remove an arbitrary object via the SQLAlchemy Session.

    Deletes are cascaded to dependent evaluations by the ORM mappings.

    :param obj: the mapped object to delete
    """
    # mark for deletion, push to the DB, then finalize the transaction
    Session.delete(obj)
    Session.flush()
    Session.commit()
def _delete_date(self, id, id2):
    """Remove the date identified by ``id2`` from entity ``id``."""
    c.entity = h.fetch_obj(Entity, id, new_id=True)

    # resolve the date object to remove
    target = self._get_date(id, id2)

    if target in c.entity.dates:
        position = c.entity.dates.index(target)
        Session.delete(c.entity.dates[position])
        Session.commit()

    return "OK"
def complete_mining(entity_type=Idea, filename='graph.txt', root='./',
                    corpus_root='corpus/', update_entropy=False,
                    update_occurrences=False, update_db=False):
    # Full graph-mining pipeline: occurrence extraction (optional),
    # filtering, apriori mining, edge weighting, SQL-load-file writing,
    # then optional entropy and database updates.
    occur_filename = os.path.abspath(root + "occurrences.txt")
    graph_filename = os.path.abspath(root + "graph-" + filename)
    edge_filename = os.path.abspath(root + "edge-" + filename)
    sql_filename = os.path.abspath(root + "sql-" + filename)

    doc_terms = doc_terms_list()

    if update_occurrences:
        print "processing articles..."
        process_articles(entity_type, occur_filename,
                         corpus_root=corpus_root)

    print "filtering occurrences..."
    filter_apriori_input(
        occur_filename, graph_filename, entity_type, doc_terms)

    print "running apriori miner..."
    dm.apriori(graph_filename, edge_filename)

    print "processing edges..."
    edges = dm.process_edges(
        graph_filename, edge_filename, occur_filename, doc_terms)
    ents = dm.calculate_node_entropy(edges)
    edges = dm.calculate_edge_weight(edges, ents)

    print "creating sql files..."
    with open(sql_filename, 'w') as f:
        for edge, props in edges.iteritems():
            # NOTE(review): ante/cons are unpacked but unused; the edge
            # tuple itself feeds the format string.
            ante,cons = edge
            row = "%s::%s" % edge
            row += ("::%(confidence)s::%(jweight)s::%(weight)s"
                    "::%(occurs_in)s\n" % props)
            f.write(row)

    if update_entropy:
        print "updating term entropy..."
        for term_id, entropy in ents.iteritems():
            term = Session.query(Idea).get(term_id)
            if term:
                term.entropy = entropy
        Session.flush()
        Session.commit()
        Session.close()

    if update_db:
        print "updating the database..."
        update_graph(entity_type, sql_filename)
def _delete_unary(self, type, id, id2=None):
    """Detach the unary attribute named by ``type`` from a thinker."""
    thinker = h.fetch_obj(Thinker, id)
    id2 = request.params.get('id2', id2)

    meta = unary_vars[type]
    obj = h.fetch_obj(meta['object'], id2)

    collection = getattr(thinker, meta['property'])
    if obj in collection:
        collection.remove(obj)
        Session.commit()

    response.status_int = 200
    return "OK"
def searchpatterns(self, id):
    """Attach a new search pattern to an entity; 400 when none given."""
    c.entity = h.fetch_obj(Entity, id, new_id=True)

    # add a new search pattern
    new_pattern = request.params.get('pattern', None)
    if new_pattern is None:
        abort(400)

    if new_pattern not in c.entity.searchpatterns:
        c.entity.searchpatterns.append(unicode(new_pattern))
        Session.commit()

    return "OK"
def update(self, id=None):
    """Update a Journal, handling its URL specially before delegating
    the remaining fields to the generic entity update."""
    terms = ['label', 'sep_dir', 'last_accessed', 'language',
             'openAccess', 'active', 'student', 'ISSN']

    raw_url = request.params.get('URL', None)
    if raw_url is not None:
        journal = h.fetch_obj(Journal, id)
        if raw_url in ('none', 'None'):
            # explicit sentinel clears the URL
            journal.URL = None
        else:
            journal.URL = unquote(raw_url)
            journal.check_url()
        Session.commit()

    super(JournalController, self).update(id, terms)
def searchpatterns(self, id): c.entity = h.fetch_obj(Entity, id, new_id=True) # add a new search pattern pattern = request.params.get("pattern", None) if pattern is None: abort(400) if pattern not in c.entity.searchpatterns: c.entity.searchpatterns.append(unicode(pattern)) Session.commit() return "OK"
def triple(self, id): c.entity = h.fetch_obj(Thinker, id) #parese the triple triple = request.params.get('triple').split() subject_t = triple[0] predicate_t = triple[1] objectURLComponents = triple[2].split('/') #parse triple for last check = "no teacher or student" #lastComponentIndex = objectURLComponents.__len__()-1 object_t = objectURLComponents[-1] #- subject is the same as the id #- predicate is from the list and will be used in a if/elif/elif/elif/elif ... to see what database to add it to if "dbpedia.org" in objectURLComponents: object_t_label = object_t.replace("_", " ") obj = Thinker(object_t_label) obj.wiki = object_t elif "inpho.cogs.indiana.edu" in objectURLComponents: obj = h.fetch_obj(Thinker, object_t) '''if(inpho): obj = h.fetch_obj(Thinker, object_t) # returns the SQLAlchemy object elif(dbpedia) obj = Thinker(object_t) # returns the SQLAlchemy object ''' if predicate_t == 'ns1:influenced': c.entity.influenced.append(obj) elif predicate_t == 'ns1:influenced_by': c.entity.influenced_by.append(obj) elif predicate_t == 'ns1:student': c.entity.students.append(obj) elif predicate_t == 'ns1:teacher': c.entity.teachers.append(obj) ''' elif predicate == 'profession': elif predicate == 'birth_date': elif predicate == 'death_date': else predicate == 'nationality': ''' Session.commit() subject_to_display = subject_t.split("/")[len(subject_t.split("/")) - 1] predicate_to_display = predicate_t.split(":")[1] object_to_display = object_t return "OK : " + subject_to_display + " " + predicate_to_display + " " + object_to_display
def queries(self, id):
    """Attach a new search query pattern to a journal.

    Aborts 400 when no ``pattern`` parameter is supplied; duplicates
    are silently ignored.
    """
    c.entity = h.fetch_obj(Journal, id, new_id=True)

    # add a new search pattern
    pattern = request.params.get('pattern', None)
    if pattern is None:
        abort(400)

    # IMPROVED: convert once -- the original re-wrapped the
    # already-unicode value in unicode() a second time when appending.
    pattern = unicode(pattern)
    if pattern not in c.entity.queries:
        c.entity.queries.append(pattern)
        Session.commit()

    return "OK"
def triple(self, id): c.entity = h.fetch_obj(Thinker, id) #parese the triple triple = request.params.get('triple').split() subject_t = triple[0] predicate_t = triple[1] objectURLComponents = triple[2].split('/')#parse triple for last check = "no teacher or student" #lastComponentIndex = objectURLComponents.__len__()-1 object_t = objectURLComponents[-1] #- subject is the same as the id #- predicate is from the list and will be used in a if/elif/elif/elif/elif ... to see what database to add it to if "dbpedia.org" in objectURLComponents: object_t_label = object_t.replace("_"," ") obj = Thinker(object_t_label) obj.wiki = object_t elif "inpho.cogs.indiana.edu" in objectURLComponents: obj = h.fetch_obj(Thinker, object_t) '''if(inpho): obj = h.fetch_obj(Thinker, object_t) # returns the SQLAlchemy object elif(dbpedia) obj = Thinker(object_t) # returns the SQLAlchemy object ''' if predicate_t == 'ns1:influenced': c.entity.influenced.append(obj) elif predicate_t == 'ns1:influenced_by': c.entity.influenced_by.append(obj) elif predicate_t =='ns1:student': c.entity.students.append(obj) elif predicate_t == 'ns1:teacher': c.entity.teachers.append(obj) ''' elif predicate == 'profession': elif predicate == 'birth_date': elif predicate == 'death_date': else predicate == 'nationality': ''' Session.commit() subject_to_display=subject_t.split("/")[len(subject_t.split("/"))-1] predicate_to_display=predicate_t.split(":")[1] object_to_display=object_t return "OK : "+subject_to_display+" "+predicate_to_display+" "+object_to_display
def _delete_queries(self, id):
    """Remove a search query pattern from a journal; 400 when no
    ``pattern`` parameter is supplied."""
    c.entity = h.fetch_obj(Journal, id, new_id=True)

    query = request.params.get('pattern', None)
    if query is None:
        abort(400)

    # rudimentary input sanitization
    query = query.strip()

    if query in c.entity.queries:
        c.entity.queries.remove(query)
        Session.commit()

    return "OK"
def abbrs(self, id):
    """Attach a new abbreviation to a journal; 400 when no ``pattern``
    parameter is supplied. Duplicates are silently ignored."""
    c.entity = h.fetch_obj(Journal, id, new_id=True)

    abbr = request.params.get('pattern', None)
    if abbr is None:
        abort(400)

    # rudimentary input sanitization
    abbr = abbr.strip()

    if abbr not in c.entity.abbrs:
        c.entity.abbrs.append(unicode(abbr))
        Session.commit()

    return "OK"
def _evaluate(self, evaltype, id, id2=None, uid=None, username=None, degree=-1, maxdegree=4, errors=0): """ Function to submit an evaluation. Takes a POST request containing the consequesnt id and all or none of: generality, relatedness, hyperrank, hyporank. """ id2 = request.params.get('id2', id2) uid = request.params.get('uid', uid) try: username = h.auth.get_username_from_cookie(request.params.get('cookieAuth', '')) except ValueError: # invalid IP, abort username = None print "grabbing eval for", username, uid if request.environ.get('REMOTE_USER', False): username = request.environ.get('REMOTE_USER', username) evaluation = self._get_evaluation(id, id2, None, username) elif username: evaluation = self._get_evaluation(id, id2, None, username) else: evaluation = self._get_anon_evaluation(id, id2, request.environ.get('REMOTE_ADDR', '0.0.0.0')) # Populate proper generality, relatedness, hyperrank and hyporank values evaluation.time = time.time() # Attempt to convert to integers, if unable, throw HTTP 400 try: setattr(evaluation, evaltype, int(request.params.get('degree', getattr(evaluation, evaltype)))) except TypeError: abort(400) # Create and commit evaluation try: Session.flush() Session.commit() except IntegrityError: Session.rollback() if not errors: self._evaluate(evaltype, id, id2, username, degree, maxdegree, errors+1) # Issue an HTTP success response.status_int = 200 return "OK"
def update(self, id=None):
    # Update a Journal. The URL parameter is handled here (including the
    # 'none'/'None' sentinel that clears it); the remaining fields are
    # delegated to the generic entity update.
    terms = [
        'label', 'sep_dir', 'last_accessed', 'language', 'openAccess',
        'active', 'student', 'ISSN'
    ]

    URL = request.params.get('URL', None)
    if URL is not None:
        journal = h.fetch_obj(Journal, id)
        if URL == 'none' or URL == 'None':
            journal.URL = None
        else:
            # stored decoded; check_url() validates reachability
            journal.URL = unquote(URL)
            journal.check_url()
        Session.commit()

    super(JournalController, self).update(id, terms)
def submit(self):
    '''
    This function validates the submitted registration form and creates
    a new user. Restricted to ``POST`` requests. If successful,
    redirects to the result action to prevent resubmission.
    '''
    user = User(
        self.form_result['username'],
        fullname=self.form_result['fullname'],
        email=self.form_result['email'],
        first_area_id=self.form_result['first_area'],
        first_area_level=self.form_result['first_area_level'],
        second_area_id=self.form_result['second_area'],
        second_area_level=self.form_result['second_area_level']
    )
    Session.add(user)
    # generate an initial random password and persist the account
    password = user.reset_password()
    Session.commit()

    # e-mail the new credentials to the registrant
    msg = Message("*****@*****.**", self.form_result['email'],
                  "InPhO registration")
    msg.plain = """Dear %(name)s,

Thank you for registering with the Indiana Philosophy Ontology Project (InPhO).

You can sign in at https://inpho.cogs.indiana.edu/signin with the following information:

Username: %(uname)s
Password: %(passwd)s

You may change your password at https://inpho.cogs.indiana.edu/account/edit .

The Indiana Philosophy Ontology Project (InPhO) Team
[email protected]
""" % {'passwd' : password, 'uname' : user.username,
       'name' : user.fullname or user.username or ''}
    msg.send()

    h.redirect(h.url(controller='account', action='result'))
def _delete_searchpatterns(self, id):
    """Delete a search pattern from an entity; 400 when no ``pattern``
    parameter is supplied."""
    c.entity = h.fetch_obj(Entity, id, new_id=True)

    pattern = request.params.get('pattern', None)
    if pattern is None:
        abort(400)
    pattern = pattern.strip()

    # Boneheaded working around bogus associationproxy in SQLAlchemy 0.6.8
    # Why this isn't just c.entity.searchpatterns.remove(pattern)? who knows
    doomed = [sp for sp in c.entity._spatterns
              if sp.searchpattern == pattern]
    for sp in doomed:
        Session.delete(sp)
    Session.commit()

    return "OK"
def _delete_searchpatterns(self, id): c.entity = h.fetch_obj(Entity, id, new_id=True) # add a new search pattern pattern = request.params.get("pattern", None) if pattern is None: abort(400) pattern = pattern.strip() # Boneheaded working around bogus associationproxy in SQLAlchemy 0.6.8 # Why this isn't just c.entity.searchpatterns.remove(pattern)? who knows for spattern in c.entity._spatterns: if spattern.searchpattern == pattern: Session.delete(spattern) Session.commit() return "OK"
def submit(self):
    '''
    This function validates the submitted registration form and creates
    a new user. Restricted to ``POST`` requests. If successful,
    redirects to the result action to prevent resubmission.
    '''
    user = User(self.form_result['username'],
                fullname=self.form_result['fullname'],
                email=self.form_result['email'],
                first_area_id=self.form_result['first_area'],
                first_area_level=self.form_result['first_area_level'],
                second_area_id=self.form_result['second_area'],
                second_area_level=self.form_result['second_area_level'])
    Session.add(user)
    # generate an initial random password and persist the account
    password = user.reset_password()
    Session.commit()

    # e-mail the new credentials to the registrant
    msg = Message("*****@*****.**", self.form_result['email'],
                  "InPhO registration")
    msg.plain = """Dear %(name)s,

Thank you for registering with the Indiana Philosophy Ontology Project (InPhO).

You can sign in at https://inphoproject.org/signin with the following information:

Username: %(uname)s
Password: %(passwd)s

You may change your password at https://inphoproject.org/account/edit .

The Indiana Philosophy Ontology Project (InPhO) Team
[email protected]
""" % {
        'passwd': password,
        'uname': user.username,
        'name': user.fullname or user.username or ''
    }
    msg.send()

    h.redirect(h.url(controller='account', action='result'))
def update_obj(obj, attributes, params):
    """
    Updates any arbitrary object. Takes a list of attributes and a
    dictionary of update parameters. Checks if each key is in the list of
    approved attributes and then attempts to set it. If the object does
    not have that key, throw an HTTP 400 Bad Request

    :param obj: object to update
    :param attributes: list of approved attributes
    :param params: dictionary of update parameters
    """
    for key in params.keys():
        if key in attributes:
            try:
                set_attribute(obj, key, params[key])
            except Exception:
                # IMPROVED: narrowed from a bare except, which also
                # swallowed SystemExit/KeyboardInterrupt. Any failure
                # to set still maps to a 400 Bad Request.
                abort(400)
    Session.flush()
    Session.commit()
def date(self, id, id2, filetype="json"):
    """
    Creates a date object, associated to the id with the relation type
    of id2.
    """
    try:
        new_date = self._get_date(id, id2)
    except DateException as e:
        # TODO: Cleanup this workaround for the Pylons abort function not
        # passing along error messages properly to the error controller.
        response.status = 400
        return str(e)

    try:
        Session.add(new_date)
        Session.commit()
    except IntegrityError:
        # skip over data integrity errors, since if the date is already in
        # the db, things are proceeding as intended.
        pass

    return "OK"
def date(self, id, id2, filetype='json'):
    """
    Creates a date object, associated to the id with the relation type
    of id2.
    """
    try:
        date = self._get_date(id, id2)
    except DateException as e:
        # TODO: Cleanup this workaround for the Pylons abort function not
        # passing along error messages properly to the error controller.
        response.status = 400
        return str(e)

    try:
        Session.add(date)
        Session.commit()
    except IntegrityError:
        # skip over data integrity errors, since if the date is already in
        # the db, things are proceeding as intended.
        pass

    return "OK"
def complete_mining(entity_type=Idea, filename='graph.txt', root='./', corpus_root='corpus/', update_entropy=False): occur_filename = os.path.abspath(root + "graph-" + filename) edge_filename = os.path.abspath(root + "edge-" + filename) sql_filename = os.path.abspath(root + "sql-" + filename) print "processing articles..." process_articles(entity_type, occur_filename, corpus_root=corpus_root) print "running apriori miner..." dm.apriori(occur_filename, edge_filename) print "processing edges..." edges = dm.process_edges(occur_filename, edge_filename) ents = dm.calculate_node_entropy(edges) edges = dm.calculate_edge_weight(edges, ents) print "creating sql files..." with open(sql_filename, 'w') as f: for edge, props in edges.iteritems(): ante,cons = edge row = "%s::%s" % edge row += "::%(confidence)s::%(jweight)s::%(weight)s\n" % props f.write(row) print "updating term entropy..." if update_entropy: for term_id, entropy in ents.iteritems(): term = Session.query(Idea).get(term_id) if term: term.entropy = entropy Session.flush() Session.commit() Session.close() # Import SQL statements if entity_type == Idea: table = "idea_graph_edges" elif entity_type == Thinker: table = "thinker_graph_edges" else: table = "idea_thinker_graph_edges" connection = Session.connection() print "deleting old graph information ..." connection.execute(""" DELETE FROM %(table)s; """ % {'filename' : sql_filename, 'table' : table }) print "inserting new graph information" connection.execute(""" SET foreign_key_checks=0; LOAD DATA INFILE '%(filename)s' INTO TABLE %(table)s FIELDS TERMINATED BY '::' (ante_id, cons_id, confidence, jweight, weight); SET foreign_key_checks=1; """ % {'filename' : sql_filename, 'table' : table }) Session.close()
def complete_mining(entity_type=Idea, filename='graph.txt', root='./',
                    corpus_root='corpus/', update_entropy=False):
    # Mine co-occurrence edges for `entity_type` and reload the
    # corresponding graph table: occurrence extraction, apriori mining,
    # edge weighting, SQL-load-file writing, optional entropy update,
    # then a truncate-and-bulk-load of the graph table.
    occur_filename = os.path.abspath(root + "graph-" + filename)
    edge_filename = os.path.abspath(root + "edge-" + filename)
    sql_filename = os.path.abspath(root + "sql-" + filename)

    print "processing articles..."
    process_articles(entity_type, occur_filename, corpus_root=corpus_root)

    print "running apriori miner..."
    dm.apriori(occur_filename, edge_filename)

    print "processing edges..."
    edges = dm.process_edges(occur_filename, edge_filename)
    ents = dm.calculate_node_entropy(edges)
    edges = dm.calculate_edge_weight(edges, ents)

    print "creating sql files..."
    with open(sql_filename, 'w') as f:
        for edge, props in edges.iteritems():
            # NOTE(review): ante/cons are unpacked but unused.
            ante, cons = edge
            row = "%s::%s" % edge
            row += "::%(confidence)s::%(jweight)s::%(weight)s\n" % props
            f.write(row)

    # NOTE(review): this message prints even when update_entropy is
    # False -- it probably belongs inside the if below.
    print "updating term entropy..."
    if update_entropy:
        for term_id, entropy in ents.iteritems():
            term = Session.query(Idea).get(term_id)
            if term:
                term.entropy = entropy
        Session.flush()
        Session.commit()
        Session.close()

    # Import SQL statements
    if entity_type == Idea:
        table = "idea_graph_edges"
    elif entity_type == Thinker:
        table = "thinker_graph_edges"
    else:
        table = "idea_thinker_graph_edges"

    connection = Session.connection()

    print "deleting old graph information ..."
    # NOTE(review): the 'filename' key in this dict is unused.
    connection.execute("""
    DELETE FROM %(table)s;
    """ % {
        'filename': sql_filename,
        'table': table
    })

    print "inserting new graph information"
    connection.execute("""
    SET foreign_key_checks=0;
    LOAD DATA INFILE '%(filename)s'
    INTO TABLE %(table)s
    FIELDS TERMINATED BY '::'
    (ante_id, cons_id, confidence, jweight, weight);
    SET foreign_key_checks=1;
    """ % {
        'filename': sql_filename,
        'table': table
    })

    Session.close()
def create(self, entity_type=None, filetype='html', valid_params=None):
    """Create a new entity of the given type (admins only).

    Validates the request parameters against a per-type whitelist,
    redirects (302) to an existing entity with the same label, or
    creates the entity and redirects (303) / returns "200 OK".
    """
    # check if user is logged in
    if not h.auth.is_logged_in():
        abort(401)
    if not h.auth.is_admin():
        abort(403)

    sep_dir = None
    params = request.params.mixed()
    if entity_type is None:
        entity_type = int(params['entity_type'])
        del params['entity_type']

    if valid_params is None:
        if entity_type == 1:    # Idea
            valid_params = ["sep_dir", "searchstring", "searchpattern",
                            "wiki"]
        elif entity_type == 3 or entity_type == 5:    # Thinker or Work
            valid_params = ["sep_dir", "wiki"]
        elif entity_type == 4:    # Journal
            valid_params = ["ISSN", "noesisInclude", "URL", "source",
                            "abbr", "language", "student", "active",
                            "wiki"]
        elif entity_type == 6:    # School of Thought
            valid_params = ["sep_dir", "wiki"]

    if '_method' in params:
        del params['_method']

    # BUG FIX: remember whether the caller asked for a redirect. The
    # original deleted the parameter and later tested "if redirect:",
    # which referred to the (always truthy) Pylons redirect *function*.
    do_redirect = params.pop('redirect', True)

    if 'sep_dir' in params:
        sep_dir = params['sep_dir']
        del params['sep_dir']

    if 'label' in params:
        label = params['label']
        del params['label']
    elif 'name' in params:
        label = params['name']
        del params['name']
    else:
        abort(400)

    # reject any parameter not on the whitelist
    for k in params.keys():
        if k not in valid_params:
            abort(400)

    # If entity exists, redirect and return HTTP 302
    c.entity = Session.query(Entity).filter(Entity.label == label).first()
    if c.entity:
        redirect(c.entity.url(filetype, action="view"), code=302)
    else:
        # Entity doesn't exist, create a new one.
        if entity_type == 1:
            c.entity = Idea(label, sep_dir=sep_dir)
        elif entity_type == 3:
            c.entity = Thinker(label, sep_dir=sep_dir)
        elif entity_type == 4:
            c.entity = Journal(label, sep_dir=sep_dir)
        elif entity_type == 5:
            c.entity = Work(label, sep_dir=sep_dir)
        elif entity_type == 6:
            c.entity = SchoolOfThought(label, sep_dir=sep_dir)
        else:
            raise NotImplementedError

        Session.add(c.entity)
        Session.commit()

        if do_redirect:
            sleep(5)  # TODO: figure out database slowness so this can be removed
            redirect(c.entity.url(filetype, action="view"), code=303)
        else:
            return "200 OK"
def create(self, entity_type=None, filetype="html", valid_params=None):
    # Create a new entity of the given type (admins only). Parameters
    # are validated against a per-type whitelist; an existing entity
    # with the same label yields a 302 redirect, otherwise the entity
    # is created and a 303 redirect (or "200 OK") is issued.
    # check if user is logged in
    if not h.auth.is_logged_in():
        abort(401)
    if not h.auth.is_admin():
        abort(403)

    sep_dir = None
    params = request.params.mixed()
    if entity_type is None:
        entity_type = int(params["entity_type"])
        del params["entity_type"]

    if valid_params is None:
        if entity_type == 1:  # Idea
            valid_params = ["sep_dir", "searchstring", "searchpattern",
                            "wiki"]
        elif entity_type == 3 or entity_type == 5:  # Thinker or Work
            valid_params = ["sep_dir", "wiki"]
        elif entity_type == 4:  # Journal
            valid_params = [
                "ISSN",
                "noesisInclude",
                "URL",
                "source",
                "abbr",
                "language",
                "student",
                "active",
                "wiki",
            ]
        elif entity_type == 6:  # School of Thought
            valid_params = ["sep_dir", "wiki"]

    if "_method" in params:
        del params["_method"]
    # NOTE(review): the redirect parameter is deleted without being
    # saved, yet "if redirect:" below tests the (always truthy)
    # redirect *function* -- the redirect branch always runs.
    if "redirect" in params:
        del params["redirect"]
    if "sep_dir" in params:
        sep_dir = params["sep_dir"]
        del params["sep_dir"]

    if "label" in params:
        label = params["label"]
        del params["label"]
    elif "name" in params:
        label = params["name"]
        del params["name"]
    else:
        abort(400)

    # reject any parameter not on the whitelist
    for k in params.keys():
        if k not in valid_params:
            abort(400)

    # If entity exists, redirect and return HTTP 302
    c.entity = Session.query(Entity).filter(Entity.label == label).first()
    if c.entity:
        redirect(c.entity.url(filetype, action="view"), code=302)
    else:
        # Entity doesn't exist, create a new one.
        if entity_type == 1:
            c.entity = Idea(label, sep_dir=sep_dir)
        elif entity_type == 3:
            c.entity = Thinker(label, sep_dir=sep_dir)
        elif entity_type == 4:
            c.entity = Journal(label, sep_dir=sep_dir)
        elif entity_type == 5:
            c.entity = Work(label, sep_dir=sep_dir)
        elif entity_type == 6:
            c.entity = SchoolOfThought(label, sep_dir=sep_dir)
        else:
            raise NotImplementedError

        Session.add(c.entity)
        Session.commit()

        if redirect:
            sleep(5)  # TODO: figure out database slowness so this can be removed
            redirect(c.entity.url(filetype, action="view"), code=303)
        else:
            return "200 OK"
# to see if it is still valid. If the status code is a 302 redirect, # update the URL in the database. If it's accessible (status code == # 200 or 30x), update the last_accessed field. If it doesn't open at all, # raise an error. If it's been inaccessible for four weeks, raise an # error. journal_list = Session.query(Journal).all() for journal in journal_list: try: f = urllib.urlopen(journal.URL) status = f.getcode() if (status == 302): journal.URL = f.geturl() # UNTESTED!! if (status <= 307): journal.last_accessed = time.time() except: errormsg = "As of {0}, the journal {1} had a bad URL: {2}" print >> sys.stderr, errormsg.format(time.strftime("%Y-%m-%d %H:%M:%S"), journal.name, journal.URL) # "magic number" 2419200 == four weeks in seconds if not journal.last_accessed or (time.time() - journal.last_accessed > 2419200): errormsg = "As of {0}, the journal {1} has been inaccessible for four weeks." print >> sys.stderr, errormsg.format(time.strftime("%Y-%m-%d %H:%M:%S"), journal.name) # write to the database Session.commit() Session.flush() print "Succesfully checked {0} journal URLS.".format(len(journal_list))