def get_public_content(api, username, request):
    then = (datetime.now(pytz.utc) - timedelta(days=120)).strftime("%s")
    query = \
        '// Retrieve all the current users questions\n' \
        'MATCH (a:Pleb {username: "%s"})<-[:OWNED_BY]-' \
        '(questions:Question) ' \
        'WHERE questions.to_be_deleted = False AND questions.created > %s' \
        ' AND questions.is_closed = False ' \
        'RETURN questions, NULL AS solutions, ' \
        'questions.created AS created, NULL AS s_question UNION ' \
        '// Retrieve all the current users solutions\n' \
        'MATCH (a:Pleb {username: "%s"})<-' \
        '[:OWNED_BY]-(solutions:Solution)<-' \
        '[:POSSIBLE_ANSWER]-(s_question:Question) ' \
        'WHERE s_question.to_be_deleted = False ' \
        'AND solutions.created > %s' \
        ' AND solutions.is_closed = False ' \
        'AND s_question.is_closed = False ' \
        'AND solutions.to_be_deleted = False ' \
        'RETURN solutions, NULL AS questions, ' \
        'solutions.created AS created, s_question AS s_question' \
        % (username, then, username, then)
    news = []
    res, _ = db.cypher_query(query)
    # Profiled with ~50 objects and it was still performing under 1 ms.
    # By the time sorting in Python becomes an issue the Neo4j UNION
    # post-processing ticket (https://github.com/neo4j/neo4j/issues/2725)
    # should be resolved.
    res = sorted(res, key=attrgetter('created'), reverse=True)[:5]
    page = api.paginate_queryset(res)
    for row in page:
        news_article = None
        if row.questions is not None:
            row.questions.pull()
            news_article = QuestionSerializerNeo(
                Question.inflate(row.questions),
                context={'request': request}).data
        elif row.solutions is not None:
            row.s_question.pull()
            row.solutions.pull()
            question_data = QuestionSerializerNeo(
                Question.inflate(row.s_question)).data
            news_article = SolutionSerializerNeo(
                Solution.inflate(row.solutions),
                context={'request': request}).data
            news_article['question'] = question_data
        news.append(news_article)
    return api.get_paginated_response(news)

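# A note on the cutoff computed above: strftime("%s") is a platform-specific
# (glibc) extension rather than a documented Python format code, so it only
# yields epoch seconds on platforms whose C library supports it. A minimal,
# portable sketch of the same 120-day cutoff; epoch_cutoff is a hypothetical
# helper and not part of the original codebase.
import calendar
from datetime import datetime, timedelta

import pytz


def epoch_cutoff(days=120):
    # calendar.timegm() interprets the UTC time tuple directly, so the result
    # is seconds since the Unix epoch regardless of platform.
    cutoff = datetime.now(pytz.utc) - timedelta(days=days)
    return str(calendar.timegm(cutoff.utctimetuple()))
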
def get_focused_on(self, request=None):
    from api.neo_models import SBObject
    from sb_quests.neo_models import Position
    from sb_quests.serializers import PositionSerializer
    from sb_tags.neo_models import Tag
    from sb_tags.serializers import TagSerializer
    from sb_questions.neo_models import Question
    from sb_questions.serializers import QuestionSerializerNeo
    query = 'MATCH (a:Mission {object_uuid: "%s"})-[:FOCUSED_ON]->(b) ' \
            'RETURN b' % self.object_uuid
    res, _ = db.cypher_query(query)
    if res.one:
        child_label = SBObject.inflate(res.one).get_child_label()
        if child_label == "Position":
            return PositionSerializer(Position.inflate(res.one),
                                      context={'request': request}).data
        elif child_label == "Tag":
            return TagSerializer(Tag.inflate(res.one),
                                 context={'request': request}).data
        elif child_label == "Question":
            return QuestionSerializerNeo(Question.inflate(res.one),
                                         context={'request': request}).data
    else:
        return None

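# The chain of child_label comparisons above could also be table-driven. A
# minimal sketch, assuming the models and serializers imported inside
# get_focused_on are available at module level; FOCUS_SERIALIZERS and
# serialize_focus are hypothetical names, not part of the original codebase.
FOCUS_SERIALIZERS = {
    "Position": (Position, PositionSerializer),
    "Tag": (Tag, TagSerializer),
    "Question": (Question, QuestionSerializerNeo),
}


def serialize_focus(node, request=None):
    # Inflate to the generic SBObject only to discover the concrete label,
    # then re-inflate with the matching model and serialize it.
    child_label = SBObject.inflate(node).get_child_label()
    try:
        model, serializer = FOCUS_SERIALIZERS[child_label]
    except KeyError:
        return None
    return serializer(model.inflate(node),
                      context={'request': request}).data
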
def repopulate_elasticsearch(self):
    # Profiles
    skip = 0
    while True:
        query = 'MATCH (profile:Pleb) RETURN DISTINCT profile ' \
                'SKIP %s LIMIT 25' % skip
        # Advance by the full page size (LIMIT) so batches do not overlap.
        skip += 25
        res, _ = db.cypher_query(query)
        if not res.one:
            break
        for profile in [Pleb.inflate(row[0]) for row in res]:
            update_search_object.apply_async(kwargs={
                "object_uuid": profile.object_uuid,
                "label": "pleb"
            })
    # Questions
    skip = 0
    while True:
        query = 'MATCH (question:Question) RETURN DISTINCT question ' \
                'SKIP %s LIMIT 25' % skip
        skip += 25
        res, _ = db.cypher_query(query)
        if not res.one:
            break
        for question in [Question.inflate(row[0]) for row in res]:
            update_search_object.apply_async(kwargs={
                "object_uuid": question.object_uuid,
                "label": "question"
            })
    # Quests
    skip = 0
    while True:
        query = 'MATCH (quest:Quest) RETURN DISTINCT quest ' \
                'SKIP %s LIMIT 25' % skip
        skip += 25
        res, _ = db.cypher_query(query)
        if not res.one:
            break
        for quest in [Quest.inflate(row[0]) for row in res]:
            update_search_object.apply_async(kwargs={
                "object_uuid": quest.object_uuid,
                "label": "quest"
            })
    # Missions
    skip = 0
    while True:
        query = 'MATCH (mission:Mission) RETURN DISTINCT mission ' \
                'SKIP %s LIMIT 25' % skip
        skip += 25
        res, _ = db.cypher_query(query)
        if not res.one:
            break
        for mission in [Mission.inflate(row[0]) for row in res]:
            update_search_object.apply_async(kwargs={
                "object_uuid": mission.object_uuid,
                "label": "mission"
            })

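# The four loops above differ only in the node label and the kwargs sent to
# update_search_object. A minimal sketch of a shared batching helper, assuming
# the same db.cypher_query interface used above; iter_nodes_in_batches is a
# hypothetical name, not part of the original codebase.
def iter_nodes_in_batches(label, model, batch_size=25):
    skip = 0
    while True:
        query = 'MATCH (n:%s) RETURN DISTINCT n ' \
                'SKIP %s LIMIT %s' % (label, skip, batch_size)
        skip += batch_size
        res, _ = db.cypher_query(query)
        if not res.one:
            break
        for row in res:
            yield model.inflate(row[0])


# Usage sketch: the Pleb loop above would reduce to
# for profile in iter_nodes_in_batches("Pleb", Pleb):
#     update_search_object.apply_async(kwargs={
#         "object_uuid": profile.object_uuid, "label": "pleb"})
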
def migrate_questions(self):
    query = 'MATCH (a:Question) RETURN a'
    res, _ = db.cypher_query(query)
    for question in [Question.inflate(row[0]) for row in res]:
        spawn_task(task_func=create_question_summary_task,
                   task_param={'object_uuid': question.object_uuid})
    self.stdout.write("completed question migration\n", ending='')

def migrate_to_new_editor(self):
    skip = 0
    while True:
        query = 'MATCH (m:Mission) RETURN m SKIP %s LIMIT 25' % skip
        skip += 25
        res, _ = db.cypher_query(query)
        if not res.one:
            break
        for mission in [Mission.inflate(row[0]) for row in res]:
            # Restore escaped '>' characters before re-rendering the markdown
            # and force links to open in a new tab.
            rendered = render_content(
                markdown.markdown(mission.epic.replace(
                    '&gt;', '>')).replace('<a', '<a target="_blank"'))
            mission.epic = rendered
            mission.temp_epic = rendered
            mission.save()
    skip = 0
    while True:
        query = 'MATCH (m:Question) RETURN m SKIP %s LIMIT 25' % skip
        skip += 25
        res, _ = db.cypher_query(query)
        if not res.one:
            break
        for question in [Question.inflate(row[0]) for row in res]:
            rendered = render_content(
                markdown.markdown(question.content.replace(
                    '&gt;', '>')).replace('<a', '<a target="_blank"'))
            question.content = rendered
            question.save()
    skip = 0
    while True:
        query = 'MATCH (m:Solution) RETURN m SKIP %s LIMIT 25' % skip
        skip += 25
        res, _ = db.cypher_query(query)
        if not res.one:
            break
        for solution in [Solution.inflate(row[0]) for row in res]:
            rendered = render_content(
                markdown.markdown(solution.content.replace(
                    '&gt;', '>')).replace('<a', '<a target="_blank"'))
            solution.content = rendered
            solution.save()
    skip = 0
    while True:
        query = 'MATCH (m:Update) RETURN m SKIP %s LIMIT 25' % skip
        skip += 25
        res, _ = db.cypher_query(query)
        if not res.one:
            break
        for update in [Update.inflate(row[0]) for row in res]:
            rendered = render_content(
                markdown.markdown(update.content.replace(
                    '&gt;', '>')).replace('<a', '<a target="_blank"'))
            update.content = rendered
            update.save()
    cache.set("migrated_to_new_editor", True)

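# Each migration loop above applies the same transformation to a different
# model attribute. A minimal sketch of the shared rendering step, assuming the
# render_content and markdown helpers used above; rerender_markdown is a
# hypothetical name, not part of the original codebase.
def rerender_markdown(raw):
    # Un-escape '>' so blockquotes survive, render the markdown to HTML, and
    # make every link open in a new tab.
    html = markdown.markdown(raw.replace('&gt;', '>'))
    return render_content(html.replace('<a', '<a target="_blank"'))
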
def connect_related_element(location, element_id):
    # This could be generalized to manage other nodes we want to link to a
    # location, but since we only handle questions right now it is kept
    # simple.
    from sb_questions.neo_models import Question
    query = 'MATCH (a:Question {external_location_id: "%s"}) RETURN a' % (
        element_id)
    res, _ = db.cypher_query(query)
    if res.one:
        connection_node = Question.inflate(res.one)
        connection_node.focus_location.connect(location)
    else:
        connection_node = None
    return connection_node

def list(self, request, *args, **kwargs):
    council_list = []
    html = request.query_params.get('html', 'false').lower()
    queryset = self.get_queryset()
    page = self.paginate_queryset(queryset)
    for row in page:
        if row[0] is not None:
            row[0].pull()
        # fix for None objects being returned
        # from the query due to multiple column returns
        council_object = None
        if row.questions is not None:
            council_object = QuestionSerializerNeo(
                Question.inflate(row.questions),
                context={'request': request}).data
        elif row.solutions is not None:
            council_object = SolutionSerializerNeo(
                Solution.inflate(row.solutions),
                context={'request': request}).data
        elif row.comments is not None:
            council_object = CommentSerializer(
                Comment.inflate(row.comments),
                context={'request': request}).data
        elif row.posts is not None:
            council_object = PostSerializerNeo(
                Post.inflate(row.posts),
                context={'request': request}).data
        if html == 'true':
            council_object['last_edited_on'] = parser.parse(
                council_object['last_edited_on'])
            council_object['request'] = request
            council_object = {
                "html": render_to_string("council_votable.html",
                                         council_object),
                "id": council_object["id"],
                "type": council_object["type"]
            }
        council_list.append(council_object)
    return self.get_paginated_response(council_list)

def update_search_object(object_uuid, label=None, object_data=None,
                         index="full-search-base"):
    from plebs.serializers import PlebSerializerNeo
    from sb_quests.serializers import QuestSerializer
    from sb_quests.neo_models import Quest
    from sb_missions.serializers import MissionSerializer
    from sb_missions.neo_models import Mission
    from sb_questions.serializers import QuestionSerializerNeo
    from sb_base.neo_models import get_parent_votable_content
    if label is None:
        label = get_parent_votable_content(
            object_uuid).get_child_label().lower()
    logger.critical("Updating Search Object")
    logger.critical({"object_uuid": object_uuid})
    query = 'MATCH (a:%s {object_uuid:"%s"}) RETURN a' % \
        (label.title(), object_uuid)
    res, _ = db.cypher_query(query)
    if res.one:
        res.one.pull()
    else:
        raise update_search_object.retry(
            exc=DoesNotExist('Object with uuid: %s '
                             'does not exist' % object_uuid),
            countdown=3, max_retries=None)
    if label == "question":
        instance = Question.inflate(res.one)
        object_data = QuestionSerializerNeo(instance).data
        if 'mission' in object_data:
            object_data.pop('mission')
        if 'profile' in object_data:
            object_data.pop('profile')
        logger.critical(object_data)
    elif label == "quest":
        instance = Quest.inflate(res.one)
        object_data = QuestSerializer(instance).data
        logger.critical(object_data)
    elif label == "mission":
        instance = Mission.inflate(res.one)
        object_data = MissionSerializer(instance).data
        # Need to pop review_feedback because ES's serializer cannot parse
        # set types.
        # If we don't pop it we receive
        # TypeError("Unable to serialize set([]) (type: <type 'set'>)",))
        # If we can submit a JSON serialized version we can get around this
        # by using:
        # from rest_framework.renderers import JSONRenderer
        # JSONRenderer().render(serializer.data)
        # Also please note python's json.dumps() function runs into this
        # same issue.
        if 'review_feedback' in object_data:
            object_data.pop('review_feedback', None)
        if 'quest' in object_data:
            object_data.pop('quest')
        logger.critical(object_data)
    elif label == "pleb":
        instance = Pleb.inflate(res.one)
        object_data = PlebSerializerNeo(instance).data
        if 'quest' in object_data:
            object_data.pop('quest')
        logger.critical(object_data)
    else:
        # Currently we only need this functionality for Questions as
        # they are the only objects in search that we display votes
        # for in the search interface.
        error_dict = {
            "message": "Search False setup. "
                       "Object Data None, Instance not None",
            "instance_label": label,
            "instance_uuid": object_uuid,
        }
        logger.critical(error_dict)
        return False
    try:
        es = Elasticsearch(settings.ELASTIC_SEARCH_HOST)
        res = es.index(index=index, doc_type=object_data['type'],
                       id=object_uuid, body=object_data)
    except (ElasticsearchException, TransportError,
            ConflictError, RequestError) as e:
        logger.exception("Failed to connect to Elasticsearch")
        logger.critical(object_data)
        raise update_search_object.retry(exc=e, countdown=5,
                                         max_retries=None)
    except KeyError:
        error_dict = {
            "message": "Search: KeyError False creation",
            "instance_uuid": object_uuid,
            "object_data": object_data
        }
        logger.critical(error_dict)
        return False
    try:
        if instance.search_id is None:
            instance.search_id = res['_id']
            instance.populated_es_index = True
            instance.save()
    except AttributeError:
        pass
    cache.delete("%s_vote_search_update" % object_uuid)
    return res

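# The comment in the "mission" branch above notes that Elasticsearch's JSON
# serialization (and json.dumps) cannot handle Python set values, which is why
# review_feedback is popped. A minimal sketch of the JSONRenderer workaround
# that same comment suggests, assuming a DRF serializer instance;
# render_for_search is a hypothetical helper, not part of the original
# codebase.
from rest_framework.renderers import JSONRenderer


def render_for_search(serializer):
    # DRF's renderer coerces sets and other non-JSON types produced by
    # serializer fields into a JSON byte string, which es.index() accepts as a
    # body, so Elasticsearch never sees the raw set.
    return JSONRenderer().render(serializer.data)


# Usage sketch (hypothetical):
# body = render_for_search(MissionSerializer(instance))
# es.index(index=index, doc_type='mission', id=object_uuid, body=body)
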
def newsfeed(self, request):
    """
    The newsfeed endpoint expects to be called on the me endpoint and
    assumes that the request object provided will contain the user the
    newsfeed is being provided to.

    It is not included as a list_route on the me endpoint because the me
    endpoint is not a viewset. If we transition to that structure it could
    easily be moved to a list_route there.

    Query if we want to grab tags:
        MATCH (a:Pleb {username: "%s"})-
              [OWNS_QUESTION]->(questions:Question)-[:TAGGED_AS]->(tags:Tag)
        WHERE questions.to_be_deleted = False AND questions.created > %s
        RETURN questions, tags.name AS tags, NULL as solutions,
               NULL as posts
        UNION
        MATCH (a)-[manyFriends:FRIENDS_WITH*2 {active: True}]->
              ()-[OWNS_QUESTION]->(questions:Question)-[:TAGGED_AS]->
              (tags:Tag)
        WHERE questions.to_be_deleted = False AND questions.created > %s
        RETURN questions, tags.name AS tags, NULL as posts,
               NULL as solutions
        UNION

    :param request:
    """
    # This query retrieves all of the current user's posts, solutions, and
    # questions, as well as the posts, solutions, and questions of their
    # direct friends. It then looks for their friends' friends' solutions
    # and questions, combines all of the content, and returns the result.
    # The query filters out content scheduled for deletion and only looks
    # for content created more recently than the time provided. Friends of
    # friends' posts are excluded to protect privacy: friends of friends
    # have not actually been accepted as friends by the user and therefore
    # should not have access to information posted on the user's wall,
    # which in this case would be their posts.
    # We currently do not sort this query in Neo4j because we are waiting
    # for post-processing on UNIONs as a whole to be added as a feature.
    # See Github issue #2725 for updates
    # https://github.com/neo4j/neo4j/issues/2725
    then = (datetime.now(pytz.utc) - timedelta(days=120)).strftime("%s")
    query = \
        '// Retrieve all the current users questions\n' \
        'MATCH (a:Pleb {username: "%s"})<-[:OWNED_BY]-' \
        '(questions:Question) ' \
        'WHERE questions.to_be_deleted = False AND questions.created > %s' \
        ' AND questions.is_closed = False ' \
        'RETURN questions, NULL AS solutions, NULL AS posts, ' \
        'questions.created AS created, NULL AS s_question, ' \
        'NULL AS mission, NULL AS updates, NULL AS q_mission, ' \
        'NULL AS news UNION ' \
        '' \
        '// Retrieve all the news articles the user may be \n' \
        '// interested in\n' \
        'MATCH (a:Pleb {username: "%s"})-[:INTERESTED_IN]->' \
        '(tag:Tag)<-[:TAGGED_AS]-(news:NewsArticle) ' \
        'WHERE news.published > %s AND news.is_closed = False ' \
        ' RETURN DISTINCT news, NULL AS solutions, NULL AS posts, ' \
        'news.published AS created, NULL AS s_question, ' \
        'NULL AS mission, NULL AS updates, NULL AS q_mission, ' \
        'NULL AS questions UNION ' \
        '' \
        '// Retrieve all the current users solutions\n' \
        'MATCH (a:Pleb {username: "%s"})<-' \
        '[:OWNED_BY]-(solutions:Solution)<-' \
        '[:POSSIBLE_ANSWER]-(s_question:Question) ' \
        'WHERE s_question.to_be_deleted = False ' \
        'AND solutions.created > %s' \
        ' AND solutions.is_closed = False ' \
        'AND s_question.is_closed = False ' \
        'RETURN solutions, NULL AS questions, NULL AS posts, ' \
        'solutions.created AS created, s_question AS s_question,' \
        'NULL AS mission, NULL AS updates, NULL AS q_mission, ' \
        'NULL AS news UNION ' \
        '' \
        '// Retrieve all the current users posts\n' \
        'MATCH (a:Pleb {username: "%s"})<-[:OWNED_BY]-(posts:Post) ' \
        'WHERE posts.to_be_deleted = False AND posts.created > %s ' \
        'AND posts.is_closed = False ' \
        'RETURN posts, NULL as questions, NULL as solutions, ' \
        'posts.created AS created, NULL AS s_question,' \
        'NULL AS mission, NULL AS updates, NULL AS q_mission, ' \
        'NULL AS news UNION ' \
        '' \
        '// Retrieve all the posts on the current users wall that are \n' \
        '// not owned by the current user \n' \
        'MATCH (a:Pleb {username: "%s"})-[:OWNS_WALL]->(w:Wall)' \
        '-[:HAS_POST]->(posts:Post) ' \
        'WHERE NOT (posts)-[:OWNED_BY]->(a) AND ' \
        'posts.to_be_deleted = False AND posts.created > %s ' \
        'AND posts.is_closed = False ' \
        'RETURN posts, NULL as questions, NULL as solutions, ' \
        'posts.created AS created, NULL AS s_question,' \
        'NULL AS mission, NULL AS updates, NULL AS q_mission, ' \
        'NULL AS news UNION ' \
        '' \
        '// Retrieve the missions affecting the given user\n' \
        'MATCH (a:Pleb {username: "%s"})-[:LIVES_AT]->(:Address)-' \
        '[:ENCOMPASSED_BY*..]->' \
        '(:Location)<-[:WITHIN]-(mission:Mission {active: true})' \
        '<-[:EMBARKS_ON]-(quest:Quest {active: true}) ' \
        'WHERE NOT((mission)-[:FOCUSED_ON]->(:Position {verified:false}))' \
        ' AND mission.created > %s ' \
        'RETURN mission, NULL AS solutions, NULL AS posts, ' \
        'NULL AS questions, mission.created AS created, ' \
        'NULL AS s_question, NULL AS updates, NULL AS q_mission, ' \
        'NULL AS news UNION ' \
        '' \
        '// Retrieve the mission updates affecting ' \
        '// the given user\n' \
        'MATCH (a:Pleb {username: "%s"})-[:LIVES_AT]->(:Address)-' \
        '[:ENCOMPASSED_BY*..]->' \
        '(:Location)<-[:WITHIN]-(q_mission:Mission {active: true})' \
        '<-[:EMBARKS_ON]-(quest:Quest {active: true}) WITH q_mission ' \
        'MATCH (q_mission)<-[:ABOUT]-(updates:Update) ' \
        'WHERE NOT((q_mission)-[:FOCUSED_ON]' \
        '->(:Position {verified:false}))' \
        ' AND updates.created > %s AND updates.is_closed = False ' \
        'RETURN updates, NULL AS solutions, NULL AS posts, ' \
        'NULL AS questions, updates.created AS created, ' \
        'NULL AS s_question, NULL as mission, q_mission, ' \
        'NULL AS news UNION ' \
        '' \
        '// Retrieve all the posts owned by users that the current user ' \
        '// is following \n' \
        'MATCH (a:Pleb {username: "%s"})-' \
        '[r:FOLLOWING {active: True}]->' \
        '(:Pleb)<-[:OWNED_BY]-(posts:Post) ' \
        'WHERE posts.to_be_deleted = False AND ' \
        'posts.created > %s AND posts.is_closed = False ' \
        'RETURN NULL AS solutions, posts AS posts, ' \
        'NULL AS questions, posts.created AS created, ' \
        'NULL AS s_question, NULL AS mission, NULL AS updates, ' \
        'NULL AS q_mission, ' \
        'NULL AS news UNION ' \
        '' \
        '// Retrieve all the users questions that the current user is ' \
        '// following \n' \
        'MATCH (a:Pleb {username: "%s"})-' \
        '[r:FOLLOWING {active: True}]->' \
        '(:Pleb)<-[:OWNED_BY]-(questions:Question) ' \
        'WHERE questions.to_be_deleted = False AND ' \
        'questions.created > %s AND questions.is_closed = False ' \
        'RETURN NULL AS solutions, NULL AS posts, ' \
        'questions AS questions, questions.created AS created, ' \
        'NULL AS s_question, NULL AS mission, NULL AS updates, ' \
        'NULL AS q_mission, ' \
        'NULL AS news UNION ' \
        '' \
        '// Retrieve all the users solutions that the current user is ' \
        '// following \n' \
        'MATCH (a:Pleb {username: "%s"})-' \
        '[r:FOLLOWING {active: True}]->' \
        '(:Pleb)<-[:OWNED_BY]-(solutions:Solution)<-' \
        '[:POSSIBLE_ANSWER]-(s_question:Question) ' \
        'WHERE s_question.to_be_deleted = False AND ' \
        'solutions.created > %s AND solutions.is_closed = False ' \
        'RETURN solutions, NULL AS posts, ' \
        'NULL AS questions, solutions.created AS created, ' \
        's_question as s_question, NULL AS mission, NULL AS updates, ' \
        'NULL AS q_mission, ' \
        'NULL AS news' \
        % (request.user.username, then, request.user.username, then,
           request.user.username, then, request.user.username, then,
           request.user.username, then, request.user.username, then,
           request.user.username, then, request.user.username, then,
           request.user.username, then, request.user.username, then)
    news = []
    res, _ = db.cypher_query(query)
    # Profiled with ~50 objects and it was still performing under 1 ms.
    # By the time sorting in Python becomes an issue the above mentioned
    # ticket should be resolved.
    res = sorted(res, key=attrgetter('created'), reverse=True)
    page = self.paginate_queryset(res)
    for row in page:
        news_article = None
        if row.questions is not None:
            row.questions.pull()
            news_article = QuestionSerializerNeo(
                Question.inflate(row.questions),
                context={'request': request}).data
        elif row.solutions is not None:
            row.s_question.pull()
            row.solutions.pull()
            question_data = QuestionSerializerNeo(
                Question.inflate(row.s_question)).data
            news_article = SolutionSerializerNeo(
                Solution.inflate(row.solutions),
                context={'request': request}).data
            news_article['question'] = question_data
        elif row.posts is not None:
            row.posts.pull()
            news_article = PostSerializerNeo(
                Post.inflate(row.posts),
                context={'request': request}).data
        elif row.mission is not None:
            row.mission.pull()
            news_article = MissionSerializer(
                Mission.inflate(row.mission),
                context={'request': request}).data
            news_article['reputation'] = Pleb.get(
                username=news_article['owner_username']).reputation
        elif row.updates is not None:
            row.updates.pull()
            row.q_mission.pull()
            news_article = UpdateSerializer(
                Update.inflate(row.updates),
                context={'request': request}).data
            news_article['mission'] = MissionSerializer(
                Mission.inflate(row.q_mission),
                context={'request': request}).data
        elif row.news is not None:
            row.news.pull()
            news_article = NewsArticleSerializer(
                NewsArticle.inflate(row.news),
                context={'request': request}).data
        news.append(news_article)
    return self.get_paginated_response(news)