def migrate_to_following(self):
    """Create reciprocal follow relationships for every existing
    FRIENDS_WITH pair.

    Pages through all Pleb nodes 25 at a time and, for each profile,
    follows every friend (and has the friend follow back).  Individual
    follow failures are swallowed so one bad edge cannot abort the
    whole migration.

    :return: True once the migration loop has finished.
    """
    skip = 0
    while True:
        query = 'MATCH (profile:Pleb) RETURN profile ' \
                'SKIP %s LIMIT 25' % skip
        # Advance by a full page; the previous increment of 24
        # re-processed the last row of every page.
        skip += 25
        res, _ = db.cypher_query(query)
        if not res.one:
            break
        for profile in [Pleb.inflate(row[0]) for row in res]:
            # "%s" placeholder restored -- the username must be
            # interpolated into the Cypher query for the match to work.
            friend_query = 'MATCH (a:Pleb {username: "%s"})' \
                           '-[:FRIENDS_WITH]->(b:Pleb) ' \
                           'RETURN b' % profile.username
            friend_res, _ = db.cypher_query(friend_query)
            for friend in [
                    Pleb.inflate(friend_row[0])
                    for friend_row in friend_res
            ]:
                try:
                    profile.follow(friend.username)
                    friend.follow(profile.username)
                except Exception:
                    # Best effort: ConstraintViolation (already
                    # following) and anything else is ignored.
                    pass
    self.stdout.write("completed friend migration\n", ending='')

    return True
# --- Esempio n. 2 ("Example n. 2" -- scraped-snippet separator; likes: 0) ---
    def populate_last_reputation_node(self):
        """Backfill ``last_counted_vote_node`` on every profile that does
        not already have one, using the profile's most recent vote on any
        content it owns.

        :return: True once the backfill loop has finished.
        """
        skip = 0
        while True:
            query = 'MATCH (profile:Pleb) RETURN profile ' \
                    'SKIP %s LIMIT 25' % skip
            # Advance by a full page; the previous increment of 24
            # re-processed the last row of every page.
            skip += 25
            res, _ = db.cypher_query(query)
            if not res.one:
                break
            for profile in [Pleb.inflate(row[0]) for row in res]:
                if profile.last_counted_vote_node is not None:
                    continue
                # "%s" placeholder restored so the username is actually
                # interpolated.  Use distinct names so the pagination
                # result above is not clobbered.
                vote_query = 'MATCH (v:Vote)<-[:LAST_VOTES]-' \
                             '(content:VotableContent)-[:OWNED_BY]->' \
                             '(p:Pleb {username: "%s"}) ' \
                             'WITH v ORDER BY v.created DESC ' \
                             'RETURN v LIMIT 1' % profile.username
                vote_res, _ = db.cypher_query(vote_query)
                if vote_res.one is not None:
                    profile.last_counted_vote_node = \
                        vote_res.one['object_uuid']
                    profile.save()

        self.stdout.write("completed vote population\n", ending='')

        return True
# --- Esempio n. 3 ("Example n. 3" -- scraped-snippet separator; likes: 0) ---
 def get_notification_from(self, obj):
     """Serialize the profile the given notification originated from;
     return an empty dict when no sender node is attached.
     """
     cypher = 'MATCH (a:Notification {object_uuid: "%s"})-' \
              '[:NOTIFICATION_FROM]->(b:Pleb) RETURN b' % obj.object_uuid
     result, _ = db.cypher_query(cypher)
     if result.one is None:
         return {}
     sender = Pleb.inflate(result[0][0])
     return PlebSerializerNeo(sender).data
# --- Esempio n. 4 ("Example n. 4" -- scraped-snippet separator; likes: 0) ---
 def create(self, request, *args, **kwargs):
     """Create a Solution on the parent Question and spawn notifications
     to the Question owner (and, when the Question belongs to a Mission,
     the mission owner as well).
     """
     serializer = self.get_serializer(data=request.data,
                                      context={"request": request})
     # Guard clause: bail out early on invalid input.
     if not serializer.is_valid():
         return Response(serializer.errors,
                         status=status.HTTP_400_BAD_REQUEST)
     question = Question.nodes.get(
         object_uuid=self.kwargs[self.lookup_field])
     instance = serializer.save(question=question)
     query = "MATCH (a:Question {object_uuid:'%s'})-[:OWNED_BY]->" \
             "(b:Pleb) RETURN b" % (self.kwargs[self.lookup_field])
     res, col = db.cypher_query(query)
     question_owner = Pleb.inflate(res[0][0])
     solution_data = serializer.data
     recipients = [question_owner.username]
     mission = question.get_mission(question.object_uuid)
     if mission:
         recipients.append(mission['owner_username'])
     spawn_task(task_func=spawn_notifications, task_param={
         "from_pleb": request.user.username,
         "sb_object": solution_data['object_uuid'],
         "url": reverse(
             'single_solution_page',
             kwargs={"object_uuid": solution_data["object_uuid"]}),
         # TODO discuss notifying all the people who have provided
         # solutions on a given question.
         "to_plebs": recipients,
         "notification_id": str(uuid1()),
         'action_name': instance.action_name
     })
     # Not going to add until necessary for search
     # spawn_task(task_func=add_solution_to_search_index,
     #            task_param={"solution": serializer})
     return Response(solution_data, status=status.HTTP_200_OK)
# --- Esempio n. 5 ("Example n. 5" -- scraped-snippet separator; likes: 0) ---
    def repopulate_elasticsearch(self):
        """Queue an async search-index refresh for every Pleb, Question,
        Quest, and Mission node in the graph.

        The four per-label loops were identical except for the label and
        class, so they are folded into one paginated helper.  This also
        fixes the page step: it previously advanced by 24 with a LIMIT of
        25, re-processing one row per page.
        """
        def _queue_updates(cypher_label, node_cls, search_label):
            # Page through all nodes with the given label, 25 at a time,
            # firing one update_search_object task per node.
            skip = 0
            while True:
                query = 'MATCH (n:%s) RETURN DISTINCT n ' \
                        'SKIP %s LIMIT 25' % (cypher_label, skip)
                skip += 25
                res, _ = db.cypher_query(query)
                if not res.one:
                    break
                for node in [node_cls.inflate(row[0]) for row in res]:
                    update_search_object.apply_async(kwargs={
                        "object_uuid": node.object_uuid,
                        "label": search_label
                    })

        # Profiles
        _queue_updates("Pleb", Pleb, "pleb")
        # Questions
        _queue_updates("Question", Question, "question")
        # Quests
        _queue_updates("Quest", Quest, "quest")
        # Missions
        _queue_updates("Mission", Mission, "mission")
# --- Esempio n. 6 ("Example n. 6" -- scraped-snippet separator; likes: 0) ---
 def endorsements(self, request, object_uuid=None):
     """Return a paginated, serialized list of the mission's endorsers,
     which may be individual profiles (Pleb) or quests (Quest).
     """
     endorsement_nodes = Mission.get_endorsements(object_uuid)
     page = self.paginate_queryset(endorsement_nodes)
     serialized_nodes = []
     for node in page:
         node_labels = node.labels
         if "Pleb" in node_labels:
             serialized_nodes.append(
                 PlebSerializerNeo(Pleb.inflate(node.e)).data)
         if "Quest" in node_labels:
             serialized_nodes.append(
                 QuestSerializer(Quest.inflate(node.e)).data)
     return self.get_paginated_response(serialized_nodes)
# --- Esempio n. 7 ("Example n. 7" -- scraped-snippet separator; likes: 0) ---
def update_search_query(username, query_param, keywords):
    """
    Record that *username* ran the search *query_param*.

    Connects (or updates) a SearchQuery node for the profile: repeat
    searches bump the times-searched counter on the relationship, new
    queries create the node and spawn keyword-creation tasks.

    :param username: username of the searching profile
    :param query_param: the raw search string
    :param keywords: list of keyword dicts to attach to a new query
    :return: True on success, or the Exception returned by spawn_task
    """
    try:
        # "%s" placeholder restored so the username is interpolated.
        res, _ = db.cypher_query("MATCH (a:Pleb {username:'%s'}) RETURN a" %
                                 username)
        if res.one:
            res.one.pull()
            pleb = Pleb.inflate(res.one)
        else:
            # Profile may not be replicated yet; retry indefinitely.
            raise update_search_query.retry(exc=DoesNotExist(
                "Profile with username: %s does not exist" % username),
                countdown=3, max_retries=None)
    except (CypherException, IOError) as e:
        raise update_search_query.retry(exc=e, countdown=3, max_retries=None)
    try:
        search_query = SearchQuery.nodes.get(search_query=query_param)
        if pleb.searches.is_connected(search_query):
            # Repeat search: bump the counter on the existing rel.
            rel = pleb.searches.relationship(search_query)
            rel.times_searched += 1
            rel.last_searched = datetime.now(pytz.utc)
            rel.save()
            return True
        # Existing query, first time for this profile: just connect.
        rel = pleb.searches.connect(search_query)
        rel.save()
        search_query.searched_by.connect(pleb)
        return True
    except (SearchQuery.DoesNotExist, DoesNotExist):
        # Brand-new query: create it, connect it, spawn keyword tasks.
        search_query = SearchQuery(search_query=query_param)
        search_query.save()
        search_query.searched_by.connect(pleb)
        rel = pleb.searches.connect(search_query)
        rel.save()
        for keyword in keywords:
            keyword['query_param'] = query_param
            spawned = spawn_task(task_func=create_keyword, task_param=keyword)
            if isinstance(spawned, Exception):
                return spawned
        return True
    except (CypherException, IOError) as e:
        raise update_search_query.retry(exc=e, countdown=3, max_retries=None)
    except Exception as e:
        # Catch-all so the task retries instead of dying permanently.
        raise update_search_query.retry(exc=e, countdown=3, max_retries=None)
# --- Esempio n. 8 ("Example n. 8" -- scraped-snippet separator; likes: 0) ---
def update_search_object(object_uuid,
                         label=None,
                         object_data=None,
                         index="full-search-base"):
    """
    Serialize a single graph node and (re-)index it into Elasticsearch.

    :param object_uuid: uuid of the node to index.
    :param label: lowercase node label ("question", "quest", "mission",
        "pleb"); when None it is derived from the node's parent
        VotableContent child label.
    :param object_data: unused as an input -- it is always overwritten
        with freshly serialized data below.
    :param index: name of the Elasticsearch index to write into.
    :return: the Elasticsearch index response on success, or False when
        the label is unsupported or the serialized data has no 'type'.
    """
    # Local imports to avoid circular dependencies between apps.
    from plebs.serializers import PlebSerializerNeo
    from sb_quests.serializers import QuestSerializer
    from sb_quests.neo_models import Quest
    from sb_missions.serializers import MissionSerializer
    from sb_missions.neo_models import Mission
    from sb_questions.serializers import QuestionSerializerNeo
    from sb_base.neo_models import get_parent_votable_content
    if label is None:
        label = get_parent_votable_content(
            object_uuid).get_child_label().lower()
    # NOTE(review): critical-level logging throughout this task looks
    # like leftover debug instrumentation -- confirm before lowering.
    logger.critical("Updating Search Object")
    logger.critical({"object_uuid": object_uuid})
    query = 'MATCH (a:%s {object_uuid:"%s"}) RETURN a' % \
            (label.title(), object_uuid)
    res, _ = db.cypher_query(query)
    if res.one:
        # Refresh the node from the graph before serializing it.
        res.one.pull()
    else:
        # Node not visible yet; retry indefinitely until it appears.
        raise update_search_object.retry(exc=DoesNotExist(
            'Object with uuid: %s '
            'does not exist' % object_uuid),
                                         countdown=3,
                                         max_retries=None)
    if label == "question":
        instance = Question.inflate(res.one)
        object_data = QuestionSerializerNeo(instance).data
        # Strip nested objects the search index does not need.
        if 'mission' in object_data:
            object_data.pop('mission')
        if 'profile' in object_data:
            object_data.pop('profile')
        logger.critical(object_data)
    elif label == "quest":
        instance = Quest.inflate(res.one)
        object_data = QuestSerializer(instance).data
        logger.critical(object_data)
    elif label == "mission":
        instance = Mission.inflate(res.one)
        object_data = MissionSerializer(instance).data
        # Need to pop review_feedback because ES's serializer cannot parse
        # set types.
        # If we don't pop it we receive
        # TypeError("Unable to serialize set([]) (type: <type 'set'>)",))
        # If we can submit a JSON serialized version we can get around this by
        # using:
        # from rest_framework.renderers import JSONRenderer
        # JSONRenderer().render(serializer.data)
        # Also please note python's json.dumps() function runs into this same
        # issue.
        if 'review_feedback' in object_data:
            object_data.pop('review_feedback', None)
        if 'quest' in object_data:
            object_data.pop('quest')
        logger.critical(object_data)
    elif label == "pleb":
        instance = Pleb.inflate(res.one)
        object_data = PlebSerializerNeo(instance).data
        if 'quest' in object_data:
            object_data.pop('quest')
        logger.critical(object_data)
    else:
        # Currently we only need this functionality for Questions as
        # they are the only objects in search that we display votes
        # for in the search interface.
        # NOTE(review): this branch fires for any unsupported label;
        # the message text below looks stale -- verify.
        error_dict = {
            "message": "Search False setup. "
            "Object Data None, Instance not None",
            "instance_label": label,
            "instance_uuid": object_uuid,
        }
        logger.critical(error_dict)
        return False
    try:
        es = Elasticsearch(settings.ELASTIC_SEARCH_HOST)
        # Index (upsert by id) the serialized node into Elasticsearch.
        res = es.index(index=index,
                       doc_type=object_data['type'],
                       id=object_uuid,
                       body=object_data)
    except (ElasticsearchException, TransportError, ConflictError,
            RequestError) as e:
        logger.exception("Failed to connect to Elasticsearch")
        logger.critical(object_data)
        raise update_search_object.retry(exc=e, countdown=5, max_retries=None)
    except KeyError:
        # Serialized data had no 'type' key to use as the doc_type.
        error_dict = {
            "message": "Search: KeyError False creation",
            "instance_uuid": object_uuid,
            "object_data": object_data
        }
        logger.critical(error_dict)
        return False
    try:
        # First successful indexing: remember the ES document id.
        if instance.search_id is None:
            instance.search_id = res['_id']
            instance.populated_es_index = True
            instance.save()
    except AttributeError:
        pass

    # Invalidate the cached vote-driven search update marker.
    cache.delete("%s_vote_search_update" % object_uuid)
    return res