Example #1
def update_cached_actor(actor: ap.BaseActivity) -> None:
    actor_hash = _actor_hash(actor)
    # Refresh the cached/embedded actor wherever the stored hash is stale
    update_many_activities(
        {
            **flag(MetaKey.ACTOR_ID, actor.id),
            **flag(MetaKey.ACTOR_HASH, {"$ne": actor_hash}),
        },
        upsert({
            MetaKey.ACTOR: actor.to_dict(embed=True),
            MetaKey.ACTOR_HASH: actor_hash
        }),
    )
    # Same refresh for activities whose object's actor is this actor
    update_many_activities(
        {
            **flag(MetaKey.OBJECT_ACTOR_ID, actor.id),
            **flag(MetaKey.OBJECT_ACTOR_HASH, {"$ne": actor_hash}),
        },
        upsert({
            MetaKey.OBJECT_ACTOR: actor.to_dict(embed=True),
            MetaKey.OBJECT_ACTOR_HASH: actor_hash,
        }),
    )
    # TODO(tsileo): Also update following (it's in the object)
    # DB.activities.update_many(
    #     {"meta.object_id": actor.id}, {"$set": {"meta.object": actor.to_dict(embed=True)}}
    # )
    _cache_actor_icon(actor)
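
The `flag` and `upsert` helpers used above appear to build plain MongoDB filter and update documents keyed under a `meta.*` sub-document. The sketch below is an assumption inferred from how they are called here, not the project's actual implementation (the `MetaKey` values shown are placeholders):

from enum import Enum, unique
from typing import Any, Dict


@unique
class MetaKey(Enum):
    # Hypothetical subset of the metadata keys used in these examples
    ACTOR = "actor"
    ACTOR_ID = "actor_id"
    ACTOR_HASH = "actor_hash"
    OBJECT_ACTOR = "object_actor"
    OBJECT_ACTOR_ID = "object_actor_id"
    OBJECT_ACTOR_HASH = "object_actor_hash"


def _meta(key: MetaKey) -> str:
    # Metadata fields are assumed to live under the "meta" sub-document
    return f"meta.{key.value}"


def flag(key: MetaKey, val: Any) -> Dict[str, Any]:
    # Single-field fragment, e.g. {"meta.actor_id": "https://..."} or
    # {"meta.actor_hash": {"$ne": "..."}} when given an operator document
    return {_meta(key): val}


def upsert(data: Dict[MetaKey, Any]) -> Dict[str, Any]:
    # $set update document over the corresponding meta.* fields
    return {"$set": {_meta(k): v for k, v in data.items()}}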
Example #2
def task_process_new_activity() -> _Response:
    """Process an activity received in the inbox"""
    task = p.parse(flask.request)
    app.logger.info(f"task={task!r}")
    iri = task.payload
    try:
        activity = ap.fetch_remote_activity(iri)
        app.logger.info(f"activity={activity!r}")

        flags = {}

        # Record the publication date, falling back to the processing time
        if not activity.published:
            flags.update(flag(MetaKey.PUBLISHED, now()))
        else:
            flags.update(flag(MetaKey.PUBLISHED, activity.published))

        # Per-type handlers may add more flags (see Example #3 for Announce)
        set_inbox_flags(activity, flags)
        app.logger.info(f"a={activity}, flags={flags!r}")
        if flags:
            DB.activities.update_one({"remote_id": activity.id}, {"$set": flags})

        app.logger.info(f"new activity {iri} processed")
    except (ActivityGoneError, ActivityNotFoundError):
        app.logger.exception(f"dropping activity {iri}, skipping processing")
        return ""
    except Exception as err:
        app.logger.exception(f"failed to process new activity {iri}")
        raise TaskError() from err

    return ""
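
`set_inbox_flags` itself is not shown in these examples; given the naming and signature of `_announce_set_inbox_flags` in Example #3, it plausibly dispatches on the activity's class. A minimal sketch of that assumption using `functools.singledispatch` (the registration mechanism and the `little_boxes` import are guesses, not the project's confirmed design):

from functools import singledispatch
from typing import Any, Dict

from little_boxes import activitypub as ap  # assumed ActivityPub helper library

_NewMeta = Dict[str, Any]


@singledispatch
def set_inbox_flags(activity: ap.BaseActivity, new_meta: _NewMeta) -> None:
    # Default: no extra flags for activity types without a dedicated handler
    return None


@set_inbox_flags.register(ap.Announce)
def _announce_set_inbox_flags(activity: ap.Announce, new_meta: _NewMeta) -> None:
    # Body as in Example #3: notification, GC-keep and stream flags for boosts
    ...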
Example #3
def _announce_set_inbox_flags(activity: ap.Announce,
                              new_meta: _NewMeta) -> None:
    _logger.info(f"set_inbox_flags activity={activity!r}")
    obj = activity.get_object()
    # Is it an Announce/boost of a local activity (i.e. from the outbox)?
    if is_from_outbox(obj):
        # Flag it as a notification
        _flag_as_notification(activity, new_meta)

        # Also set the "keep mark" for the GC (as we want to keep it forever)
        _set_flag(new_meta, MetaKey.GC_KEEP)

    # Dedup boosts (it's annoying to see the same note multiple times on the same page)
    if not find_one_activity({
            **in_inbox(),
            **by_type([ap.ActivityType.CREATE, ap.ActivityType.ANNOUNCE]),
            **by_object_id(obj.id),
            **flag(MetaKey.STREAM, True),
            **published_after(
                datetime.now(timezone.utc) - timedelta(hours=12)),
    }):
        # Display it in the stream only if it's not already there (only looking at the last 12 hours)
        _set_flag(new_meta, MetaKey.STREAM)

    return None
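
The dedup lookup composes small query-builder helpers into one MongoDB filter via dict unpacking. A rough sketch of the fragments they might return (field names and value encodings are assumptions):

from datetime import datetime
from typing import Any, Dict, List, Union


def in_inbox() -> Dict[str, Any]:
    # Hypothetical: activities are tagged with the box they were stored in
    return {"box": "inbox"}


def by_type(types: Union[Any, List[Any]]) -> Dict[str, Any]:
    # Accepts a single ap.ActivityType or a list, as in Examples #3 and #4
    if isinstance(types, list):
        return {"type": {"$in": [t.value for t in types]}}
    return {"type": types.value}


def by_object_id(object_id: str) -> Dict[str, Any]:
    return {"meta.object_id": object_id}


def not_deleted() -> Dict[str, Any]:
    return {"meta.deleted": False}


def published_after(dt: datetime) -> Dict[str, Any]:
    # The stored format (ISO string vs. datetime) is an assumption
    return {"meta.published": {"$gt": dt.isoformat()}}

Because each helper targets a distinct field, unpacking them together with `**` merges cleanly into a single filter document without key collisions.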
Example #4
def _build_thread(data, include_children=True, query=None):  # noqa: C901
    if query is None:
        query = {}
    data["_requested"] = True
    app.logger.info(f"_build_thread({data!r})")
    root_id = (data["meta"].get(MetaKey.THREAD_ROOT_PARENT.value)
               or data["meta"].get(MetaKey.OBJECT_ID.value)
               or data["remote_id"])

    replies = [data]
    # Activities that reply directly to the thread root
    for dat in find_activities({
            **by_object_id(root_id),
            **not_deleted(),
            **by_type(ap.ActivityType.CREATE),
            **query,
    }):
        replies.append(dat)

    # Activities anywhere deeper in the thread (same thread root parent)
    for dat in find_activities({
            **flag(MetaKey.THREAD_ROOT_PARENT, root_id),
            **not_deleted(),
            **by_type(ap.ActivityType.CREATE),
            **query,
    }):
        replies.append(dat)

    # Bare reply objects from the replies collection (wrapped below to look like Creates)
    for dat in DB.replies.find({
            **flag(MetaKey.THREAD_ROOT_PARENT, root_id),
            **not_deleted(),
            **query
    }):
        # Make a Note/Question/... look like a Create
        dat["meta"].update({
            MetaKey.OBJECT_VISIBILITY.value:
            dat["meta"][MetaKey.VISIBILITY.value]
        })
        dat = {
            "activity": {
                "object": dat["activity"]
            },
            "meta": dat["meta"],
            "_id": dat["_id"],
        }
        replies.append(dat)

    replies = sorted(replies, key=lambda d: d["meta"]["published"])

    # Index all the IDs in order to build a tree
    idx = {}
    replies2 = []
    for rep in replies:
        rep_id = rep["activity"]["object"]["id"]
        if rep_id in idx:
            continue
        idx[rep_id] = rep.copy()
        idx[rep_id]["_nodes"] = []
        replies2.append(rep)

    # Build the tree
    for rep in replies2:
        rep_id = rep["activity"]["object"]["id"]
        if rep_id == root_id:
            continue
        reply_of = ap._get_id(rep["activity"]["object"].get("inReplyTo"))
        try:
            idx[reply_of]["_nodes"].append(rep)
        except KeyError:
            app.logger.info(f"{reply_of} is not there! skipping {rep}")

    # Flatten the tree
    thread = []

    def _flatten(node, level=0):
        node["_level"] = level
        thread.append(node)

        for snode in sorted(
                idx[node["activity"]["object"]["id"]]["_nodes"],
                key=lambda d: d["activity"]["object"]["published"],
        ):
            _flatten(snode, level=level + 1)

    try:
        _flatten(idx[root_id])
    except KeyError:
        app.logger.info(f"{root_id} is not there! skipping")

    return thread
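
The index/build/flatten steps at the end are a general pattern, independent of the activity schema. A self-contained toy version of the same approach, with made-up sample data:

# Toy illustration of the thread-building steps above: index replies by id,
# attach each reply to its parent via inReplyTo, then flatten depth-first
# while recording the nesting level.
replies = [
    {"id": "root", "inReplyTo": None, "published": "2020-01-01T00:00:00Z"},
    {"id": "a", "inReplyTo": "root", "published": "2020-01-01T01:00:00Z"},
    {"id": "b", "inReplyTo": "root", "published": "2020-01-01T02:00:00Z"},
    {"id": "c", "inReplyTo": "a", "published": "2020-01-01T03:00:00Z"},
]

# Index every reply by id and give each an empty children list
idx = {rep["id"]: {**rep, "_nodes": []} for rep in replies}

# Build the tree: attach each non-root reply to its parent when the parent is known
for rep in replies:
    parent = rep["inReplyTo"]
    if parent is not None and parent in idx:
        idx[parent]["_nodes"].append(idx[rep["id"]])

# Flatten depth-first, ordering siblings by publication date
thread = []


def _flatten(node, level=0):
    node["_level"] = level
    thread.append(node)
    for child in sorted(node["_nodes"], key=lambda d: d["published"]):
        _flatten(child, level=level + 1)


_flatten(idx["root"])
print([(n["id"], n["_level"]) for n in thread])
# -> [('root', 0), ('a', 1), ('c', 2), ('b', 1)]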