def _delete_process_inbox(delete: ap.Delete, new_meta: _NewMeta) -> None:
    """Process an incoming Delete activity.

    Marks the matching local Create as deleted, detaches the deleted
    object from its reply thread (decrementing the parent's reply
    bookkeeping via ``back._handle_replies_delete``), and flags every
    related activity (Like/Announce/...) as undone so it no longer
    surfaces.

    Args:
        delete: the incoming Delete activity.
        new_meta: extra metadata for the activity (currently unused here).
    """
    _logger.info(f"process_inbox activity={delete!r}")
    obj_id = delete.get_object_id()
    _logger.debug(f"delete object={obj_id}")
    try:
        # FIXME(tsileo): call the DB here instead? like for the outbox
        obj = ap.fetch_remote_activity(obj_id)
        _logger.info(f"inbox_delete handle_replies obj={obj!r}")
        in_reply_to = obj.get_in_reply_to() if obj.inReplyTo else None
        if obj.has_type(ap.CREATE_TYPES):
            # For a Create wrapper, the inReplyTo lives on the wrapped object
            # stored locally, not on the fetched activity itself.
            in_reply_to = ap._get_id(
                DB.activities.find_one({
                    "meta.object_id": obj_id,
                    "type": ap.ActivityType.CREATE.value,
                })["activity"]["object"].get("inReplyTo"))
        if in_reply_to:
            back._handle_replies_delete(MY_PERSON, in_reply_to)
    except Exception:
        # Best-effort: failing to fix up the reply thread must not
        # prevent the object from being tombstoned below.
        _logger.exception(f"failed to handle delete replies for {obj_id}")

    update_one_activity(
        {**by_object_id(obj_id), **by_type(ap.ActivityType.CREATE)},
        upsert({MetaKey.DELETED: True}),
    )

    # Force-undo ALL related activities (Like/Announce/...).  The previous
    # deprecated Collection.update call defaulted to multi=False and only
    # touched the first match; update_many matches the stated intent.
    DB.activities.update_many(by_object_id(obj_id), upsert({MetaKey.UNDO: True}))
def inbox_delete(self, as_actor: ap.Person, delete: ap.Delete) -> None:
    """Handle an incoming Delete: tombstone the object and clean up the thread.

    Marks the stored object as deleted, fakes an Undo on related
    Like/Announce activities so they disappear from the web UI, and
    updates the reply bookkeeping of the parent (if any).

    Args:
        as_actor: the actor performing the delete.
        delete: the incoming Delete activity.
    """
    obj = delete.get_object()
    # BUG FIX: this debug call was a plain string literal missing the
    # ``f`` prefix, so it logged the literal text "{obj!r}".
    logger.debug(f"delete object={obj!r}")
    DB.activities.update_one(
        {"activity.object.id": obj.id},
        {"$set": {"meta.deleted": True}},
    )
    logger.info(f"inbox_delete handle_replies obj={obj!r}")
    in_reply_to = obj.get_in_reply_to() if obj.inReplyTo else None
    if delete.get_object().ACTIVITY_TYPE != ap.ActivityType.NOTE:
        # For non-Note objects, the inReplyTo lives on the locally
        # stored Create wrapper rather than on the deleted object.
        in_reply_to = ap._get_id(
            DB.activities.find_one({
                "activity.object.id": delete.get_object().id,
                "type": ap.ActivityType.CREATE.value,
            })["activity"]["object"].get("inReplyTo"))

    # Fake a Undo so any related Like/Announce doesn't appear on the web UI.
    # update_many replaces the deprecated Collection.update (multi=False by
    # default), which only undid the first related activity.
    DB.activities.update_many(
        {"meta.object.id": obj.id},
        {"$set": {"meta.undo": True, "meta.extra": "object deleted"}},
    )

    if in_reply_to:
        self._handle_replies_delete(as_actor, in_reply_to)
def links_from_note(note: Dict[str, Any]) -> Set[str]:
    """Collect outbound http(s) links from a note's HTML content.

    Anchors whose scheme is not http/https, whose host matches the
    note's own host, or which fail ``is_url_valid`` are skipped.
    """
    # FIXME(tsileo): support summary and name fields
    note_host = urlparse(ap._get_id(note["id"]) or "").netloc

    found: Set[str] = set()
    if "content" not in note:
        return found

    soup = BeautifulSoup(note["content"], "html5lib")
    for anchor in soup.find_all("a"):
        href = anchor.get("href")
        parsed = urlparse(href)
        if parsed.scheme not in {"http", "https"}:
            continue
        if parsed.netloc == note_host:
            # Skip links pointing back at the note's own host
            continue
        if is_url_valid(href):
            found.add(href)

    return found
def _build_thread(data, include_children=True, query=None):  # noqa: C901
    """Build the flattened reply thread that contains *data*.

    Collects the thread root and all replies — from both the local
    activities collection and the separate ``DB.replies`` collection —
    deduplicates them by object id, links each reply to its parent via
    ``inReplyTo``, and returns the tree flattened depth-first with a
    ``_level`` key marking each node's depth.

    Args:
        data: the activity document the thread is built around; mutated
            in place (``_requested`` is set to True).
        include_children: currently unused in this implementation.
        query: optional extra MongoDB filter merged into every lookup.

    Returns:
        A list of activity documents in depth-first thread order.
    """
    if query is None:
        query = {}
    data["_requested"] = True
    app.logger.info(f"_build_thread({data!r})")
    # The root is the known thread parent if present, else the object itself.
    root_id = (data["meta"].get(MetaKey.THREAD_ROOT_PARENT.value)
               or data["meta"].get(MetaKey.OBJECT_ID.value)
               or data["remote_id"])

    replies = [data]
    # Create activities for the root object itself.
    for dat in find_activities({
            **by_object_id(root_id),
            **not_deleted(),
            **by_type(ap.ActivityType.CREATE),
            **query,
    }):
        replies.append(dat)
    # Create activities anywhere in the same thread.
    for dat in find_activities({
            **flag(MetaKey.THREAD_ROOT_PARENT, root_id),
            **not_deleted(),
            **by_type(ap.ActivityType.CREATE),
            **query,
    }):
        replies.append(dat)
    # Remote replies are stored bare in DB.replies; rewrap them so they
    # share the {"activity": {"object": ...}} shape of a Create.
    for dat in DB.replies.find({
            **flag(MetaKey.THREAD_ROOT_PARENT, root_id),
            **not_deleted(),
            **query
    }):
        # Make a Note/Question/... looks like a Create
        dat["meta"].update({
            MetaKey.OBJECT_VISIBILITY.value: dat["meta"][MetaKey.VISIBILITY.value]
        })
        dat = {
            "activity": {"object": dat["activity"]},
            "meta": dat["meta"],
            "_id": dat["_id"],
        }
        replies.append(dat)

    replies = sorted(replies, key=lambda d: d["meta"]["published"])

    # Index all the IDs in order to build a tree
    idx = {}
    replies2 = []
    for rep in replies:
        rep_id = rep["activity"]["object"]["id"]
        if rep_id in idx:
            # Dedupe: the same object can be returned by several queries.
            continue
        idx[rep_id] = rep.copy()
        idx[rep_id]["_nodes"] = []
        replies2.append(rep)

    # Build the tree
    for rep in replies2:
        rep_id = rep["activity"]["object"]["id"]
        if rep_id == root_id:
            continue
        reply_of = ap._get_id(rep["activity"]["object"].get("inReplyTo"))
        try:
            idx[reply_of]["_nodes"].append(rep)
        except KeyError:
            # Parent is missing (deleted or never fetched); orphan is dropped.
            app.logger.info(f"{reply_of} is not there! skipping {rep}")

    # Flatten the tree
    thread = []

    def _flatten(node, level=0):
        # Depth-first traversal; children ordered by publication date.
        node["_level"] = level
        thread.append(node)

        for snode in sorted(
                idx[node["activity"]["object"]["id"]]["_nodes"],
                key=lambda d: d["activity"]["object"]["published"],
        ):
            _flatten(snode, level=level + 1)

    try:
        _flatten(idx[root_id])
    except KeyError:
        # Root itself is missing — return whatever (nothing) was flattened.
        app.logger.info(f"{root_id} is not there! skipping")

    return thread
def _build_thread(data, include_children=True):  # noqa: C901
    """Build the flattened reply thread that contains *data*.

    Fetches every non-deleted activity in the same thread, rewraps bare
    objects so they look like Create activities, links replies to their
    parents via ``inReplyTo``, and returns the tree flattened
    depth-first with a ``_level`` key marking each node's depth.

    Args:
        data: the activity document the thread is built around; mutated
            in place (``_requested`` is set to True).
        include_children: currently unused in this implementation.

    Returns:
        A list of activity documents in depth-first thread order.
    """
    data["_requested"] = True
    app.logger.info(f"_build_thread({data!r})")
    root_id = data["meta"].get("thread_root_parent",
                               data["activity"]["object"]["id"])

    query = {
        "$or": [{
            "meta.thread_root_parent": root_id
        }, {
            "activity.object.id": root_id
        }],
        "meta.deleted": False,
    }
    replies = [data]
    for dat in DB.activities.find(query):
        # BUG FIX: the second branch was a bare `if`, not `elif`, so every
        # CREATE fell through to the `else` and was appended a second time
        # (wrapped as a fake Create). Also removed a leftover debug print().
        if dat["type"][0] == ap.ActivityType.CREATE.value:
            replies.append(dat)
        elif dat["type"][0] == ap.ActivityType.UPDATE.value:
            # Updates are already folded into their target; skip them.
            continue
        else:
            # Make a Note/Question/... looks like a Create
            dat = {
                "activity": {"object": dat["activity"]},
                "meta": dat["meta"],
                "_id": dat["_id"],
            }
            replies.append(dat)

    replies = sorted(replies, key=lambda d: d["activity"]["object"]["published"])

    # Index all the IDs in order to build a tree
    idx = {}
    replies2 = []
    for rep in replies:
        rep_id = rep["activity"]["object"]["id"]
        if rep_id in idx:
            # Dedupe: the same object can match both $or clauses.
            continue
        idx[rep_id] = rep.copy()
        idx[rep_id]["_nodes"] = []
        replies2.append(rep)

    # Build the tree
    for rep in replies2:
        rep_id = rep["activity"]["object"]["id"]
        if rep_id == root_id:
            continue
        reply_of = ap._get_id(rep["activity"]["object"].get("inReplyTo"))
        try:
            idx[reply_of]["_nodes"].append(rep)
        except KeyError:
            # Parent is missing (deleted or never fetched); orphan is dropped.
            app.logger.info(f"{reply_of} is not there! skipping {rep}")

    # Flatten the tree
    thread = []

    def _flatten(node, level=0):
        # Depth-first traversal; children ordered by publication date.
        node["_level"] = level
        thread.append(node)

        for snode in sorted(
                idx[node["activity"]["object"]["id"]]["_nodes"],
                key=lambda d: d["activity"]["object"]["published"],
        ):
            _flatten(snode, level=level + 1)

    try:
        _flatten(idx[root_id])
    except KeyError:
        # Root itself is missing — return whatever (nothing) was flattened.
        app.logger.info(f"{root_id} is not there! skipping")

    return thread