Example #1
def clean(config: CleanTootsConfig, delete: bool, headless: bool):
    """
    Delete Toots based on rules in config file.

    Without the `--delete` flag, toots will only be displayed.
    """
    if not _config_has_sections(config):
        return
    h = html2text.HTML2Text()
    h.ignore_links = True
    h.ignore_emphasis = True
    h.ignore_images = True
    h.ignore_tables = True

    for section in config.sections():
        section = config[section]
        user_secret_file = config.file(section.get("user_secret_file"))
        mastodon = Mastodon(access_token=user_secret_file)
        user = mastodon.me()
        page = mastodon.account_statuses(user["id"])
        would_delete = []
        protected = []
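        # Page through every status of the account; fetch_next() returns None once the last page has been reached, which ends the loop.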
        while page:
            for toot in page:
                protection_reason = _toot_protection_reason(toot, section)
                if protection_reason:
                    protected.append({"toot": toot, "reason": protection_reason})
                else:
                    would_delete.append(toot)

            page = mastodon.fetch_next(page)

        _delete_or_log(delete, h, headless, mastodon, protected, would_delete)
Example #2
def yield_statuses(mastodon: Mastodon,
                   account_id: int,
                   *,
                   since_id: int,
                   limit: int = 200) -> Generator[Status, None, None]:
    statuses = mastodon.account_statuses(account_id,
                                         limit=limit,
                                         since_id=since_id)
    yield from statuses
    while statuses:
        statuses = mastodon.fetch_next(statuses)
        if statuses:
            yield from statuses
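
A minimal usage sketch for the generator above; the instance URL, the credentials file name, and the since_id value are placeholders, not part of the original example:

from mastodon import Mastodon

mastodon = Mastodon(access_token="usercred.secret",          # placeholder credentials file
                    api_base_url="https://mastodon.social")  # placeholder instance
me = mastodon.me()
for status in yield_statuses(mastodon, me["id"], since_id=1):
    print(status["id"], status["created_at"])
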
Example #3
    os.chdir(cwd)

    pr = False
    text = []
    for line in s.split("\n"):
        if len(line) == 0:
            continue
        if line == "Ok.":
            continue
        if "filename" in line:
            pr = not pr
        if line[0] != ">" and pr:
            text.append(line)
        if line.startswith(">I"):
            text.append(line)
        if "Serial number" in line and newsave:
            pr = True

    post_lots(mentions, "\n".join(text), m)
    client.notifications_dismiss(m.id)


client = Mastodon(client_id="clientcred.secret",
                  access_token="usercred.secret",
                  api_base_url=api_base_url)

notifs = client.notifications()
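# Work through all queued notifications page by page; fetch_next() returns None after the last page.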
while notifs:
    for m in notifs:
        do_thing(m)
    notifs = client.fetch_next(notifs)

client.stream_user(CallbackStreamListener(notification_handler=do_thing))
Example #4
class MastodonEbooks:
    def __init__(self, options=None):
        options = options or {}
        self.api_base_url = options.get("api_base_url", "https://botsin.space")
        self.app_name = options.get("app_name", "ebooks")

        if path.exists("clientcred.secret") and path.exists("usercred.secret"):
            self.client = Mastodon(client_id="clientcred.secret",
                                   access_token="usercred.secret",
                                   api_base_url=self.api_base_url)

        if not path.exists("clientcred.secret"):
            print("No clientcred.secret, registering application")
            Mastodon.create_app(self.app_name,
                                api_base_url=self.api_base_url,
                                to_file="clientcred.secret")

        if not path.exists("usercred.secret"):
            print("No usercred.secret, registering application")
            self.email = input("Email: ")
            self.password = getpass("Password: ")
            self.client = Mastodon(client_id="clientcred.secret",
                                   api_base_url=self.api_base_url)
            self.client.log_in(self.email,
                               self.password,
                               to_file="usercred.secret")

    def setup(self):
        me = self.client.account_verify_credentials()
        following = self.client.account_following(me.id)

        with open("corpus.txt", "w+", encoding="utf-8") as fp:
            for f in following:
                print("Downloading toots for user @{}".format(f.username))
                for t in self._get_toots(f.id):
                    fp.write(t + "\n")

    def gen_toot(self):
        with open("corpus.txt", encoding="utf-8") as fp:
            model = markovify.NewlineText(fp.read())
        sentence = None
        # you will make that damn sentence
        while sentence is None or len(sentence) > 500:
            sentence = model.make_sentence(tries=100000)
        toot = sentence.replace("\0", "\n")
        return toot

    def post_toot(self, toot):
        self.client.status_post(toot, spoiler_text="markov toot")

    def _parse_toot(self, toot):
        if toot.spoiler_text != "":
            return
        if toot.reblog is not None:
            return
        if toot.visibility not in ["public", "unlisted"]:
            return

        soup = BeautifulSoup(toot.content, "html.parser")

        # pull the mentions out
        # for mention in soup.select("span.h-card"):
        #     mention.unwrap()

        # for mention in soup.select("a.u-url.mention"):
        #     mention.unwrap()

        # we will destroy the mentions until we're ready to use them
        # someday turbocat, you will talk to your siblings
        for mention in soup.select("span.h-card"):
            mention.decompose()

        # make all linebreaks actual linebreaks
        for lb in soup.select("br"):
            lb.insert_after("\n")
            lb.decompose()

        # make each p element its own line because sometimes they decide not to be
        for p in soup.select("p"):
            p.insert_after("\n")
            p.unwrap()

        # keep hashtags in the toots
        for ht in soup.select("a.hashtag"):
            ht.unwrap()

        # unwrap all links (i like the bots posting links)
        for link in soup.select("a"):
            link.insert_after(link["href"])
            link.decompose()

        text = map(lambda a: a.strip(), soup.get_text().strip().split("\n"))

        # next up: store this and patch markovify to take it
        # return {"text": text, "mentions": mentions, "links": links}
        # it's 4am though so we're not doing that now, but i still want the parser updates
        return "\0".join(list(text))

    def _get_toots(self, account_id):
        i = 0
        toots = self.client.account_statuses(account_id)
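        # account_statuses() returns a single page; fetch_next() follows the pagination link and returns None at the end.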
        while toots is not None:
            for toot in toots:
                t = self._parse_toot(toot)
                if t is not None:
                    yield t
            toots = self.client.fetch_next(toots)
            i += 1
            if i % 10 == 0:
                print(i)
Example #5
from mastodon import Mastodon
from time import sleep

# You can obtain the values below by registering an application under User settings → Development
mastodon = Mastodon(access_token='your_access_token',
                    client_id='your_client_id',
                    client_secret='your_client_secret',
                    api_base_url='https://instance.com')

account = mastodon.account_verify_credentials()

toots = mastodon.account_statuses(account)
for toot in toots:
    print(toot.id)
    mastodon.status_delete(toot)
    sleep(5)  # adjust the delay according to the load on your instance

# print('deleted first 20 toots')
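# fetch_next() follows the pagination info of a previously returned page and returns None when there are no further pages.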

next_toots = mastodon.fetch_next(toots)

while next_toots:
    for toot in next_toots:
        print(toot.id)
        sleep(5)  # adjust the delay according to the load on your instance
        mastodon.status_delete(toot)

    print("loop done")
    next_toots = mastodon.fetch_next(next_toots)

print("done")
Example #6
with open("wordlist.txt", "r") as wordfile:
    wordlist = wordfile.readlines()

# Thread handler that adds users that @ the bot to a queue
replyQueue = Queue()
lastIdMastodon = None
try:
    replies = []
    replies_page = mastodon_api.notifications()
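    # Collect up to 10 pages of notifications; fetch_next() returns None once the last page has been fetched.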
    for i in range(0, 10):
        print("Loaded page " + str(i))
        replies.extend(replies_page)
        replies_page = mastodon_api.fetch_next(replies_page)
        if replies_page is None:
            print("Reached end.")
            break

    replies.reverse()
    for reply in replies:
        if reply["type"] == "mention":
            replyQueue.put((reply["status"]["account"]["acct"], "mastodon",
                            reply["status"]["id"]))
            print("Mastodon: New entry to reply queue: " +
                  str(reply["status"]["account"]["acct"]))
except Exception as e:
    print("Mastodon: Error in fetch replies: " + str(e))

time.sleep(10)
servedUsers = sys.argv[1:]
Example #7
from mastodon import Mastodon

# Client key
key = ""

# Client Secret
secret = ""

# Access Token
token = ""

# Instance
instance = 'https://mastodon.social'

############################

lstBots = []

mastodon = Mastodon(client_id=key,
                    client_secret=secret,
                    access_token=token,
                    api_base_url=instance)

tl = mastodon.timeline_local(max_id=None, since_id=None, limit=40)
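# Scan up to five pages of the local timeline (40 statuses per page) and block every account flagged as a bot.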
for loop in range(0, 5):
    if not tl:
        break
    for status in tl:
        if status.account.bot and status.account.username not in lstBots:
            lstBots.append(status.account.username)
            mastodon.account_block(status.account.id)
    tl = mastodon.fetch_next(tl)

print("From the last 200 toots I've blocked " + str(len(lstBots)) + " bots!")