Example #1
def import_comments(import_id: str, comments: List[Comment]):
    """Imports test comments."""

    log(import_id, "Importing comments...")

    for comment in comments:
        import_comment(comment)

    log(import_id, "Done importing comments.")
Example #2
def import_user(import_id: str, user: User):
    """Imports a test user."""

    # if is_artist_dnp('kemono-dev', user['id']):
    #     log(import_id, f"Skipping user {user['id']} because they are in do not post list")
    #     return

    try:
        save_user_to_db(user)
        log(import_id, f"Finished importing creator \"{user['id']}\"")
    except Exception as e:
        log(import_id, f"ERROR {e}: FAILED TO IMPORT USER \"{user['id']}\"")
Example #3
def generate_random_entries():
    key = dev_random.string(127, 255)
    import_id = get_import_id(key)
    # service = service_name
    target = importer.import_posts
    contributor_id: str = request.form.get("account_id")
    args = (key, contributor_id)

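    # `target` (the service importer) and `args` are handed to the
    # `import_posts` wrapper, which FlaskThread runs off the request thread;
    # the watcher in Example #9 uses the same pattern.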
    if target and args:
        logger.log(import_id,
                   f'Starting import. Your import id is "{import_id}".')
        FlaskThread(target=import_posts,
                    args=(import_id, target, args)).start()
    else:
        logger.log(
            import_id,
            f'Error starting import. Your import id is "{import_id}".')

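    # Flask-style (body, status) response tuple.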
    return import_id, 200
Example #4
def import_files(import_id: str, files: List[Random_File]):
    """Imports test files."""

    log(import_id, "Importing files...")
    log(import_id, f'{len(files)} files are going to be "imported"')

    for file in files:
        log(import_id, f"Importing file \"{file['path']}\"")
        # transform the file into `File_Model` here
        import_file(file)

    log(import_id, "Done importing file.")
Example #5
def import_users(import_id: str, users: List[User]):
    """Imports test users."""
    log(import_id, f"{len(users)} creators are going to be \"imported\"")

    if users:
        for user in users:
            log(import_id, f"Importing user \"{user['id']}\"")
            import_user(import_id, user)
        log(import_id, "Finished importing users")
    else:
        log(import_id, "No users supplied. Nothing will be imported.")
Example #6
def import_dms(import_id: str, dms: List[DM]):
    """Imports test DMs."""

    log(import_id, "Importing DMs...")

    for dm in dms:
        log(import_id,
            f"Importing dm \"{dm['id']}\" from user \"{dm['user']}\"")
        import_dm(dm)

    log(import_id, "Done importing DMs.")
Example #7
def import_posts(import_id: str, posts: List[Post]):
    """Imports test posts."""

    log(import_id, f'{len(posts)} posts are going to be "imported".')

    for post in posts:
        log(
            import_id,
            f"Importing post \"{post['id']}\" from user \"{post['user']}\"."
        )
        import_post(post)

    log(import_id, "Done importing posts.")
Example #8
def run_paysite_import(import_id: str,
                       key: str,
                       contributor_id: str,
                       random: Extended_Random = dev_random):
    """Runs the importer."""
    setthreadtitle(f'Kitsune Import|{import_id}')
    dataset = generate_dataset(random)
    dms: List[DM] = []
    users: List[User] = []
    posts: List[Post] = []
    comments: List[Comment] = []

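    # Convert the raw dataset dicts into typed models before handing them
    # to the per-entity import helpers.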
    if dataset['dms']:
        for dm in dataset['dms']:
            dm_model = DM(import_id=import_id,
                          contributor_id=contributor_id,
                          id=dm['id'],
                          user=dm['user'],
                          service=service_name,
                          file={},
                          published=dm['published'],
                          content=dm['content'])
            dms.append(dm_model)

    if dataset['users']:
        for user in dataset['users']:
            user_model = User(id=user['id'],
                              name=user['name'],
                              service=service_name)
            users.append(user_model)

            if user['posts']:
                for post in user['posts']:
                    files: List[File] = []
                    file_item: File = None
                    attachments: List[File] = []

                    if post['files']:
                        for file in post['files']:

                            # file_model = download_file(
                            #     file_path=file['path'],
                            #     service=service_name,
                            #     user=user['id'],
                            #     post=post['id'],
                            #     file_name=file['name']
                            # )

                            # files.append(file_model)
                            files.append(file)

                    if files:
                        file_item = files[0]
                    else:
                        file_item = {}

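                    # The first file becomes the post's primary file; any
                    # remaining files are treated as attachments.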
                    if len(files) > 1:
                        attachments.extend(files[1:])

                    post_model = Post(
                        id=post['id'],
                        user=post['user'],
                        service=service_name,
                        file=file_item,
                        attachments=attachments,
                        published=post['published'],
                        edited=post['edited'],
                        shared_file=False,
                        added=datetime.now(),
                        title=post['title'],
                        content=post['content'],
                        embed={},
                    )
                    posts.append(post_model)

                    if post['comments']:
                        for comment in post['comments']:
                            comment_model = Comment(
                                id=comment['id'],
                                post_id=post['id'],
                                commenter=comment['commenter_id'],
                                content=comment['content'],
                                service=service_name,
                                published=comment['published'],
                                parent_id=comment['parent_id'])
                            comments.append(comment_model)

    log(import_id, f'{len(dms)} DMs are going to be "imported"')
    import_dms(import_id, dms)
    log(import_id, f'{len(users)} artists are going to be "imported"')
    import_users(import_id, users)
    log(import_id, f'{len(posts)} posts are going to be "imported"')
    import_posts(import_id, posts)
    log(import_id, f'{len(comments)} comments are going to be "imported"')
    import_comments(import_id, comments)

    log(import_id,
        f"Finished the import \"{import_id}\" of service \"{service_name}\".")
    delete_keys([f'imports:{import_id}'])
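
# A minimal usage sketch (hypothetical values; `get_import_id` is the helper
# used in Example #3):
#     run_paysite_import(import_id=get_import_id('some-key'),
#                        key='some-key',
#                        contributor_id='contributor-1')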
Example #9
def watch(queue_limit=config.pubsub_queue_limit):
    archiver_id = ''.join(random.choice(string.ascii_letters + string.digits) for x in range(16))
    delete_keys_pattern(["running_imports:*"])
    setthreadtitle('KWATCHER')
    print(f'Key watcher ({archiver_id}) is starting!')

    redis = get_redis()
    threads_to_run = []
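    # Main loop: reap finished import threads, then scan redis for queued
    # import requests and start a thread per request, up to queue_limit.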
    while True:
        # Removing entries from a list while iterating over it skips
        # elements, so rebuild the list of live threads instead.
        threads_to_run = [thread for thread in threads_to_run if thread.is_alive()]
        
        for key in scan_keys('imports:*'):
            key_data = redis.get(key)
            if key_data:
                # scan_keys appears to return bytes (note the decode below),
                # so decode before splitting out the import id
                import_id = key.decode('utf-8').split(':')[1]
                try:
                    key_data = json.loads(key_data)
                except json.decoder.JSONDecodeError:
                    print(f'A decoding error occurred while processing import request {key.decode("utf-8")}; are you sending malformed JSON?')
                    delete_keys([key])
                    continue
                
                if redis.get(f"running_imports:{archiver_id}:{import_id}"):
                    continue

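                # Only spawn another import while there is room in the queue.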
                if len(threads_to_run) < queue_limit:
                    try:
                        target = None
                        args = None
                        # data = {
                        #     'key': key,
                        #     'key_id': key_id,
                        #     'service': service,
                        #     'allowed_to_auto_import': allowed_to_auto_import,
                        #     'allowed_to_save_session': allowed_to_save_session,
                        #     'allowed_to_scrape_dms': allowed_to_scrape_dms,
                        #     'channel_ids': channel_ids,
                        #     'contributor_id': contributor_id
                        # }
                        service_key = key_data['key']
                        key_id = key_data.get('key_id', None)
                        service = key_data['service']
                        allowed_to_auto_import = key_data.get('auto_import', False)
                        allowed_to_save_session = key_data.get('save_session_key', False)
                        allowed_to_scrape_dms = key_data.get('save_dms', False)
                        channel_ids = key_data.get('channel_ids')
                        contributor_id = key_data.get('contributor_id')

                        if service_key and service and allowed_to_save_session:
                            try:
                                encrypt_and_log_session(import_id, service, service_key)
                            except Exception:
                                logger.log(import_id, 'Exception occurred while logging session.', 'exception', to_client=False)

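                        # Dispatch to the service-specific importer; each
                        # branch builds the args its import_posts expects
                        # (patreon additionally receives the DM-scraping flag,
                        # discord its channel ids).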
                        if service == 'patreon':
                            target = patreon.import_posts
                            args = (service_key, allowed_to_scrape_dms, contributor_id, allowed_to_auto_import, key_id)
                        elif service == 'fanbox':
                            target = fanbox.import_posts
                            args = (service_key, contributor_id, allowed_to_auto_import, key_id)
                        elif service == 'subscribestar':
                            target = subscribestar.import_posts
                            args = (service_key, contributor_id, allowed_to_auto_import, key_id)
                        elif service == 'gumroad':
                            target = gumroad.import_posts
                            args = (service_key, contributor_id, allowed_to_auto_import, key_id)
                        elif service == 'fantia':
                            target = fantia.import_posts
                            args = (service_key, contributor_id, allowed_to_auto_import, key_id)
                        elif service == 'discord':
                            target = discord.import_posts
                            if channel_ids is None:
                                channel_ids = ''
                            args = (service_key, channel_ids.strip().replace(" ", ""), contributor_id, allowed_to_auto_import, key_id)
                        else:
                            logger.log(import_id, f'Service "{service}" unsupported.')
                            delete_keys([key])
                            continue

                        if target is not None and args is not None:
                            logger.log(import_id, f'Starting import. Your import id is {import_id}.')
                            thread = Thread(target=import_posts, args=(import_id, target, args))
                            thread.start()
                            threads_to_run.append(thread)
                            redis.set(f"running_imports:{archiver_id}:{import_id}", '1')
                        else:
                            logger.log(import_id, f'Error starting import. Your import id is {import_id}.')
                    except KeyError:
                        logger.log(import_id, 'Exception occurred while starting import due to missing data in payload.', 'exception', to_client=True)
                        delete_keys([key])
        
        time.sleep(1)
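
# Hypothetical entry point (not part of the original source): run the key
# watcher in the foreground with the default queue limit from config.
if __name__ == '__main__':
    watch()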