Example #1
def get_from_table(table_name):
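    """Fetch every row of table_name and return the value in each row's second column."""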
    logging.info(f'{str(datetime.datetime.now())}: Getting {table_name}')
    con = psycopg2.connect(DATABASE_URL, sslmode='require')
    result = query_all(con, table_name)
    con.close()

    return [d[1] for d in result]
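The snippet above relies on a query_all helper that is not shown. A minimal sketch of what it might look like, assuming it simply fetches every row of the table over the open psycopg2 connection (the table name comes from trusted code, since it is interpolated into the SQL):

def query_all(con, table_name):
    # Assumed helper (not in the source): fetch every row from the table;
    # callers index into the returned tuples, e.g. d[1] above.
    with con.cursor() as cur:
        cur.execute(f'SELECT * FROM {table_name}')
        return cur.fetchall()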
Example #2
def get_question_id_list(type='选择题'):
    """
        获取 试题
    """
    sql = "select id from docai.question where type='{type}'"
    ids = db_helper.query_all(sql.format(type=type))
    return [id['id'] for id in ids]
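The query above is built with str.format, so the type value is spliced into the SQL string directly. Assuming db_helper wraps a DB-API connection whose driver accepts %s placeholders (an assumption, its implementation is not shown), a parameterized variant would avoid manual quoting:

def get_question_id_list_params(con, type='选择题'):
    # Hypothetical parameterized version; `con` is assumed to be a DB-API
    # connection (e.g. psycopg2) supporting %s placeholders.
    with con.cursor() as cur:
        cur.execute("select id from docai.question where type = %s", (type,))
        return [row[0] for row in cur.fetchall()]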
Example #3
async def on_ready():
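    """Periodically scrape Reddit and post new matching submissions to the channel.

    Subreddits, keywords and forbidden words are reloaded from the database
    on every iteration.
    """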
    channel = client.get_channel(CHANNEL_ID)
    logging.info(f'{str(datetime.datetime.now())}: Bot is ready')

    while True:
        logging.info(
            f'{str(datetime.datetime.now())}: Checking for new submissions: ')

        con = psycopg2.connect(DATABASE_URL, sslmode='require')

        all_subreddits = '+'.join([d[1] for d in query_all(con, 'subreddit')])
        all_keywords = [d[1] for d in query_all(con, 'keyword')]
        all_forbidden_words = [d[1] for d in query_all(con, 'forbidden_word')]

        logging.info(
            f'{str(datetime.datetime.now())}: Subreddits: {all_subreddits}')
        logging.info(
            f'{str(datetime.datetime.now())}: Keywords: {all_keywords}')
        logging.info(
            f'{str(datetime.datetime.now())}: Forbidden Words: {all_forbidden_words}'
        )
        logging.info(f'{str(datetime.datetime.now())}: Begin scraping')

        submissions = get_scraped_submissions(all_subreddits, all_keywords,
                                              all_forbidden_words)

        for submission in submissions:
            submission_does_exist = does_exist(con, 'submission', 'id',
                                               submission.id)

            if not submission_does_exist:
                logging.info(
                    f'{str(datetime.datetime.now())}: Found new submission: {submission.title[:100]}'
                )
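                # Record the submission (title truncated to 100 chars, single
                # quotes stripped to keep the raw SQL insert valid), then
                # notify the channel.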
                insert(
                    con, 'submission', ['id', 'title'],
                    [submission.id, submission.title[:100].replace("'", "")])
                await channel.send(
                    f'```{submission.title}```\n @everyone \n\n{submission.url}'
                )

        logging.info(f'{str(datetime.datetime.now())}: Finished scraping')

        # Close the connection and sleep for 1 minute
        con.close()
        await asyncio.sleep(60)
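The handler above references a module-level client, CHANNEL_ID and DATABASE_URL defined elsewhere. A rough sketch of the surrounding setup with discord.py, assuming the configuration comes from environment variables (the variable names here are illustrative, not taken from the source):

import asyncio
import datetime
import logging
import os

import discord
import psycopg2

DATABASE_URL = os.environ['DATABASE_URL']
CHANNEL_ID = int(os.environ['CHANNEL_ID'])

client = discord.Client(intents=discord.Intents.default())

# Register the coroutine defined above as the on_ready event handler
# and start the bot.
client.event(on_ready)
client.run(os.environ['DISCORD_TOKEN'])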
Example #4
def get_question_id_list(stage='语文', subject='数学', type='选择题'):
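    """Return question IDs for the given stage and subject, filtered by question type.

    If type is None, IDs of questions of every type are returned.
    """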
    sql = "select a.id as id from question as a join link_knowledge as b on a.type='{type}' and a.id=b.question_id  join knowledge as c on b.knowledge_id=c.id where c.stage='{stage}' and c.subject='{subject}' order by a.id"

    if type is None:
        sql = "select b.question_id as id from knowledge as c  join link_knowledge as b on b.knowledge_id=c.id where c.stage='{stage}' and c.subject='{subject}' order by b.question_id "
    f_sql = sql.format(stage=stage, subject=subject, type=type)
    ids = db_helper.query_all(f_sql)
    id_list = [id['id'] for id in ids]
    return id_list
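Hypothetical usage of the function above: with the defaults it returns the IDs of multiple-choice questions (选择题) for the given stage and subject, while passing type=None drops the type filter:

choice_ids = get_question_id_list(stage='语文', subject='数学', type='选择题')
all_ids = get_question_id_list(stage='语文', subject='数学', type=None)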