def test_user_session():
    """Check that session_scope yields a properly configured Session for DIRK."""
    with session_scope(DIRK) as session:
        # No commits here — just verify the object type handed back by the
        # context manager.
        assert isinstance(session, Session)
        # Verify the connection credentials too. `str()` on the bound engine's
        # `.url` goes through `sqlalchemy.engine.url.URL.__to_string__`, which
        # produces a valid string form of the connection credentials.
        bound_url = str(session.bind.url)
        assert bound_url == DIRK
def test_find_by_spider():
    """Each spider value ('tarantula', NULL, '') maps to exactly one stored job."""
    with session_scope(FARAH) as session:
        def fetch_data(criterion):
            # Run the single-row lookup and unwrap the one-column result tuple.
            return session.query(ScrapedJob.data).filter(criterion).one()[0]

        assert fetch_data(ScrapedJob.spider == 'tarantula') == example_data[0]
        # `.is_(None)` is SQLAlchemy's idiomatic spelling of `IS NULL`; it
        # emits the same SQL as `== None` without tripping E711 linters.
        assert fetch_data(ScrapedJob.spider.is_(None)) == example_data[1]
        assert fetch_data(ScrapedJob.spider == '') == example_data[2]
def test_db_setup():
    """Provision the database schema as the superuser.

    The default `postgres` superuser is required here: only a superuser may
    install extensions, and the table relies on `gen_random_uuid()` from the
    `pgcrypto` extension.
    """
    with session_scope(SUPERUSER) as session:
        conn = session.bind
        install_pgcrypto(conn)
        create_table(conn)
        alter_table_owner(conn, 'dirk_gently', 'scraped-jobs')
def test_insert_of_example_item():
    """Inserting a job built from `example` round-trips url and data intact."""
    inserted = ScrapedJob.from_dict(example)

    with session_scope(DIRK) as session:
        session.add(inserted)
        session.commit()

        # Read the row back by URL and compare it field-by-field against both
        # the in-memory object and the source dict.
        fetched = session.query(ScrapedJob).filter(
            ScrapedJob.url == example['url']).one()
        assert inserted.url == fetched.url
        assert fetched.url == example['url']
        assert inserted.data == fetched.data
        assert fetched.data == example
def test_as_dict():
    """as_dict() exposes the original payload plus stringified metadata keys."""
    with session_scope(FARAH) as session:
        job = session.query(ScrapedJob).filter(
            ScrapedJob.spider == 'tarantula').one()

        # Hoist the loop-invariant call: the original rebuilt the dict on
        # every membership test and lookup (2 calls per iteration).
        job_dict = job.as_dict()

        # All of the original payload must survive the round trip.
        for key, value in example_data[0].items():
            assert key in job_dict
            assert job_dict[key] == value

        # Database-generated metadata is present and serialized as strings.
        for key in ('created_at', 'last_modified', 'id'):
            assert key in job_dict
            assert isinstance(job_dict[key], str)
def test_user_creation():
    """create_user() makes a role visible in pg_roles that was absent before."""
    with session_scope(SUPERUSER) as session:
        def lookup_role():
            # Single place for the pg_roles probe; the original duplicated
            # this SQL literal verbatim before and after the creation call.
            query = session.bind.execute(
                """SELECT rolname FROM pg_roles WHERE rolname ='todd_brotzman'""")
            return query.fetchone()

        # The role must not exist yet, so the probe returns no row.
        assert lookup_role() is None

        create_user(session.bind, 'todd_brotzman')

        # Now the probe yields exactly the newly created role.
        assert lookup_role() == ('todd_brotzman',)
def setup_module(module):
    """Module-level pytest hook: provision the schema and seed test rows.

    Runs once before any test in this module.
    """
    # As the superuser: install the extension, build the table, and hand
    # ownership to the regular test role.
    engine = create_engine(SUPERUSER)
    install_pgcrypto(engine)
    create_table(engine)
    create_user(engine, 'farah_black')
    alter_table_owner(engine, 'farah_black', 'scraped-jobs')
    Base.metadata.create_all(engine)

    # As Farah: seed every fixture row. (Renamed the loop variable from
    # `example` to avoid confusion with the module-level `example` fixture.)
    with session_scope(FARAH) as session:
        for item in example_data:
            session.add(ScrapedJob.from_dict(item))
def test_spider_and_posted():
    """Each seeded URL carries the expected spider name and posting timestamp."""
    expectations = (
        ('https://example.com/job/1/', 'tarantula',
         datetime.datetime(2017, 1, 1, 9, 35, 7)),
        ('https://example.com/job/2/', None,
         datetime.datetime(2017, 2, 12, 12, 17, 53)),
        ('https://example.com/job/3/', '',
         datetime.datetime(2004, 9, 20, 16, 45, 22)),
    )

    with session_scope(FARAH) as session:
        for url, spider, posted in expectations:
            job = session.query(ScrapedJob).filter(
                ScrapedJob.url == url).one()
            if spider is None:
                # Preserve the identity check for the NULL-spider row.
                assert job.spider is None
            else:
                assert job.spider == spider
            assert job.posted == posted
def test_date_of_posting():
    """A job can be looked up by its posting date, which round-trips as ISO."""
    target_date = '2017-01-01'
    with session_scope(FARAH) as session:
        job = session.query(ScrapedJob).filter(
            ScrapedJob.date_of_posting == target_date).one()
        assert job.date_of_posting.isoformat() == target_date