Example #1
def test_prunes_doesnt_prune_snapshots_ts(library):
    coll = library._collection

    a = ts1
    c = ts2
    now = dt.utcnow()
    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=125))):
        library.write(symbol, a, prune_previous_version=False)
    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=122))):
        library.write(symbol, c, prune_previous_version=False)
    library.snapshot('snap')
    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=121))):
        library.write(symbol, a, prune_previous_version=False)
    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=119))):
        library.write(symbol, c, prune_previous_version=False)
    assert coll.versions.count() == 4

    # Prunes all versions older than the most recent version that's older than 10 mins
    library.write(symbol, a, prune_previous_version=True)
    assert coll.versions.count() == 4
    assert_frame_equal(library.read(symbol, as_of='snap').data, c)
    assert_frame_equal(library.read(symbol, as_of=3).data, a)
    assert_frame_equal(library.read(symbol, as_of=4).data, c)
    assert_frame_equal(library.read(symbol, as_of=5).data, a)

    # Remove the snapshot, the version should now be pruned
    library.delete_snapshot('snap')
    assert coll.versions.count() == 4
    library.write(symbol, c, prune_previous_version=True)
    assert coll.versions.count() == 4
    assert_frame_equal(library.read(symbol, as_of=4).data, c)
    assert_frame_equal(library.read(symbol, as_of=5).data, a)
    assert_frame_equal(library.read(symbol, as_of=6).data, c)
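
Example #1 above, like most of the snippets on this page, is shown without its imports. A plausible preamble that these version-store tests rely on (an assumption inferred from the names used, not part of the listed code) would be:

from datetime import datetime as dt, timedelta as dtd

import bson
import numpy as np
import pandas as pd
from mock import patch                               # unittest.mock also provides patch
from pandas.util.testing import assert_frame_equal   # pandas.testing in newer pandas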
Example #2
def test_prunes_multiple_versions_ts(library):
    coll = library._collection

    a = ts1
    c = ts2
    # Create an ObjectId
    now = dt.utcnow()
    with patch("bson.ObjectId",
               return_value=bson.ObjectId.from_datetime(now -
                                                        dtd(minutes=125))):
        library.write(symbol, a, prune_previous_version=False)
    with patch("bson.ObjectId",
               return_value=bson.ObjectId.from_datetime(now -
                                                        dtd(minutes=122))):
        library.write(symbol, c, prune_previous_version=False)
    with patch("bson.ObjectId",
               return_value=bson.ObjectId.from_datetime(now -
                                                        dtd(minutes=121))):
        library.write(symbol, a, prune_previous_version=False)
    with patch("bson.ObjectId",
               return_value=bson.ObjectId.from_datetime(now -
                                                        dtd(minutes=119))):
        library.write(symbol, c, prune_previous_version=False)
    assert coll.versions.count() == 4

    # Prunes all versions older than the most recent version that's older than 10 mins
    library.write(symbol, a, prune_previous_version=True)
    assert coll.versions.count() == 3
    assert_frame_equal(library.read(symbol, as_of=3).data, a)
    assert_frame_equal(library.read(symbol, as_of=4).data, c)
    assert_frame_equal(library.read(symbol, as_of=5).data, a)
Example #3
def test_prunes_doesnt_prune_snapshots_ts(library):
    coll = library._collection

    a = ts1
    c = ts2
    now = dt.utcnow()
    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=125))):
        library.write(symbol, a, prune_previous_version=False)
    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=122))):
        library.write(symbol, c, prune_previous_version=False)
    library.snapshot('snap')
    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=121))):
        library.write(symbol, a, prune_previous_version=False)
    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=119))):
        library.write(symbol, c, prune_previous_version=False)
    assert coll.versions.count() == 4

    # Prunes all versions older than the most recent version that's older than 10 mins
    library.write(symbol, a, prune_previous_version=True)
    assert coll.versions.count() == 4
    assert_frame_equal(library.read(symbol, as_of='snap').data, c)
    assert_frame_equal(library.read(symbol, as_of=3).data, a)
    assert_frame_equal(library.read(symbol, as_of=4).data, c)
    assert_frame_equal(library.read(symbol, as_of=5).data, a)

    # Remove the snapshot, the version should now be pruned
    library.delete_snapshot('snap')
    assert coll.versions.count() == 4
    library.write(symbol, c, prune_previous_version=True)
    assert coll.versions.count() == 4
    assert_frame_equal(library.read(symbol, as_of=4).data, c)
    assert_frame_equal(library.read(symbol, as_of=5).data, a)
    assert_frame_equal(library.read(symbol, as_of=6).data, c)
Example #4
def test_append_update(library):
    library.write(symbol, ts1)
    library.snapshot('snap')

    coll = library._collection

    # Assertions:
    assert coll.versions.count() == 1
    assert_frame_equal(library.read(symbol).data, ts1)

    # Append an item
    dts = list(ts1.index)
    dts.append(dts[-1] + dtd(days=1))
    values = list(ts1.near.values)
    values.append(47.)
    ts2 = pd.DataFrame(index=dts, data=values, columns=ts1.columns)
    ts2.index.name = ts1.index.name

    # Saving ts2 shouldn't create any new chunks.  Instead it should
    # reuse the last chunk.
    library.write(symbol, ts2, prune_previous_version=False)
    assert coll.versions.count() == 2
    assert_frame_equal(library.read(symbol, as_of='snap').data, ts1)
    assert_frame_equal(library.read(symbol).data, ts2)

    # We should be able to save a smaller timeseries too
    # This isn't likely to happen, so we don't care too much about space saving
    # just make sure we get it right.
    library.write(symbol, ts1, prune_previous_version=False)
    assert_frame_equal(library.read(symbol, as_of=1).data, ts1)
    assert_frame_equal(library.read(symbol, as_of=2).data, ts2)
    assert_frame_equal(library.read(symbol, as_of=3).data, ts1)

    # Append an item, and add a whole new chunk
    dts = list(ts2.index)
    dts.append(dts[-1] + dtd(days=1))
    dts.append(dts[-1] + dtd(days=40))
    values = list(ts2.near.values)
    values.append(47.)
    values.append(53.)
    ts3 = pd.DataFrame(index=dts, data=values, columns=ts1.columns)
    ts3.index.name = ts1.index.name

    library.write(symbol, ts3, prune_previous_version=False)
    assert_frame_equal(library.read(symbol, as_of=1).data, ts1)
    assert_frame_equal(library.read(symbol, as_of=2).data, ts2)
    assert_frame_equal(library.read(symbol, as_of=3).data, ts1)
    assert_frame_equal(library.read(symbol, as_of=4).data, ts3)

    library.write(symbol, ts3, prune_previous_version=False)
    assert_frame_equal(library.read(symbol, as_of=1).data, ts1)
    assert_frame_equal(library.read(symbol, as_of=2).data, ts2)
    assert_frame_equal(library.read(symbol, as_of=3).data, ts1)
    assert_frame_equal(library.read(symbol, as_of=4).data, ts3)
    assert_frame_equal(library.read(symbol, as_of=5).data, ts3)
Example #5
def test_append_update(library):
    library.write(symbol, ts1)
    library.snapshot('snap')

    coll = library._collection

    # Assertions:
    assert coll.versions.count() == 1
    assert_frame_equal(library.read(symbol).data, ts1)

    # Append an item
    dts = list(ts1.index)
    dts.append(dts[-1] + dtd(days=1))
    values = list(ts1.near.values)
    values.append(47.)
    ts2 = pd.DataFrame(index=dts, data=values, columns=ts1.columns)
    ts2.index.name = ts1.index.name

    # Saving ts2 shouldn't create any new chunks.  Instead it should
    # reuse the last chunk.
    library.write(symbol, ts2, prune_previous_version=False)
    assert coll.versions.count() == 2
    assert_frame_equal(library.read(symbol, as_of='snap').data, ts1)
    assert_frame_equal(library.read(symbol).data, ts2)

    # We should be able to save a smaller timeseries too
    # This isn't likely to happen, so we don't care too much about space saving
    # just make sure we get it right.
    library.write(symbol, ts1, prune_previous_version=False)
    assert_frame_equal(library.read(symbol, as_of=1).data, ts1)
    assert_frame_equal(library.read(symbol, as_of=2).data, ts2)
    assert_frame_equal(library.read(symbol, as_of=3).data, ts1)

    # Append an item, and add a whole new chunk
    dts = list(ts2.index)
    dts.append(dts[-1] + dtd(days=1))
    dts.append(dts[-1] + dtd(days=40))
    values = list(ts2.near.values)
    values.append(47.)
    values.append(53.)
    ts3 = pd.DataFrame(index=dts, data=values, columns=ts1.columns)
    ts3.index.name = ts1.index.name

    library.write(symbol, ts3, prune_previous_version=False)
    assert_frame_equal(library.read(symbol, as_of=1).data, ts1)
    assert_frame_equal(library.read(symbol, as_of=2).data, ts2)
    assert_frame_equal(library.read(symbol, as_of=3).data, ts1)
    assert_frame_equal(library.read(symbol, as_of=4).data, ts3)

    library.write(symbol, ts3, prune_previous_version=False)
    assert_frame_equal(library.read(symbol, as_of=1).data, ts1)
    assert_frame_equal(library.read(symbol, as_of=2).data, ts2)
    assert_frame_equal(library.read(symbol, as_of=3).data, ts1)
    assert_frame_equal(library.read(symbol, as_of=4).data, ts3)
    assert_frame_equal(library.read(symbol, as_of=5).data, ts3)
Example #6
def testRule(rule, start, end):
    try:
        start = (dp.parse(start[:-5]) - dtd(days=1)).isoformat()
        end = (dp.parse(end[:-5]) + dtd(days=0.5)).isoformat()

        os.system(f"elastalert-test-rule --alert --config {args.config} --start {start} --end {end} {rule} >/dev/null 2>&1")
        time.sleep(0.125)

        return True

    except ValueError:
        printMessage("[ERROR] THERE ARE NO INDEXED LOGS.", info="fail")

    return False
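
A hypothetical driver loop for testRule; the rules/*.yaml layout and the ISO-8601 window below are invented for illustration, and dp and args are assumed to be dateutil.parser and an argparse namespace with a config attribute (neither is shown on this page):

import glob

for rule in sorted(glob.glob("rules/*.yaml")):
    ok = testRule(rule, "2023-01-02T00:00:00.000Z", "2023-01-03T00:00:00.000Z")
    print(f"{rule}: {'triggered test run' if ok else 'no indexed logs'}")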
Example #7
def test_dont_cleanup_recent_orphaned_snapshots(mongo_host, library, data, dry_run):
    """
    Check that we do / don't cleanup chunks based on the dry-run
    """
    today = dt.utcnow() - dtd(hours=12, seconds=1)
    _id = bson.ObjectId.from_datetime(today)
    library.write('symbol', data, prune_previous_version=False)
    with patch("bson.ObjectId", return_value=_id):
        library.snapshot('snap_name')

    # Remove the version document ; should cleanup
    assert library._collection.snapshots.delete_many({})

    # No cleanup on dry-run
    if dry_run:
        run_as_main(main, '--library', 'user.library', '--host', mongo_host)
        assert library._collection.count() > 0
        assert library._collection.versions.count()
        assert repr(library.read('symbol').data) == repr(data)
        # Nothing done
        assert len(library._collection.versions.find_one({})['parent'])
    else:
        run_as_main(main, '--library', 'user.library', '--host', mongo_host, '-f')
        assert library._collection.count() > 0
        assert library._collection.versions.count()
        # Data still available (write with prune_previous_version will do the cleanup)
        assert repr(library.read('symbol').data) == repr(data)
        # Snapshot cleaned up
        assert len(library._collection.versions.find_one({})['parent'])
Example #8
def test_cleanup_orphaned_chunk_doesnt_break_versions(mongo_host, library,
                                                      data,
                                                      fw_pointers_config):
    """
    Check that a chunk pointed to by more than one version isn't inadvertently cleared
    """
    with FwPointersCtx(fw_pointers_config):
        yesterday = dt.utcnow() - dtd(days=1, seconds=1)
        _id = bson.ObjectId.from_datetime(yesterday)
        with patch("bson.ObjectId", return_value=_id):
            library.write('symbol', data, prune_previous_version=False)

        # Re-Write the data again
        # Write a whole new version rather than going down the append path...
        #     - we want two self-standing versions, the removal of one shouldn't break the other...
        with patch('arctic.store._ndarray_store._APPEND_COUNT', 0):
            library.write('symbol', data, prune_previous_version=False)
        library._delete_version('symbol', 1)
        library._collection.versions.delete_one({'_id': _id})
        assert repr(library.read('symbol').data) == repr(data)

        run_as_main(main, '--library', 'user.library', '--host', mongo_host,
                    '-f')
        assert repr(library.read('symbol').data) == repr(data)
        library.delete('symbol')
        assert mongo_count(library._collection.versions) == 0
Example #9
def test_cleanup_orphaned_snapshots_nop(mongo_host, library, data, dry_run,
                                        fw_pointers_config):
    """
    Check that we do / don't cleanup chunks based on the dry-run
    """
    with FwPointersCtx(fw_pointers_config):
        yesterday = dt.utcnow() - dtd(days=1, seconds=1)
        _id = bson.ObjectId.from_datetime(yesterday)
        library.write('symbol', data, prune_previous_version=False)
        with patch("bson.ObjectId", return_value=_id):
            library.snapshot('snap_name')

        # No cleanup on dry-run
        if dry_run:
            run_as_main(main, '--library', 'user.library', '--host',
                        mongo_host)
            assert mongo_count(library._collection) > 0
            assert mongo_count(library._collection.versions)
            assert repr(library.read('symbol').data) == repr(data)
            # Nothing done
            assert len(library._collection.versions.find_one({})['parent'])
        else:
            run_as_main(main, '--library', 'user.library', '--host',
                        mongo_host, '-f')
            assert mongo_count(library._collection) > 0
            assert mongo_count(library._collection.versions)
            # Data still available (write with prune_previous_version will do the cleanup)
            assert repr(library.read('symbol').data) == repr(data)
            # Nothing done
            assert len(library._collection.versions.find_one({})['parent'])
Example #10
def test_cleanup_orphaned_chunks(mongo_host, library, data, dry_run,
                                 fw_pointers_config):
    """
    Check that we do / don't cleanup chunks based on the dry-run
    """
    with FwPointersCtx(fw_pointers_config):
        yesterday = dt.utcnow() - dtd(days=1, seconds=1)
        _id = bson.ObjectId.from_datetime(yesterday)
        with patch("bson.ObjectId", return_value=_id):
            library.write('symbol', data, prune_previous_version=False)

        # Number of chunks
        chunk_count = mongo_count(library._collection)
        # Remove the version document ; should cleanup
        library._collection.versions.delete_one({'_id': _id})

        # No cleanup on dry-run
        if dry_run:
            run_as_main(main, '--library', 'user.library', '--host',
                        mongo_host)
            assert mongo_count(library._collection) == chunk_count
        else:
            run_as_main(main, '--library', 'user.library', '--host',
                        mongo_host, '-f')
            assert mongo_count(library._collection) == 0
Example #11
def test_dont_cleanup_recent_orphaned_snapshots(mongo_host, library, data,
                                                dry_run):
    """
    Check that we do / don't cleanup chunks based on the dry-run
    """
    today = dt.utcnow() - dtd(hours=12, seconds=1)
    _id = bson.ObjectId.from_datetime(today)
    library.write('symbol', data, prune_previous_version=False)
    with patch("bson.ObjectId", return_value=_id):
        library.snapshot('snap_name')

    # Remove the version document ; should cleanup
    assert library._collection.snapshots.delete_many({})

    # No cleanup on dry-run
    if dry_run:
        run_as_main(main, '--library', 'user.library', '--host', mongo_host)
        assert library._collection.count() > 0
        assert library._collection.versions.count()
        assert repr(library.read('symbol').data) == repr(data)
        # Nothing done
        assert len(library._collection.versions.find_one({})['parent'])
    else:
        run_as_main(main, '--library', 'user.library', '--host', mongo_host,
                    '-f')
        assert library._collection.count() > 0
        assert library._collection.versions.count()
        # Data still available (write with prune_previous_version will do the cleanup)
        assert repr(library.read('symbol').data) == repr(data)
        # Snapshot cleaned up
        assert len(library._collection.versions.find_one({})['parent'])
Example #12
def is_holiday(timeStepEnd, use_UK, extraHolidays):
    '''Determines whether the given date falls on a bank holiday.
    Considers UK holidays if use_UK is True.
    extraHolidays: list of date/datetime objects containing any extra holidays.
    UK holidays are generated automatically (see holidaysForYear).'''

    if isinstance(timeStepEnd, (dt, pd.Timestamp)):
        reqDate = timeStepEnd.date()
    elif isinstance(timeStepEnd, dtd):
        reqDate = timeStepEnd
    else:
        raise ValueError('Input date is invalid type: ' +
                         str(type(timeStepEnd)) +
                         '. Must be datetime() or datetime.date()')

    if use_UK:
        if reqDate in holidaysForYear(timeStepEnd.year):
            return True

    if reqDate in extraHolidays:
        return True

    return False
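
A hypothetical call, assuming dt and dtd alias datetime.datetime and datetime.date in this snippet (it builds dates with dtd(2015, 1, 1)) and that holidaysForYear from the later examples is in scope:

extra = [dtd(2015, 12, 24)]                                  # hypothetical company holiday
assert is_holiday(dt(2015, 12, 25), True, extra)             # UK bank holiday (Christmas Day)
assert is_holiday(dtd(2015, 12, 24), False, extra)           # matched via extraHolidays
assert not is_holiday(dt(2015, 12, 23, 17, 0), True, extra)  # ordinary working day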
Example #13
def test_delete_version_shouldnt_break_read(library):
    data = np.arange(30)
    yesterday = dt.utcnow() - dtd(days=1, seconds=1)
    _id = bson.ObjectId.from_datetime(yesterday)
    with patch("bson.ObjectId", return_value=_id):
        library.write('symbol', data, prune_previous_version=False)

    # Re-Write the data again
    library.write('symbol', data, prune_previous_version=False)
    library._delete_version('symbol', 1)
    assert repr(library.read('symbol').data) == repr(data)
Example #14
def test_delete_version_shouldnt_break_read(library):
    data = np.arange(30)
    yesterday = dt.utcnow() - dtd(days=1, seconds=1)
    _id = bson.ObjectId.from_datetime(yesterday)
    with patch("bson.ObjectId", return_value=_id):
        library.write('symbol', data, prune_previous_version=False)

    # Re-Write the data again
    library.write('symbol', data, prune_previous_version=False)
    library._delete_version('symbol', 1)
    assert repr(library.read('symbol').data) == repr(data)
Example #15
def test_prunes_multiple_versions_fully_different_tss(library):
    coll = library._collection

    a = ts1
    b = ts2
    c = b.copy()
    c.index = [i + dtd(days=365) for i in c.index]
    c.index.name = b.index.name
    # Create an ObjectId
    now = dt.utcnow()
    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=125))):
        library.write(symbol, a, prune_previous_version=False)
    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=124))):
        library.write(symbol, b, prune_previous_version=False)
    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=122))):
        library.write(symbol, c, prune_previous_version=False)
    # The a, b and c versions above will be pruned; a and b share months
    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=121))):
        library.write(symbol, c, prune_previous_version=False)
    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=119))):
        library.write(symbol, c, prune_previous_version=False)
    assert coll.versions.count() == 5

    # Prunes all versions older than the most recent version that's older than 10 mins
    library.write(symbol, c, prune_previous_version=True)
    assert_frame_equal(library.read(symbol, as_of=4).data, c)
    assert_frame_equal(library.read(symbol, as_of=5).data, c)
    assert_frame_equal(library.read(symbol, as_of=6).data, c)
Example #16
def test_prunes_multiple_versions_fully_different_tss(library):
    coll = library._collection

    a = ts1
    b = ts2
    c = b.copy()
    c.index = [i + dtd(days=365) for i in c.index]
    c.index.name = b.index.name
    # Create an ObjectId
    now = dt.utcnow()
    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=125))):
        library.write(symbol, a, prune_previous_version=False)
    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=124))):
        library.write(symbol, b, prune_previous_version=False)
    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=122))):
        library.write(symbol, c, prune_previous_version=False)
    # The a, b and c versions above will be pruned; a and b share months
    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=121))):
        library.write(symbol, c, prune_previous_version=False)
    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=119))):
        library.write(symbol, c, prune_previous_version=False)
    assert coll.versions.count() == 5

    # Prunes all versions older than the most recent version that's older than 10 mins
    library.write(symbol, c, prune_previous_version=True)
    assert_frame_equal(library.read(symbol, as_of=4).data, c)
    assert_frame_equal(library.read(symbol, as_of=5).data, c)
    assert_frame_equal(library.read(symbol, as_of=6).data, c)
Example #17
def test_should_list_symbols_from_the_underlying_library(toplevel_tickstore, arctic, start, end, startr, endr):
    arctic.initialize_library('FEED_2010.LEVEL1', tickstore.TICK_STORE_TYPE)
    arctic.initialize_library('FEED_2011.LEVEL1', tickstore.TICK_STORE_TYPE)
    toplevel_tickstore.add(DateRange(start=dt(2010, 1, 1), end=dt(2010, 12, 31, 23, 59, 59, 999000)), 'FEED_2010.LEVEL1')
    toplevel_tickstore.add(DateRange(start=dt(2011, 1, 1), end=dt(2011, 12, 31, 23, 59, 59, 999000)), 'FEED_2011.LEVEL1')
    dtstart = dt(2010, 1, 1, tzinfo=mktz('Europe/London'))
    for i in range(10):
        dates = pd.date_range(dtstart, periods=50, tz=mktz('Europe/London'))
        df = pd.DataFrame(np.random.randn(50, 4), index=dates, columns=list('ABCD'))
        dtstart = dates[-1] + dtd(days=1)
        toplevel_tickstore.write('sym' + str(i), df)
    expected_symbols = ['sym' + str(i) for i in range(startr, endr)]
    assert expected_symbols == toplevel_tickstore.list_symbols(DateRange(start=start, end=end))
Example #18
def test_should_list_symbols_from_the_underlying_library(toplevel_tickstore, arctic, start, end, startr, endr):
    arctic.initialize_library('FEED_2010.LEVEL1', tickstore.TICK_STORE_TYPE)
    arctic.initialize_library('FEED_2011.LEVEL1', tickstore.TICK_STORE_TYPE)
    toplevel_tickstore.add(DateRange(start=dt(2010, 1, 1), end=dt(2010, 12, 31, 23, 59, 59, 999000)), 'FEED_2010.LEVEL1')
    toplevel_tickstore.add(DateRange(start=dt(2011, 1, 1), end=dt(2011, 12, 31, 23, 59, 59, 999000)), 'FEED_2011.LEVEL1')
    dtstart = dt(2010, 1, 1, tzinfo=mktz('Europe/London'))
    for i in range(10):
        dates = pd.date_range(dtstart, periods=50, tz=mktz('Europe/London'))
        df = pd.DataFrame(np.random.randn(50, 4), index=dates, columns=list('ABCD'))
        dtstart = dates[-1] + dtd(days=1)
        toplevel_tickstore.write('sym' + str(i), df)
    expected_symbols = ['sym' + str(i) for i in range(startr, endr)]
    assert expected_symbols == toplevel_tickstore.list_symbols(DateRange(start=start, end=end))
Example #19
def test_prunes_multiple_versions_ts(library):
    coll = library._collection

    a = ts1
    c = ts2
    # Create an ObjectId
    now = dt.utcnow()
    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=125))):
        library.write(symbol, a, prune_previous_version=False)
    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=122))):
        library.write(symbol, c, prune_previous_version=False)
    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=121))):
        library.write(symbol, a, prune_previous_version=False)
    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=119))):
        library.write(symbol, c, prune_previous_version=False)
    assert coll.versions.count() == 4

    # Prunes all versions older than the most recent version that's older than 10 mins
    library.write(symbol, a, prune_previous_version=True)
    assert coll.versions.count() == 3
    assert_frame_equal(library.read(symbol, as_of=3).data, a)
    assert_frame_equal(library.read(symbol, as_of=4).data, c)
    assert_frame_equal(library.read(symbol, as_of=5).data, a)
Example #20
def test_prunes_doesnt_prune_snapshots_fully_different_tss(library):
    coll = library._collection

    a = ts1
    b = ts2
    c = b.copy()
    c.index = [i + dtd(days=365) for i in c.index]
    c.index.name = b.index.name
    now = dt.utcnow()
    with patch("bson.ObjectId",
               return_value=bson.ObjectId.from_datetime(now -
                                                        dtd(minutes=125))):
        library.write(symbol, a, prune_previous_version=False)
    with patch("bson.ObjectId",
               return_value=bson.ObjectId.from_datetime(now -
                                                        dtd(minutes=123))):
        library.write(symbol, b, prune_previous_version=False)
    with patch("bson.ObjectId",
               return_value=bson.ObjectId.from_datetime(now -
                                                        dtd(minutes=122))):
        library.write(symbol, c, prune_previous_version=False)
    library.snapshot('snap')
    with patch("bson.ObjectId",
               return_value=bson.ObjectId.from_datetime(now -
                                                        dtd(minutes=121))):
        library.write(symbol, c, prune_previous_version=False)
    with patch("bson.ObjectId",
               return_value=bson.ObjectId.from_datetime(now -
                                                        dtd(minutes=118))):
        library.write(symbol, c, prune_previous_version=False)
    with patch("bson.ObjectId",
               return_value=bson.ObjectId.from_datetime(now -
                                                        dtd(minutes=119))):
        library.write(symbol, c, prune_previous_version=False)
    assert coll.versions.count() == 6

    # Prunes all versions older than the most recent version that's older than 10 mins
    library.write(symbol, c, prune_previous_version=True)
    assert coll.versions.count() == 5
    assert_frame_equal(library.read(symbol, as_of='snap').data, c)
    assert_frame_equal(library.read(symbol, as_of=4).data, c)
    assert_frame_equal(library.read(symbol, as_of=5).data, c)
    assert_frame_equal(library.read(symbol, as_of=6).data, c)
    assert_frame_equal(library.read(symbol, as_of=7).data, c)

    library.delete_snapshot('snap')
    assert coll.versions.count() == 5
    library.write(symbol, c, prune_previous_version=True)
    assert_frame_equal(library.read(symbol, as_of=4).data, c)
    assert_frame_equal(library.read(symbol, as_of=5).data, c)
    assert_frame_equal(library.read(symbol, as_of=6).data, c)
    assert_frame_equal(library.read(symbol, as_of=7).data, c)
Example #21
def test_cleanup_orphaned_chunks_ignores_recent(mongo_host, library, data, dry_run):
    """
    We don't cleanup any chunks in the range of today.  That's just asking for trouble
    """
    yesterday = dt.utcnow() - dtd(hours=12)
    _id = bson.ObjectId.from_datetime(yesterday)
    with patch("bson.ObjectId", return_value=_id):
        library.write('symbol', data, prune_previous_version=False)
    chunk_count = library._collection.count()
    library._collection.versions.delete_one({'_id': _id})

    if dry_run:
        run_as_main(main, '--library', 'user.library', '--host', mongo_host)
        assert library._collection.count() == chunk_count
    else:
        run_as_main(main, '--library', 'user.library', '--host', mongo_host, '-f')
        assert library._collection.count() == chunk_count
Example #22
def test_cleanup_orphaned_chunks_ignores_recent(mongo_host, library, data, dry_run):
    """
    We don't cleanup any chunks in the range of today.  That's just asking for trouble
    """
    yesterday = dt.utcnow() - dtd(hours=12)
    _id = bson.ObjectId.from_datetime(yesterday)
    with patch("bson.ObjectId", return_value=_id):
        library.write('symbol', data, prune_previous_version=False)
    chunk_count = mongo_count(library._collection)
    library._collection.versions.delete_one({'_id': _id})

    if dry_run:
        run_as_main(main, '--library', 'user.library', '--host', mongo_host)
        assert mongo_count(library._collection) == chunk_count
    else:
        run_as_main(main, '--library', 'user.library', '--host', mongo_host, '-f')
        assert mongo_count(library._collection) == chunk_count
Example #23
def test_read_as_of_LondonTime():
    # When we do a read, with naive as_of, that as_of is treated in London Time.
    vs = create_autospec(VersionStore,
                         instance=True,
                         _versions=Mock(),
                         _allow_secondary=False)
    VersionStore._read_metadata(vs, 'symbol', dt(2013, 4, 1, 9, 0))
    versions = vs._versions.with_options.return_value
    versions.find_one.assert_called_once_with(
        {
            'symbol': 'symbol',
            '_id': {
                '$lt':
                bson.ObjectId.from_datetime(
                    dt(2013, 4, 1, 9, 0, tzinfo=mktz()) + dtd(seconds=1))
            }
        },
        sort=[('_id', pymongo.DESCENDING)])
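
The query above works because an ObjectId embeds its creation time in its leading bytes, so ids sort chronologically and ObjectId.from_datetime can serve as an upper bound. A standalone illustration of that property (only bson and the standard library needed):

import bson
from datetime import datetime, timedelta, timezone

as_of = datetime(2013, 4, 1, 9, 0, tzinfo=timezone.utc)
bound = bson.ObjectId.from_datetime(as_of + timedelta(seconds=1))

# Any id generated before the cutoff compares less than the bound,
# which is exactly what the {'_id': {'$lt': bound}} filter relies on.
earlier = bson.ObjectId.from_datetime(as_of - timedelta(minutes=5))
assert earlier < bound
assert bound.generation_time == as_of + timedelta(seconds=1)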
Example #24
def test_list_version(library):
    assert len(list(library.list_versions(symbol))) == 0
    dates = [None, None, None]
    now = dt.utcnow()
    for x in range(len(dates)):
        dates[x] = now - dtd(minutes=130 - x)
        with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(dates[x])):
            library.write(symbol, ts1, prune_previous_version=False)
    assert len(list(library.list_versions(symbol))) == 3

    library.write(symbol, ts1, prune_previous_version=True)
    assert len(list(library.list_versions(symbol))) >= 2

    versions = list(library.list_versions(symbol))
    for i, x in enumerate([4, 3]):
        assert versions[i]['symbol'] == symbol
        assert versions[i]['date'] >= dates[i]
        assert versions[i]['version'] == x
Example #25
def test_list_version(library):
    assert len(list(library.list_versions(symbol))) == 0
    dates = [None, None, None]
    now = dt.utcnow()
    for x in range(len(dates)):
        dates[x] = now - dtd(minutes=130 - x)
        with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(dates[x])):
            library.write(symbol, ts1, prune_previous_version=False)
    assert len(list(library.list_versions(symbol))) == 3

    library.write(symbol, ts1, prune_previous_version=True)
    assert len(list(library.list_versions(symbol))) >= 2

    versions = list(library.list_versions(symbol))
    for i, x in enumerate([4, 3]):
        assert versions[i]['symbol'] == symbol
        assert versions[i]['date'] >= dates[i]
        assert versions[i]['version'] == x
Example #26
def test_list_version_latest_only(library):
    assert len(list(library.list_versions(symbol))) == 0
    dates = [None, None, None]
    now = dt.utcnow().replace(tzinfo=mktz('UTC'))
    for x in six.moves.xrange(len(dates)):
        dates[x] = now - dtd(minutes=20 - x)
        with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(dates[x])):
            library.write(symbol, ts1, prune_previous_version=False)
    assert len(list(library.list_versions(symbol))) == 3

    library.write(symbol, ts1, prune_previous_version=True)
    assert len(list(library.list_versions(symbol, latest_only=True))) == 1

    versions = list(library.list_versions(symbol))
    for i, x in enumerate([4, ]):
        assert versions[i]['symbol'] == symbol
        assert versions[i]['date'] >= dates[i]
        assert versions[i]['version'] == x
Example #27
def test_list_version_latest_only(library):
    assert len(list(library.list_versions(symbol))) == 0
    dates = [None, None, None]
    now = dt.utcnow().replace(tzinfo=mktz('UTC'))
    for x in six.moves.xrange(len(dates)):
        dates[x] = now - dtd(minutes=20 - x)
        with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(dates[x])):
            library.write(symbol, ts1, prune_previous_version=False)
    assert len(list(library.list_versions(symbol))) == 3

    library.write(symbol, ts1, prune_previous_version=True)
    assert len(list(library.list_versions(symbol, latest_only=True))) == 1

    versions = list(library.list_versions(symbol))
    for i, x in enumerate([4, ]):
        assert versions[i]['symbol'] == symbol
        assert versions[i]['date'] >= dates[i]
        assert versions[i]['version'] == x
Example #28
def holidaysForYear(year):
    # Generate public holidays (UK) for a given year
    # Christmas day/boxing day falling on weekend isn't included (assumed standard weekend)
    holidays = []
    # New year:
    holidays.append(dtd(year, 1, 1))
    # If 2 or 3 January is a Monday, this is the bank holiday
    jan2 = dtd(year, 1, 2)
    jan3 = dtd(year, 1, 3)
    if jan2.weekday() == 0:
        holidays.append(jan2)
    if jan3.weekday() == 0:
        holidays.append(jan3)

    # Get easter monday and friday bank holidays from lookup function
    holidays.extend(easterLookup(year))
    # Early and late may
    may1 = dtd(year, 5, 1)
    may1 = may1 if may1.weekday() == 0 else may1 + timedelta(7 - may1.weekday())
    holidays.append(may1)
    holidays.append(dtd(year, 5, 31) - timedelta(dtd(year, 5, 31).weekday()))
    # Final monday in August
    holidays.append(dtd(year, 8, 31) - timedelta(dtd(year, 8, 31).weekday()))
    # Christmas bank holidays. Only add if on a weekday, because weekends are weekends anyway
    dec25 = dtd(year, 12, 25)
    dec26 = dtd(year, 12, 26)
    if dec25.weekday() < 6:  # only include if not Sunday
        holidays.append(dec25)
    if dec26.weekday() < 6:
        holidays.append(dec26)

    # If December 28 is a Monday or Tuesday, it must be a displaced bank holiday because Christmas Day and/or Boxing Day fell on a weekend
    dec27 = dtd(year, 12, 27)
    dec28 = dtd(year, 12, 28)
    if dec28.weekday() < 2:
        holidays.append(dec28)
    if dec27.weekday() < 2:
        holidays.append(dec27)
    return holidays
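
A quick, purely illustrative sanity check of holidaysForYear (assumes dtd is datetime.date and that easterLookup and timedelta are in scope):

hols = holidaysForYear(2015)
assert dtd(2015, 1, 1) in hols        # New Year's Day
assert dtd(2015, 4, 3) in hols        # Good Friday, via easterLookup(2015)
assert dtd(2015, 12, 25) in hols      # Christmas Day (a Friday in 2015)
for h in sorted(hols):
    print(h.isoformat(), h.strftime("%A"))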
Example #29
def test_cleanup_orphaned_chunks(mongo_host, library, data, dry_run):
    """
    Check that we do / don't cleanup chunks based on the dry-run
    """
    yesterday = dt.utcnow() - dtd(days=1, seconds=1)
    _id = bson.ObjectId.from_datetime(yesterday)
    with patch("bson.ObjectId", return_value=_id):
        library.write('symbol', data, prune_previous_version=False)

    # Number of chunks
    chunk_count = library._collection.count()
    # Remove the version document ; should cleanup
    library._collection.versions.delete_one({'_id': _id})

    # No cleanup on dry-run
    if dry_run:
        run_as_main(main, '--library', 'user.library', '--host', mongo_host)
        assert library._collection.count() == chunk_count
    else:
        run_as_main(main, '--library', 'user.library', '--host', mongo_host, '-f')
        assert library._collection.count() == 0
Example #30
def test_cleanup_noop(mongo_host, library, data, dry_run):
    """
    Check that we do / don't cleanup chunks based on the dry-run
    """
    yesterday = dt.utcnow() - dtd(days=1, seconds=1)
    _id = bson.ObjectId.from_datetime(yesterday)
    with patch("bson.ObjectId", return_value=_id):
        library.write('symbol', data, prune_previous_version=False)

    # Number of chunks
    chunk_count = library._collection.count()

    # No cleanup on dry-run
    if dry_run:
        run_as_main(main, '--library', 'user.library', '--host', mongo_host)
        assert library._collection.count() == chunk_count
        assert repr(library.read('symbol').data) == repr(data)
    else:
        run_as_main(main, '--library', 'user.library', '--host', mongo_host, '-f')
        assert library._collection.count() == chunk_count
        assert repr(library.read('symbol').data) == repr(data)
Example #31
def test_cleanup_noop(mongo_host, library, data, dry_run):
    """
    Check that we do / don't cleanup chunks based on the dry-run
    """
    yesterday = dt.utcnow() - dtd(days=1, seconds=1)
    _id = bson.ObjectId.from_datetime(yesterday)
    with patch("bson.ObjectId", return_value=_id):
        library.write('symbol', data, prune_previous_version=False)

    # Number of chunks
    chunk_count = mongo_count(library._collection)

    # No cleanup on dry-run
    if dry_run:
        run_as_main(main, '--library', 'user.library', '--host', mongo_host)
        assert mongo_count(library._collection) == chunk_count
        assert repr(library.read('symbol').data) == repr(data)
    else:
        run_as_main(main, '--library', 'user.library', '--host', mongo_host, '-f')
        assert mongo_count(library._collection) == chunk_count
        assert repr(library.read('symbol').data) == repr(data)
Example #32
def test_cleanup_orphaned_chunk_doesnt_break_versions(mongo_host, library, data):
    """
    Check that a chunk pointed to by more than one version isn't inadvertently cleared
    """
    yesterday = dt.utcnow() - dtd(days=1, seconds=1)
    _id = bson.ObjectId.from_datetime(yesterday)
    with patch("bson.ObjectId", return_value=_id):
        library.write('symbol', data, prune_previous_version=False)

    # Re-Write the data again
    # Write a whole new version rather than going down the append path...
    #     - we want two self-standing versions, the removal of one shouldn't break the other... 
    with patch('arctic.store._ndarray_store._APPEND_COUNT', 0):
        library.write('symbol', data, prune_previous_version=False)
    library._delete_version('symbol', 1)
    library._collection.versions.delete_one({'_id': _id})
    assert repr(library.read('symbol').data) == repr(data)

    run_as_main(main, '--library', 'user.library', '--host', mongo_host, '-f')
    assert repr(library.read('symbol').data) == repr(data)
    library.delete('symbol')
    assert library._collection.versions.count() == 0
Example #33
def test_prunes_doesnt_prune_snapshots_fully_different_tss(library):
    coll = library._collection

    a = ts1
    b = ts2
    c = b.copy()
    c.index = [i + dtd(days=365) for i in c.index]
    c.index.name = b.index.name
    now = dt.utcnow()
    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=125))):
        library.write(symbol, a, prune_previous_version=False)
    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=123))):
        library.write(symbol, b, prune_previous_version=False)
    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=122))):
        library.write(symbol, c, prune_previous_version=False)
    library.snapshot('snap')
    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=121))):
        library.write(symbol, c, prune_previous_version=False)
    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=118))):
        library.write(symbol, c, prune_previous_version=False)
    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=119))):
        library.write(symbol, c, prune_previous_version=False)
    assert coll.versions.count() == 6

    # Prunes all versions older than the most recent version that's older than 10 mins
    library.write(symbol, c, prune_previous_version=True)
    assert coll.versions.count() == 5
    assert_frame_equal(library.read(symbol, as_of='snap').data, c)
    assert_frame_equal(library.read(symbol, as_of=4).data, c)
    assert_frame_equal(library.read(symbol, as_of=5).data, c)
    assert_frame_equal(library.read(symbol, as_of=6).data, c)
    assert_frame_equal(library.read(symbol, as_of=7).data, c)

    library.delete_snapshot('snap')
    assert coll.versions.count() == 5
    library.write(symbol, c, prune_previous_version=True)
    assert_frame_equal(library.read(symbol, as_of=4).data, c)
    assert_frame_equal(library.read(symbol, as_of=5).data, c)
    assert_frame_equal(library.read(symbol, as_of=6).data, c)
    assert_frame_equal(library.read(symbol, as_of=7).data, c)
Example #34
def test_read_as_of_NotNaive():
    # When we do a read, with naive as_of, that as_of is treated in London Time.
    vs = create_autospec(VersionStore, instance=True,
                     _versions=Mock(), _allow_secondary=False)
    VersionStore._read_metadata(vs, 'symbol', dt(2013, 4, 1, 9, 0, tzinfo=mktz('Europe/Paris')))
    versions = vs._versions.with_options.return_value
    versions.find_one.assert_called_once_with({'symbol':'symbol', '_id':
                                              {'$lt': bson.ObjectId.from_datetime(dt(2013, 4, 1, 9, 0, tzinfo=mktz('Europe/Paris')) + dtd(seconds=1))}},
                                             sort=[('_id', pymongo.DESCENDING)])
Example #35
def easterLookup(year):
    # Return easter bank holiday dates for a year
    # Hard coded because parliament didn't set a constant date
    # Easter Monday bank holidays
    easters = {}
    easters[2000] = [dtd(2000, 4, 24)]
    easters[2001] = [dtd(2001, 4, 16)]
    easters[2002] = [dtd(2002, 4, 1)]
    easters[2003] = [dtd(2003, 4, 21)]
    easters[2004] = [dtd(2004, 4, 12)]
    easters[2005] = [dtd(2005, 3, 28)]
    easters[2006] = [dtd(2006, 4, 17)]
    easters[2007] = [dtd(2007, 4, 9)]
    easters[2008] = [dtd(2008, 3, 24)]
    easters[2009] = [dtd(2009, 4, 13)]
    easters[2010] = [dtd(2010, 4, 5)]
    easters[2011] = [dtd(2011, 4, 25)]
    easters[2012] = [dtd(2012, 4, 9)]
    easters[2013] = [dtd(2013, 4, 1)]
    easters[2014] = [dtd(2014, 4, 21)]
    easters[2015] = [dtd(2015, 4, 6)]
    easters[2016] = [dtd(2016, 3, 28)]
    easters[2017] = [dtd(2017, 4, 17)]
    easters[2018] = [dtd(2018, 4, 2)]
    easters[2019] = [dtd(2019, 4, 22)]
    easters[2020] = [dtd(2020, 4, 13)]

    # Can programmatically get Good Friday from the above
    for y in list(easters.keys()):
        easters[y].append(easters[y][0] - timedelta(3))

    return easters[year]
Example #36
def test_prunes_previous_version_append_interaction(library):
    ts = ts1
    ts2 = ts1.append(
        pd.DataFrame(index=[
            ts.index[-1] + dtd(days=1),
            ts.index[-1] + dtd(days=2),
        ],
                     data=[3.7, 3.8],
                     columns=['near']))
    ts2.index.name = ts1.index.name
    ts3 = ts.append(
        pd.DataFrame(
            index=[ts2.index[-1] + dtd(days=1), ts2.index[-1] + dtd(days=2)],
            data=[4.8, 4.9],
            columns=['near']))
    ts3.index.name = ts1.index.name
    ts4 = ts
    ts5 = ts2
    ts6 = ts3
    now = dt.utcnow()
    with patch("bson.ObjectId",
               return_value=bson.ObjectId.from_datetime(now -
                                                        dtd(minutes=130)),
               from_datetime=bson.ObjectId.from_datetime):
        library.write(symbol, ts, prune_previous_version=False)
    assert_frame_equal(ts, library.read(symbol).data)

    with patch("bson.ObjectId",
               return_value=bson.ObjectId.from_datetime(now -
                                                        dtd(minutes=129)),
               from_datetime=bson.ObjectId.from_datetime):
        library.write(symbol, ts2, prune_previous_version=False)
    assert_frame_equal(ts, library.read(symbol, as_of=1).data)
    assert_frame_equal(ts2, library.read(symbol).data)

    with patch("bson.ObjectId",
               return_value=bson.ObjectId.from_datetime(now -
                                                        dtd(minutes=128)),
               from_datetime=bson.ObjectId.from_datetime):
        library.write(symbol, ts3, prune_previous_version=False)
    assert_frame_equal(ts, library.read(symbol, as_of=1).data)
    assert_frame_equal(ts2, library.read(symbol, as_of=2).data)
    assert_frame_equal(ts3, library.read(symbol).data)

    with patch("bson.ObjectId",
               return_value=bson.ObjectId.from_datetime(now -
                                                        dtd(minutes=127)),
               from_datetime=bson.ObjectId.from_datetime):
        library.write(symbol, ts4, prune_previous_version=False)
    assert_frame_equal(ts, library.read(symbol, as_of=1).data)
    assert_frame_equal(ts2, library.read(symbol, as_of=2).data)
    assert_frame_equal(ts3, library.read(symbol, as_of=3).data)
    assert_frame_equal(ts4, library.read(symbol).data)

    with patch("bson.ObjectId",
               return_value=bson.ObjectId.from_datetime(now -
                                                        dtd(minutes=126)),
               from_datetime=bson.ObjectId.from_datetime):
        library.write(symbol, ts5, prune_previous_version=False)
    assert_frame_equal(ts, library.read(symbol, as_of=1).data)
    assert_frame_equal(ts2, library.read(symbol, as_of=2).data)
    assert_frame_equal(ts3, library.read(symbol, as_of=3).data)
    assert_frame_equal(ts4, library.read(symbol, as_of=4).data)
    assert_frame_equal(ts5, library.read(symbol).data)

    with patch("bson.ObjectId",
               return_value=bson.ObjectId.from_datetime(now),
               from_datetime=bson.ObjectId.from_datetime):
        library.write(symbol, ts6, prune_previous_version=True)

    with pytest.raises(NoDataFoundException):
        library.read(symbol, as_of=1)
    with pytest.raises(NoDataFoundException):
        library.read(symbol, as_of=2)
    with pytest.raises(NoDataFoundException):
        library.read(symbol, as_of=3)
    assert_frame_equal(ts5, library.read(symbol, as_of=5).data)
    assert_frame_equal(ts6, library.read(symbol).data)
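
The patch(...) calls in this example pass from_datetime=bson.ObjectId.from_datetime because patching bson.ObjectId swaps a Mock in for the whole class, which would otherwise lose the from_datetime constructor that the library code still needs. A minimal, standalone sketch of that pattern (assuming only bson and the standard library; the snippets above appear to use the mock package instead of unittest.mock):

from datetime import datetime, timedelta
from unittest.mock import patch

import bson

real_from_datetime = bson.ObjectId.from_datetime
fixed = real_from_datetime(datetime.utcnow() - timedelta(minutes=130))

with patch("bson.ObjectId", return_value=fixed, from_datetime=real_from_datetime):
    # The constructor is mocked: every bson.ObjectId() call yields the fixed id...
    assert bson.ObjectId() == fixed
    # ...while from_datetime still works because it was re-attached on the mock.
    assert bson.ObjectId.from_datetime(datetime.utcnow()) > fixed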
Example #37
def easterLookup(year):
    # Return easter bank holiday dates for a year
    # Hard coded because parliament didn't set a constant date
    # Easter Monday bank holidays
    easters = {}
    easters[2000] = [dtd(2000, 4, 24)]
    easters[2001] = [dtd(2001, 4, 16)]
    easters[2002] = [dtd(2002, 4, 1)]
    easters[2003] = [dtd(2003, 4, 21)]
    easters[2004] = [dtd(2004, 4, 12)]
    easters[2005] = [dtd(2005, 3, 28)]
    easters[2006] = [dtd(2006, 4, 17)]
    easters[2007] = [dtd(2007, 4, 9)]
    easters[2008] = [dtd(2008, 3, 24)]
    easters[2009] = [dtd(2009, 4, 13)]
    easters[2010] = [dtd(2010, 4, 5)]
    easters[2011] = [dtd(2011, 4, 25)]
    easters[2012] = [dtd(2012, 4, 9)]
    easters[2013] = [dtd(2013, 4, 1)]
    easters[2014] = [dtd(2014, 4, 21)]
    easters[2015] = [dtd(2015, 4, 6)]
    easters[2016] = [dtd(2016, 3, 28)]
    easters[2017] = [dtd(2017, 4, 17)]
    easters[2018] = [dtd(2018, 4, 2)]
    easters[2019] = [dtd(2019, 4, 22)]
    easters[2020] = [dtd(2020, 4, 13)]

    # Can programmatically get Good Friday from the above
    for y in easters.keys():
        easters[y].append(easters[y][0] - timedelta(3))

    return easters[year]
Example #38
def holidaysForYear(year):
    # Generate public holidays (UK) for a given year
    # Christmas day/boxing day falling on weekend isn't included (assumed standard weekend)
    holidays = []
    # New year:
    holidays.append(dtd(year, 1, 1))
    # If 2 or 3 January is a Monday, this is the bank holiday
    jan2 = dtd(year, 1, 2)
    jan3 = dtd(year, 1, 3)
    if jan2.weekday() == 0:
        holidays.append(jan2)
    if jan3.weekday() == 0:
        holidays.append(jan3)

    # Get easter monday and friday bank holidays from lookup function
    holidays.extend(easterLookup(year))
    # Early and late may
    may1 = dtd(year, 5, 1)
    may1 = may1 if may1.weekday() == 0 else may1 + timedelta(7 - may1.weekday())
    holidays.append(may1)
    holidays.append(dtd(year, 5, 31) - timedelta(dtd(year, 5, 31).weekday()))
    # Final monday in August
    holidays.append(dtd(year, 8, 31) - timedelta(dtd(year, 8, 31).weekday()))
    # Christmas bank holidays. Only add if on a weekday, because weekends are weekends anyway
    dec25 = dtd(year, 12, 25)
    dec26 = dtd(year, 12, 26)
    if dec25.weekday() < 6:  # only include if not Sunday
        holidays.append(dec25)
    if dec26.weekday() < 6:
        holidays.append(dec26)

    # If December 28 is a Monday or Tuesday, it must be a displaced bank holiday because Christmas Day and/or Boxing Day fell on a weekend
    dec27 = dtd(year, 12, 27)
    dec28 = dtd(year, 12, 28)
    if dec28.weekday() < 2:
        holidays.append(dec28)
    if dec27.weekday() < 2:
        holidays.append(dec27)
    return holidays
Example #39
def test_prunes_previous_version_append_interaction(library):
    ts = ts1
    ts2 = ts1.append(pd.DataFrame(index=[ts.index[-1] + dtd(days=1),
                                         ts.index[-1] + dtd(days=2), ],
                                  data=[3.7, 3.8],
                                  columns=['near']))
    ts2.index.name = ts1.index.name
    ts3 = ts.append(pd.DataFrame(index=[ts2.index[-1] + dtd(days=1),
                                        ts2.index[-1] + dtd(days=2)],
                                 data=[4.8, 4.9],
                                 columns=['near']))
    ts3.index.name = ts1.index.name
    ts4 = ts
    ts5 = ts2
    ts6 = ts3
    now = dt.utcnow()
    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=130)),
                                from_datetime=bson.ObjectId.from_datetime):
        library.write(symbol, ts, prune_previous_version=False)
    assert_frame_equal(ts, library.read(symbol).data)

    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=129)),
                                from_datetime=bson.ObjectId.from_datetime):
        library.write(symbol, ts2, prune_previous_version=False)
    assert_frame_equal(ts, library.read(symbol, as_of=1).data)
    assert_frame_equal(ts2, library.read(symbol).data)

    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=128)),
                                from_datetime=bson.ObjectId.from_datetime):
        library.write(symbol, ts3, prune_previous_version=False)
    assert_frame_equal(ts, library.read(symbol, as_of=1).data)
    assert_frame_equal(ts2, library.read(symbol, as_of=2).data)
    assert_frame_equal(ts3, library.read(symbol).data)

    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=127)),
                                from_datetime=bson.ObjectId.from_datetime):
        library.write(symbol, ts4, prune_previous_version=False)
    assert_frame_equal(ts, library.read(symbol, as_of=1).data)
    assert_frame_equal(ts2, library.read(symbol, as_of=2).data)
    assert_frame_equal(ts3, library.read(symbol, as_of=3).data)
    assert_frame_equal(ts4, library.read(symbol).data)

    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now - dtd(minutes=126)),
                                from_datetime=bson.ObjectId.from_datetime):
        library.write(symbol, ts5, prune_previous_version=False)
    assert_frame_equal(ts, library.read(symbol, as_of=1).data)
    assert_frame_equal(ts2, library.read(symbol, as_of=2).data)
    assert_frame_equal(ts3, library.read(symbol, as_of=3).data)
    assert_frame_equal(ts4, library.read(symbol, as_of=4).data)
    assert_frame_equal(ts5, library.read(symbol).data)

    with patch("bson.ObjectId", return_value=bson.ObjectId.from_datetime(now),
                                from_datetime=bson.ObjectId.from_datetime):
        library.write(symbol, ts6, prune_previous_version=True)

    with pytest.raises(NoDataFoundException):
        library.read(symbol, as_of=1)
    with pytest.raises(NoDataFoundException):
        library.read(symbol, as_of=2)
    with pytest.raises(NoDataFoundException):
        library.read(symbol, as_of=3)
    assert_frame_equal(ts5, library.read(symbol, as_of=5).data)
    assert_frame_equal(ts6, library.read(symbol).data)