Example #1
    def __init__(self, name: str, path: str):
        """Create a PystoreConnector object that points to a Pystore.

        Parameters
        ----------
        name : str
            name of the store
        path : str
            path to the pystore directory
        """
        try:
            import pystore
        except ModuleNotFoundError as e:
            print("Install pystore, follow instructions at "
                  "https://github.com/ranaroussi/pystore#dependencies")
            raise e
        self.name = name
        self.path = path
        pystore.set_path(self.path)
        self.store = pystore.store(self.name)
        self.libs: dict = {}
        self._initialize()
        self.models = ModelAccessor(self)
        # for older versions of PastaStore: if the oseries_models library is
        # empty, populate the oseries-models database
        self._update_all_oseries_model_links()
Example #2
def run():
    #data = get_data("BTCEUR", Client.KLINE_INTERVAL_1DAY, "01-01-2020", "05-10-2020")

    symbols = ["BTCEUR"]
    periods = [
        Client.KLINE_INTERVAL_1DAY, Client.KLINE_INTERVAL_1HOUR,
        Client.KLINE_INTERVAL_15MINUTE
    ]
    years = [2018, 2020]

    pystore.set_path('./pystore')
    store = pystore.store('binance')

    #print(read_data("BTCUSDT", Client.KLINE_INTERVAL_15MINUTE))

    for symbol in symbols:
        collection = store.collection(symbol)
        for period in periods:
            data = get_data(symbol, period, "01-01-{}".format(years[0]),
                            "12-31-{}".format(years[1]))
            if not data.empty:
                #print(data)

                collection.write(period, data, overwrite=True)

                print("saved {} {}".format(symbol, period))
Example #3
def delete_pystore(path: str,
                   name: str,
                   libraries: Optional[List[str]] = None) -> None:
    """Delete libraries from pystore.

    Parameters
    ----------
    path : str
        path to pystore
    name : str
        name of the pystore
    libraries : Optional[List[str]], optional
        list of library names to delete, by default None which deletes
        all libraries
    """
    try:
        import pystore
    except ModuleNotFoundError as e:
        print("Please install `pystore`!")
        raise e
    print(f"Deleting pystore: '{name}' ...", end="")
    pystore.set_path(path)
    if libraries is None:
        pystore.delete_store(name)
        print(" Done!")
    else:
        store = pystore.store(name)
        for lib in libraries:
            print()
            store.delete_collection(lib)
            print(f" - deleted: {lib}")
Example #4
def prj(request):
    if request.param == "arctic":
        connstr = "mongodb://localhost:27017/"
        name = "test_project"
        arc = arctic.Arctic(connstr)
        if name in [lib.split(".")[0] for lib in arc.list_libraries()]:
            connector = pst.ArcticConnector(name, connstr)
            prj = pst.PastaStore(name, connector)
        else:
            connector = pst.ArcticConnector(name, connstr)
            prj = initialize_project(connector)
    elif request.param == "pystore":
        name = "test_project"
        path = "./tests/data/pystore"
        pystore.set_path(path)
        if name in pystore.list_stores():
            connector = pst.PystoreConnector(name, path)
            prj = pst.PastaStore(name, connector)
        else:
            connector = pst.PystoreConnector(name, path)
            prj = initialize_project(connector)
    elif request.param == "dict":
        name = "test_project"
        connector = pst.DictConnector(name)
        prj = initialize_project(connector)
    prj.type = request.param  # added here for defining test dependencies
    yield prj
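The decorator is not included in the snippet; a parametrized pytest fixture such as prj is presumably declared along these lines (a sketch, not the project's actual code):

import pytest

# Hypothetical declaration; the real project may use a different scope or params.
@pytest.fixture(scope="module", params=["arctic", "pystore", "dict"])
def prj(request):
    ...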
Example #5
def data_from_files(data_dir, meter_id):
    """Read data from raw data dumped JSON files."""
    from .config import read_config
    import gzip
    import os
    import json
    import pystore
    from pathlib import Path

    config = read_config()
    name = f"power_{meter_id}"
    pystore.set_path(
        Path(config.file_location.data_dir).expanduser().as_posix())
    for f in sorted(os.listdir(data_dir)):
        if f.startswith("discovergy_data"):
            with gzip.open(f"{data_dir}/{f}") as fh:
                data = json.load(fh)
            df = raw_to_df(data=data)
            print(f)
            write_data_to_pystore(
                config=config,
                data_frames=split_df_by_day(df=df),
                name=name,
                metadata={"meter_id": meter_id},
            )
Example #6
def read_data(symbol, interval):
    pystore.set_path('./pystore')
    store = pystore.store('binance')
    collection = store.collection(symbol)
    data = collection.item(interval)

    return data.to_pandas()
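Reading back one of the intervals written by run() in Example #2 might look like this; the symbol and interval below are assumptions:

# Hypothetical usage: load the daily BTCEUR candles as a pandas DataFrame.
df = read_data("BTCEUR", Client.KLINE_INTERVAL_1DAY)
print(df.tail())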
Example #7
 def __init__(self, user_settings, storeName='AInvesting'):
     # List stores
     # pystore.list_stores()
     pystore.set_path(getDatabasePath(user_settings))
     # Connect to datastore (create it if not exist)
     self.store = pystore.store(storeName)
     self.cacheDict = {}  # Cache
     pass
Example #8
def test_obs_from_pystore_item():
    import pystore
    pystore.set_path("./tests/data/2019-Pystore-test")
    store = pystore.store("test_pystore")
    coll = store.collection(store.collections[0])
    item = coll.item(list(coll.list_items())[0])
    o = obs.GroundwaterObs.from_pystore_item(item)
    return o
Example #9
def set_pystore_path(pystore_path):
    """Set pystore path

    Parameters
    ----------
    pystore_path : str
        path to location with stores
    """
    pystore.set_path(pystore_path)
Example #10
 def __init__(self, collection_name, data_location="../excels", pystore_path='/home/nimac/.pystore', store_name='tradion', item_name='ALL',
              pystore_source="default_source", name="default_name"):
     pystore.set_path(pystore_path)
     self.store_name = store_name
     self.pystore_source = pystore_source
     self.collection_name = collection_name
     self.item_name = item_name
     self.name = name
     self.data_location = data_location
Example #11
 def __init__(self,
              pystore_path='/home/nimac/.pystore',
              store_name='tradion',
              collection_name='boors',
              item_name='ALL'):
     pystore.set_path(pystore_path)
     self.store_name = store_name
     self.collection_name = collection_name
     self.item_name = item_name
     self.__is_scaled = {}
Example #12
 def __init__(self, directory):
     super().__init__()
     import pystore
     self.directory = directory
     pystore.set_path(self.directory)
     self.ohlcv_store = pystore.store("OHLCV")
     self.fd_store = pystore.store("FD")
     self.ohlcv_eod_collection = self.ohlcv_store.collection("EOD")
     self.fd_q_collection = self.fd_store.collection("Q")
     self.available_symbols = self.list_symbols()
Example #13
def update_store(filename='Data.csv', name_map=name_map, path='./db',
                 date_format='%d/%m/%Y', overwrite=False):
    pystore.set_path(path)
    accounts_store = pystore.store('accounts')
    with open('accounts.txt') as accounts:
        account_map = dict()
        for account in accounts:
            name, number = account.split(' ')
            account_map[name] = int(number)

    # read in new data
    data = pd.read_csv(filename)
    # rename columns to match internal representation
    data.rename(columns=name_map, inplace=True)
    columns = name_map.values()

    # change data strings into efficient numerical format
    data.loc[:, DATE] = pd.to_datetime(data.loc[:, DATE], format=date_format)
    data = data.loc[:, [*columns]] # use only desired columns
    data.fillna(0, inplace=True) # fill any NaN/blank values with 0
    # remove the debit column, if there is one
    if DEBIT in columns:
        # merge the debit column into the credit column and remove
        data.loc[:, CREDIT] -= data.loc[:, DEBIT]
        data.drop(columns=DEBIT, inplace=True)

    # finish processing and save data into separate accounts
    for name, number in account_map.items():
        # add as a collection if not already present, assign for convenience
        collection = accounts_store.collection(name)
        if not overwrite:
            if 'transactions' in collection.list_items():
                write = collection.append
            else:
                write = collection.write
        else:
            write = lambda *args, **kw : collection.write(*args, **kw,
                                                          overwrite=overwrite)
        if 'transactions' in collection.list_items():
            metadata = collection.item('transactions').metadata
        else:
            metadata = dict()
        metadata.update(number=number)
        # can we somehow mark the index as pre-sorted??
        write('transactions', data[data[ACCOUNT_NO] == account_map[name]]
                              .drop(columns=ACCOUNT_NO)
                              .set_index(DATE), metadata=metadata,
        )

    return accounts_store
Example #14
def get_pystore(data_dir, store_label='iex_data_store'):
    """
    get pystore

    example:
        store = get_pystore(data_dir)

    :param data_dir:
    :param store_label:
    :return:
    """
    pystore.set_path(data_dir.as_posix())
    store = pystore.store(store_label)
    return store
Example #15
    def __init__(self, name, root, config, subreddit):
        self.name = name
        self.root = root
        self.config = config
        self.subreddit = subreddit

        # datastore
        db.set_path(os.path.join(self.root, 'data', 'store'))
        self.datastore = db.store(self.subreddit, 'fastparquet')

        # collection
        self.collection = self.datastore.collection(self.name)
        self.path = os.path.join(self.datastore.datastore,
                                 self.collection.collection)
Example #16
    def to_pystore(self, store_name, pystore_path, groupby, item_name=None,
                   overwrite=False):
        """Write timeseries and metadata to Pystore format. Series are
        grouped by 'groupby'. Each group is a Collection, each series within
        that group an Item.

        Parameters
        ----------
        store_name : str
            name of the store
        pystore_path : str
            path where store should be saved
        groupby : str
            column name to group by (for example, group by location,
            which would create a collection per location, and write
            all timeseries at that location as Items into that Collection).
        item_name : str, optional
            name of the column to use as the item name, by default None,
            in which case the Item takes its name from obs.name
        overwrite : bool, optional
            if True overwrite current data in store, by default False
        """
        import pystore
        pystore.set_path(pystore_path)
        store = pystore.store(store_name)
        for name, group in self.groupby(by=groupby):
            # Access a collection (create it if not exist)
            collection = store.collection(name, overwrite=overwrite)
            for i, o in enumerate(group.obs):
                imeta = o.meta.copy()
                if 'datastore' in imeta.keys():
                    imeta['datastore'] = str(imeta['datastore'])
                # add extra columns to item metadata
                for icol in group.columns:
                    if icol != "obs" and icol != 'meta':
                        # check if type is numpy integer
                        # numpy integers are not json serializable
                        if isinstance(group.iloc[i].loc[icol], np.integer):
                            imeta[icol] = int(group.iloc[i].loc[icol])
                        else:
                            imeta[icol] = group.iloc[i].loc[icol]
                if item_name is None:
                    name = o.name
                else:
                    name = o.meta[item_name]
                collection.write(name, o, metadata=imeta,
                                 overwrite=overwrite)
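A hypothetical call for the method above, assuming oc is the observation collection the method belongs to and that a "location" column exists to group on:

# Hypothetical usage: one pystore collection per location, one Item per series.
oc.to_pystore("observations", "./pystore_data", groupby="location",
              overwrite=True)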
Example #17
def main(config: Box) -> None:
    """Entry point for the data poller."""
    loop = asyncio.get_event_loop()
    # Set pystore directory
    pystore.set_path(
        Path(config.file_location.data_dir).expanduser().as_posix())
    # Add all tasks to the event loop.
    task_match = re.compile(r"^.*_task$")
    for attr in globals().keys():
        if task_match.match(attr):
            asyncio.ensure_future(globals()[attr](config=config, loop=loop))
    try:
        loop.run_forever()
    except KeyboardInterrupt:
        log.info("Polling Discovergy and friends was ended by <Ctrl>+<C>.")
        sys.exit(0)
    except Exception as e:
        log.error(f"While running the poller event loop we caught {e}.")
    finally:
        log.info("Closing event loop")
        loop.close()
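Any module-level coroutine whose name ends in "_task" is discovered by the regex loop above and scheduled on the event loop. A hypothetical task, invented purely for illustration, would therefore look like this:

# Hypothetical poller task; name and body are examples only.
async def heartbeat_task(*, config, loop):
    while True:
        log.info("poller heartbeat")
        await asyncio.sleep(60)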
Example #18
def pstore(request):
    if request.param == "arctic":
        connstr = "mongodb://localhost:27017/"
        name = "test_project"
        connector = pst.ArcticConnector(name, connstr)
    elif request.param == "pystore":
        name = "test_project"
        path = "./tests/data/pystore"
        pystore.set_path(path)
        connector = pst.PystoreConnector(name, path)
    elif request.param == "dict":
        name = "test_project"
        connector = pst.DictConnector(name)
    elif request.param == "pas":
        name = "test_project"
        connector = pst.PasConnector(name, "./tests/data/pas")
    else:
        raise ValueError("Unrecognized parameter!")
    pstore = initialize_project(connector)
    pstore.type = request.param  # added here for defining test dependencies
    yield pstore
Example #19
    def __init__(self, name: str, path: str):
        """Create a PystoreConnector object that points to a Pystore.

        Parameters
        ----------
        name : str
            name of the store
        path : str
            path to the pystore directory
        """
        try:
            import pystore
        except ModuleNotFoundError as e:
            print("Install pystore, follow instructions at "
                  "https://github.com/ranaroussi/pystore#dependencies")
            raise e
        self.name = name
        self.path = path
        pystore.set_path(self.path)
        self.store = pystore.store(self.name)
        self.libs: dict = {}
        self._initialize()
Example #20
def delete_pystore_connector(conn=None,
                             path: Optional[str] = None,
                             name: Optional[str] = None,
                             libraries: Optional[List[str]] = None) -> None:
    """Delete libraries from pystore.

    Parameters
    ----------
    conn : PystoreConnector, optional
        PystoreConnector object
    path : str, optional
        path to pystore
    name : str, optional
        name of the pystore
    libraries : Optional[List[str]], optional
        list of library names to delete, by default None which deletes
        all libraries
    """
    import pystore

    if conn is not None:
        name = conn.name
        path = conn.path
    elif name is None or path is None:
        raise ValueError("Please provide 'name' and 'path' OR 'conn'!")

    print(f"Deleting PystoreConnector database: '{name}' ...", end="")
    pystore.set_path(path)
    if libraries is None:
        pystore.delete_store(name)
        print(" Done!")
    else:
        store = pystore.store(name)
        for lib in libraries:
            print()
            store.delete_collection(lib)
            print(f" - deleted: {lib}")
Example #21
"""
Keeping low-usage historic time-series data in parquet form on my laptop (via pystore) to avoid paying cloud hosting fees
    - https://pypi.org/project/PyStore/

Keeping high-usage daily data in MongoDB Atlas

All exposed using a unified Flask API (likely GraphQL via Graphene) - https://github.com/graphql-python/graphene-mongo/blob/master/docs/tutorial.rst
"""

import pystore
import pandas as pd

pystore.set_path('../data/historic/')

instruments = pystore.store('instruments')

stocks = instruments.collection('stocks')
options = instruments.collection('options')

stocks.delete_item("AAPL")

#stocks.write('AAPL', aapl[:-1], metadata={'source': 'Quandl'})
#stocks.append('AAPL', aapl[2:3], npartitions=stocks.item("AAPL").data.npartitions)
"""
use snapshots to protect data - e.g. 

stocks.create_snapshot('snapshot_name')
snap_df = stocks.item('AAPL', snapshot='snapshot_name')
collection.write('AAPL', snap_df.to_pandas(),
                 metadata={'source': 'Quandl'},
                 overwrite=True)
"""
Example #22
from timeit import default_timer as timer
from functools import lru_cache
from collections import OrderedDict

import numpy as np
import pandas as pd
import pandas_datareader
import pandas_datareader.data as web
import pystore
import unidecode
import requests_cache

logging.getLogger(__name__).addHandler(logging.NullHandler())

storage_path = os.path.expanduser("~/.prcc")
pystore.set_path(storage_path)
collection = pystore.store("data").collection("all")

requests_cache.core.install_cache(os.path.join(storage_path, "cache"),
                                  "sqlite",
                                  expire_after=86400)

_last_api_call = 0.0
_b3_indices = {
    # Índices Amplos
    "ibovespa": "IBOV",
    "ibrx100": "IBRX",
    "ibrx50": "IBXL",
    "ibra": "IBRA",
    # Índices de Governança
    "igcx": "IGCX",
import quandl
import pystore
import time

p = "./share/pystore"

pystore.set_path(p)

store = pystore.store('quandl_datastore')

it = 'CHRIS'

# Access a collection (create it if not exist)
collection = store.collection(it)
# List all collections in the datastore
print(store.list_collections())
# returns ['NASDAQ.EOD']

jt1 = 'CME_0D1'
jt2 = 'CME_0D2'

#qd = quandl.get(it + '/' + jt, authtoken='TEbsCbsPnjdCqQCWJzCX')

# Store the data in the collection under AAPL
#collection.write(jt, qd[:-1], metadata={'source': 'Quandl'})

print(collection.list_items())

import time

start = time.time()
Example #24
        # if hsp[ticker] empty, or if it doesn't list prices
        return True
    else:  # if it has 'prices' but it's empty
        try:
            if not hsp[ticker]['prices']:
                return True
            else:
                return False
        except Exception as e:
            #~ print(e)
            logging.error(str(e))
            raise Exception


# prepare local storage/database
pystore.set_path(STOREPATH)

for sync_store, sync_url in zip(SYNC_STORES, SYNC_URLS):
    logging.info(
        '###############  Syncing {0} ###############'.format(sync_store))

    store = pystore.store(sync_store)

    # the various "tables" to store things in
    logging.info('Syncing {0}'.format(sync_store))
    collection_prices = store.collection('prices')
    if sync_store == 'INDEXES':
        for ind, n in zip(listofindexes, range(len(listofindexes))):
            printProgressBar(n,
                             len(listofindexes),
                             prefix='Estimated ',
Example #25
    parser.add_argument(
        "--expiration",
        default=None,
        type=str,
        help="Expiration in YYYYMMDD format. If none is provided, "
        "the system computes front expiration after next earnings")

    args = parser.parse_args()

    if args.verbose:
        logging.basicConfig(level=logging.INFO)
    else:
        logging.basicConfig(level=logging.WARNING)

    # Set storage path
    pystore.set_path(args.storage_path)
    store = pystore.store("ib")

    wrapper = RequestWrapper()
    wrapper.start_app(args.host, args.port, args.clientid)

    bar_size_str = args.bar_size.replace("mins", "m").replace(" ", "")
    duration = f"{args.duration[:-1]} {args.duration[-1]}"

    today = datetime.datetime.today()
    if args.expiration:
        expiration_dt = datetime.datetime.strptime(args.expiration, "%Y%m%d")

        if expiration_dt < today:
            query_time_str = expiration_dt.strftime("%Y%m%d %H:%M:%S")
        else:
Example #26
 def __init__(self):
     self.lock = threading.Lock()
     pystore.set_path("./db/pystore")
     self.store = pystore.store("cellarstone_db")
Example #27
import pystore
import pandas as pd
import numpy as np

# my_df = pd.DataFrame(["AAPL", "GOOG"], columns=["Symbol"])
my_df = pd.DataFrame([["abcd", "abcd", "abcd", "abcd"]],
                     columns=["W", "X", "Y", "Z"])
my_df.index = range(1, len(my_df) + 1)
print(my_df)

pystore.set_path("./db/pystore")
store = pystore.store("mydatastore")
collection = store.collection("test")

collection.write("AAA", my_df, overwrite=True)

item = collection.item("AAA")
print(item.to_pandas())

# print(collection.item("AAA").to_pandas())

# my_new_df = pd.DataFrame(["UBER"], columns=["Symbol"])
my_new_df = pd.DataFrame([["xyz", "xyz", "xyz", "xyz"]],
                         columns=["W", "X", "Y", "Z"])
my_new_df.index = range(2, len(my_df) + 2)
print(my_new_df)

item = collection.item("AAA")
collection.append("AAA", my_new_df, npartitions=item.data.npartitions)

item = collection.item("AAA")
Example #28
import argparse

# project libs
# import keys
import logger

# external libs
import numpy as np
import pandas as pd
import cbpro

cb = cbpro.PublicClient()
# cb = cbpro.AuthenticatedClient(keys.apiKey, keys.apiSecret, keys.passphrase)
import pystore

pystore.set_path('./pystore')


def get_start_date(market):
    return {
        'ETH-USD': dt(year=2016, month=6, day=17),
        'BTC-USD': dt(year=2015, month=7, day=21)
    }[market]


def _gen_date_frames(start, until=None, width=300):
    """Generator for list of sequential time-window tuples.

    Args:
        start (datetime): Start datetime of list of dates
        until (datetime): Generate list until this date is hit
Example #29
        else:
            value = 0
            cost = 0
            for stock in self._stocks.values():
                value += stock.get_value(stored_balance)
                cost += stock.get_cost(brokerage)
            return value / cost - 1

    def __str__(self):
        ''' '''
        sep = '\n  '
        return 'Stocks{}{sep}profit={} ({:.2f}%){sep}Stocks:\n{sep}'.format(
            super().__str__(), self.get_profit(),
            100*self.get_profit(relative=True), sep=sep) +\
            '\n\n{sep}'.join('{}{}{!s}'.format(symbol, sep, stock) for \
                             symbol, stock in self._stocks.items()) \
                .format(sep=sep)


if __name__ == '__main__':
    pystore.set_path('./db')
    store = pystore.store('accounts')
    savings = Account(store, 'savings')
    with open('API_KEY.txt') as magical_key:
        apikey = magical_key.readline()
    stocks = StocksAccount(store, 'stocks', apikey=apikey)
    print(savings)
    print()
    print(stocks)

Example #30
import matplotlib
from matplotlib import pyplot as plt
import numpy
import time
import subprocess
from os import path
import tempfile
#import tensorflow.compat.v1 as tf
#tf.disable_v2_behavior()
import tensorflow as tf
import pandas as pd
import pandas
import pystore
from data_extractor import Hardware

pystore.set_path('./data')
store = pystore.store('testdatastore')
collection = store.collection('sample.EOD')
df = collection.item('CPU-Util').to_pandas()

_PATH = path.dirname(__file__)
_CSV_FILE = path.join(_PATH, 'test-dataset.csv')


def bound_forecasts_between_0_and_100(ndarray):
    return numpy.clip(ndarray, 0, 100)


def upload_data(data_name, num_sample=100):

    util = Hardware()