Example no. 1
    def __init__(
        self,
        bridge_ip,
        logger,
        main_tbl_num,
        stats_table,
        next_table,
        scratch_table_num,
        session_rule_version_mapper,
    ):
        self._bridge_ip = bridge_ip
        self.logger = logger
        self.main_tbl_num = main_tbl_num
        self.stats_table = stats_table
        self.next_table = next_table
        self._scratch_tbl_num = scratch_table_num
        self._redirect_dict = RedirectDict()
        self._dns_cache = Memoizer({})
        self._redirect_port = get_service_config_value(
            'redirectd',
            'http_port',
            8080,
        )
        self._session_rule_version_mapper = session_rule_version_mapper

        self._cwf_args_set = False
        self._mac_rewrite_scratch = None
        self._internal_ip_allocator = None
        self._arpd_controller_fut = None
        self._arp_controller = None
        self._egress_table = None
        self._bridge_mac = None
Example no. 2
    def test_32_update_memoized_backend_error(self, memoizer_set):
        settings.DEBUG = True
        logging.basicConfig()

        # disable logger (to avoid cluttering test output)
        logging.disable(logging.ERROR)

        memoizer = Memoizer()

        @memoizer.memoize()
        def f():
            return random.randrange(0, 100000)

        exception_raised = False
        try:
            memoizer.update_memoized(f)
        except Exception:
            exception_raised = True
        assert exception_raised

        settings.DEBUG = False
        memoizer_set.reset_mock()

        # re-enable logger
        logging.disable(logging.NOTSET)
Example no. 3
    def test_22_memoize_set_backend_error(self, memoizer_set):
        logging.basicConfig()

        # disable logger (to avoid cluttering test output)
        logging.disable(logging.ERROR)

        memoizer = Memoizer()

        @memoizer.memoize()
        def f():
            return random.randrange(0, 100000)

        cache_key = f.make_cache_key(f.uncached)

        old_value = f()
        memoizer_set.assert_called_with(cache_key,
                                        old_value,
                                        timeout=f.cache_timeout)

        memoizer_set.reset_mock()

        new_value = f()
        memoizer_set.assert_called_with(cache_key,
                                        new_value,
                                        timeout=f.cache_timeout)

        assert new_value != old_value

        # re-enable logger
        logging.disable(logging.NOTSET)
Example no. 4
    def test_24_delete_memoized_backend_error(self, memoizer_delete):
        logging.basicConfig()

        # disable logger (to avoid cluttering test output)
        logging.disable(logging.ERROR)

        memoizer = Memoizer()

        @memoizer.memoize()
        def f(*args, **kwargs):
            return random.randrange(0, 100000)

        cache_key = f.make_cache_key(f.uncached, 'a', 'b', c='c', d='d')

        old_value = f('a', 'b', c='c', d='d')

        memoizer.delete_memoized(f, 'a', 'b', c='c', d='d')
        memoizer_delete.assert_called_with(cache_key)

        new_value = f('a', 'b', c='c', d='d')

        assert new_value == old_value

        # re-enable logger
        logging.disable(logging.NOTSET)
Example no. 5
    def test_26_delete_memoized_verhash_backend_error(self, memoizer_delete):
        logging.basicConfig()

        # disable logger (to avoid cluttering test output)
        logging.disable(logging.ERROR)

        memoizer = Memoizer()

        @memoizer.memoize()
        def f():
            return random.randrange(0, 100000)

        _fname, _ = function_namespace(f)
        version_key = memoizer._memvname(_fname)

        result = f()

        assert f() == result
        assert memoizer.get(version_key) is not memoizer.default_cache_value

        memoizer.delete_memoized_verhash(f)

        memoizer_delete.assert_called_with(version_key)

        assert f() == result
        assert memoizer.get(version_key) is not memoizer.default_cache_value

        # re-enable logger
        logging.disable(logging.NOTSET)
Example no. 6
def main():
    #%%
    data0 = {
        "x_train": pd.read_pickle(DATA_DIR + "/x_train.pkl"),
        "y_train": pd.read_pickle(DATA_DIR + "/y_train.pkl"),
        "x_test": pd.read_pickle(DATA_DIR + "/x_test.pkl"),
        "y_test": pd.read_pickle(DATA_DIR + "/y_test.pkl")
    }

    #%%
    param_grid_dic = OrderedDict([
        ("learning_rate", [0.05, 0.10, 0.15, 0.20, 0.25, 0.30]),
        ("max_depth", [3, 4, 5, 6, 8, 10, 12, 15]),
        ("min_child_weight", [1, 3, 5, 7]),
        ("gamma", [0.0, 0.1, 0.2, 0.3, 0.4]),
        ("colsample_bytree", [0.3, 0.4, 0.5, 0.7]),
    ])

    #%%
    train_fraction = 0.05
    test_fraction = 0.16
    data = subsample(data0, train_fraction, test_fraction)
    #%%
    memoization_path = DATA_DIR + "/" + "xgboost_memo%g" % train_fraction
    print("memoization_path= " + memoization_path)
    if not os.path.exists(memoization_path):
        os.mkdir(memoization_path)
    #%%
    fun = Memoizer(lambda param_dic: train_xgb(data, param_dic),
                   memoization_path)
    #%%
    grid_search(param_grid_dic, fun)
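Example no. 6 memoizes an expensive training call to disk, keyed by the hyper-parameter dict. A minimal sketch of that pattern using only the standard library; the `Memoizer(fn, path)` wrapper above is assumed to behave roughly like this, and `disk_memoize` is a hypothetical name:

import hashlib
import os
import pickle


def disk_memoize(fn, cache_dir):
    """Cache fn's result as a pickle file keyed by a hash of its argument."""
    os.makedirs(cache_dir, exist_ok=True)

    def wrapper(arg):
        key = hashlib.sha256(pickle.dumps(arg)).hexdigest()
        path = os.path.join(cache_dir, key + ".pkl")
        if os.path.exists(path):  # cache hit: reuse the stored result
            with open(path, "rb") as f:
                return pickle.load(f)
        result = fn(arg)  # cache miss: compute and persist
        with open(path, "wb") as f:
            pickle.dump(result, f)
        return result

    return wrapper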
Example no. 7
    def __init__(self, bridge_ip, logger, tbl_num, next_table):
        self._bridge_ip = bridge_ip
        self.logger = logger
        self.tbl_num = tbl_num
        self.next_table = next_table
        self._redirect_dict = RedirectDict()
        self._dns_cache = Memoizer({})
        self._redirect_port = get_service_config_value(
            'redirectd', 'http_port', 8080)
Example no. 8
    def __init__(self, bridge_ip, logger, main_tbl_num, next_table,
                 scratch_table_num, session_rule_version_mapper):
        self._bridge_ip = bridge_ip
        self.logger = logger
        self.main_tbl_num = main_tbl_num
        self.next_table = next_table
        self._scratch_tbl_num = scratch_table_num
        self._redirect_dict = RedirectDict()
        self._dns_cache = Memoizer({})
        self._redirect_port = get_service_config_value(
            'redirectd', 'http_port', 8080)
        self._session_rule_version_mapper = session_rule_version_mapper
Example no. 9
    def test_19_test_custom_repr_fn(self):

        custom_memoizer = Memoizer(repr_fn=lambda x: 'static')
        memoizer = Memoizer()

        class Test(object):
            def fn(self, arg=None):
                return 1

            cached_fn = memoizer.memoize(60)(fn)
            cached_fn_custom_repr = custom_memoizer.memoize(60)(fn)

            def __str__(self):
                return 'str'

            __unicode__ = __str__

            def __repr__(self):
                return 'repr'

        fake_obj = Test()
        fake_obj2 = Test()

        def _get_keys(obj):
            cache_key = obj.cached_fn.make_cache_key(obj.fn, obj, 123)
            custom_cache_key = obj.cached_fn_custom_repr.make_cache_key(
                obj.fn, obj, 123)

            return cache_key, custom_cache_key

        cache_key, custom_cache_key = _get_keys(fake_obj)
        assert cache_key != custom_cache_key

        cache_key2, custom_cache_key2 = _get_keys(fake_obj2)
        assert cache_key == cache_key2
        assert custom_cache_key == custom_cache_key2

        custom_cache_key3 = fake_obj.cached_fn_custom_repr.make_cache_key(
            fake_obj.fn, fake_obj, ['test'])
        assert custom_cache_key == custom_cache_key3
Example no. 10
    def test_17_memoize_none_value(self):
        self.memoizer = Memoizer()

        @self.memoizer.memoize()
        def foo():
            return None

        cache_key = foo.make_cache_key(foo.uncached)
        assert (self.memoizer.get(cache_key) is
                self.memoizer.default_cache_value)
        result = foo()
        assert result is None
        assert self.memoizer.get(cache_key) is None

        self.memoizer.delete_memoized(foo)
        cache_key = foo.make_cache_key(foo.uncached)
        assert (self.memoizer.get(cache_key) is
                self.memoizer.default_cache_value)
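Example no. 10 only works because the cache distinguishes "no entry" from a cached None through a dedicated sentinel (`default_cache_value`). A minimal sketch of that sentinel pattern; the class and names below are hypothetical:

_MISSING = object()  # unique sentinel; `is` never matches a real cached value


class SentinelCache:
    def __init__(self):
        self._store = {}

    def get(self, key):
        # Return the sentinel rather than None on a miss, so a
        # legitimately cached None remains distinguishable.
        return self._store.get(key, _MISSING)

    def set(self, key, value):
        self._store[key] = value


cache = SentinelCache()
assert cache.get('k') is _MISSING   # miss
cache.set('k', None)
assert cache.get('k') is None       # cached None, not a miss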
Example no. 11
    def test_31_update_memoized_backend_error(self, memoizer_set):

        logging.basicConfig()

        # disable logger (to avoid cluttering test output)
        logging.disable(logging.ERROR)

        memoizer = Memoizer()

        @memoizer.memoize()
        def f():
            return random.randrange(0, 100000)

        old_value = memoizer.update_memoized(f)
        fname, instance_fname = function_namespace(f, args=[])
        version_key = memoizer._memvname(fname)
        version_val = memoizer.get(version_key)
        memoizer_set.assert_called_with(
            version_key,
            version_val,
            timeout=f.cache_timeout
        )

        memoizer_set.reset_mock()

        new_value = memoizer.update_memoized(f)
        memoizer_set.assert_called_with(
            version_key,
            version_val,
            timeout=f.cache_timeout
        )

        assert new_value != old_value

        # re-enable logger
        logging.disable(logging.NOTSET)
Example no. 12
    def setUp(self):
        self.store = {}
        self.records = []
        self.memo = Memoizer(self.store, **self.memo_kwargs)
Example no. 13
def make_auctions_app(global_conf,
                      redis_url='redis://localhost:9002/1',
                      redis_password='',
                      redis_database='',
                      sentinel_cluster_name='',
                      sentinels='',
                      external_couch_url='http://localhost:5000/auction',
                      internal_couch_url='http://localhost:9000/',
                      proxy_internal_couch_url='http://localhost:9000/',
                      auctions_db='database',
                      hash_secret_key='',
                      timezone='Europe/Kiev',
                      preferred_url_scheme='http',
                      debug=False,
                      auto_build=False,
                      event_source_connection_limit=1000,
                      limit_replications_progress=99,
                      limit_replications_func='any'):
    """
    [app:main]
    use = egg:openprocurement.auction#auctions_server
    redis_url = redis://:password@localhost:1111/0
    external_couch_url = http://localhost:1111/auction
    internal_couch_url = http://localhost:9011/
    auctions_db = auction
    timezone = Europe/Kiev
    """
    auctions_server = components.queryUtility(IAuctionsServer)
    auctions_server.proxy_connection_pool = ConnectionPool(factory=Connection,
                                                           max_size=20,
                                                           backend='gevent')
    auctions_server.proxy_mappings = Memoizer({})
    auctions_server.event_sources_pool = deque([])
    auctions_server.config['PREFERRED_URL_SCHEME'] = preferred_url_scheme
    auctions_server.config['limit_replications_progress'] = float(
        limit_replications_progress)
    auctions_server.config['limit_replications_func'] = limit_replications_func

    auctions_server.config['REDIS'] = {
        'redis': redis_url,
        'redis_password': redis_password,
        'redis_database': redis_database,
        'sentinel_cluster_name': sentinel_cluster_name,
        'sentinel': loads(sentinels)
    }

    auctions_server.config['event_source_connection_limit'] = int(
        event_source_connection_limit)
    auctions_server.config['EXT_COUCH_DB'] = urljoin(external_couch_url,
                                                     auctions_db)
    auctions_server.add_url_rule('/' + auctions_db + '/<path:path>',
                                 'couch_server_proxy',
                                 couch_server_proxy,
                                 methods=['GET'])
    auctions_server.add_url_rule('/' + auctions_db + '/',
                                 'couch_server_proxy',
                                 couch_server_proxy,
                                 methods=['GET'],
                                 defaults={'path': ''})

    auctions_server.add_url_rule('/' + auctions_db + '_secured/<path:path>',
                                 'auth_couch_server_proxy',
                                 auth_couch_server_proxy,
                                 methods=['GET'])
    auctions_server.add_url_rule('/' + auctions_db + '_secured/',
                                 'auth_couch_server_proxy',
                                 auth_couch_server_proxy,
                                 methods=['GET'],
                                 defaults={'path': ''})

    auctions_server.config['INT_COUCH_URL'] = internal_couch_url
    auctions_server.config['PROXY_COUCH_URL'] = proxy_internal_couch_url
    auctions_server.config['COUCH_DB'] = auctions_db
    auctions_server.config['TIMEZONE'] = tz(timezone)

    auctions_server.couch_server = Server(
        auctions_server.config.get('INT_COUCH_URL'),
        session=Session(retry_delays=range(10)))
    if auctions_server.config['COUCH_DB'] not in auctions_server.couch_server:
        auctions_server.couch_server.create(auctions_server.config['COUCH_DB'])

    auctions_server.db = auctions_server.couch_server[
        auctions_server.config['COUCH_DB']]
    auctions_server.config['HASH_SECRET_KEY'] = hash_secret_key
    sync_design(auctions_server.db)
    for entry_point in iter_entry_points(PKG_NAMESPACE):
        plugin = entry_point.load()
        plugin(components)
    return auctions_server
Example no. 14
import json
import os
from os import path
from pathlib import Path

import gspread
import pygsheets
from memoize import Memoizer
from oauth2client.service_account import ServiceAccountCredentials
from pygsheets import Spreadsheet

user_spreadsheet_store = {}
memo = Memoizer(user_spreadsheet_store)


class MissingGoogleKeyException(Exception):
    pass


def get_credentials():
    scope = ['https://spreadsheets.google.com/feeds',
             'https://www.googleapis.com/auth/drive']
    google_key = os.environ.get("PC_GOOGLE_KEY")
    if google_key:
        google_key_json_payload = json.loads(google_key)
        key_path = '/tmp/data.json'
        with open(key_path, 'w') as outfile:
            json.dump(google_key_json_payload, outfile)
        credentials = ServiceAccountCredentials.from_json_keyfile_name(key_path, scope)
        os.remove(key_path)
        return credentials
    # Assumed completion: fail loudly when PC_GOOGLE_KEY is absent.
    raise MissingGoogleKeyException('PC_GOOGLE_KEY is not set')
Example no. 15
    def setUp(self):
        self.memoizer = Memoizer()
Example no. 16
import json
import os

import redis
import requests
from memoize import Memoizer
from packaging.version import parse  # assumed source of `parse` used below

PYPI_URL_PATTERN = 'https://pypi.python.org/pypi/{package}/json'

CHANNEL_URL_PATTERN = 'https://conda.anaconda.org/{channel}/{platform}/repodata.json'

REDIS_CONN = redis.StrictRedis(host=os.environ.get('REDIS_HOST', '127.0.0.1'),
                               port=int(os.environ.get('REDIS_PORT', '6379')),
                               password=os.environ.get('REDIS_PASSWORD',
                                                       r'¯\_(ツ)_/¯'))

CHANNELS = ['conda-forge', 'anaconda', 'c3i_test']

PKG_INFO = {}

memo = Memoizer({})


# https://stackoverflow.com/a/34366589/1005215
def get_pypi_version(package, url_pattern=PYPI_URL_PATTERN):
    """Return version of package on pypi.python.org using json."""
    req = requests.get(url_pattern.format(package=package))
    version = parse('0')
    if req.status_code == requests.codes.ok:
        j = json.loads(req.text.encode(req.encoding))
        if 'releases' in j:
            versions = [parse(s) for s in j['releases']]
            filtered = [v for v in versions if not v.is_prerelease]
            if len(filtered) == 0:
                return max(versions)
            else:
                return max(filtered)
    return version
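Hypothetical usage of the function above (network access to PyPI assumed):

# latest = get_pypi_version('requests')
# print(latest)  # a packaging Version object: the newest stable release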
Example no. 17
from memoize import Memoizer
import requests
import utils.errors as errors
from utils import config

memoKeys = {}
memo = Memoizer(memoKeys)


@memo(max_age=3600)
def validate_token(token):

    if not isinstance(token, str) or len(token) == 0:
        raise errors.InvalidAuth()

    headers = {'Authorization'.encode(): token.encode()}

    host = config.get_auth_server_url()
    port = config.get_auth_server_port()
    auth_url = ':'.join([host, str(port)])

    response = requests.get('/'.join(['http:/', auth_url, 'v1/users/current']),
                            headers=headers)

    if response.status_code != 200:
        raise errors.InvalidAuth()

    result = response.json()
    if not result:
        raise errors.InvalidAuth()
Example no. 18
def make_auctions_app(global_conf,
                      redis_url='redis://localhost:7777/0',
                      external_couch_url='http://localhost:5000/auction',
                      internal_couch_url='http://localhost:9000/',
                      proxy_internal_couch_url='http://localhost:9000/',
                      auctions_db='auctions',
                      hash_secret_key='',
                      timezone='Europe/Kiev',
                      preferred_url_scheme='http',
                      debug=False,
                      auto_build=False,
                      event_source_connection_limit=1000):
    """
    [app:main]
    use = egg:openprocurement.auction#auctions_server
    redis_url = redis://:password@localhost:1111/0
    external_couch_url = http://localhost:1111/auction
    internal_couch_url = http://localhost:9011/
    auctions_db = auction
    timezone = Europe/Kiev
    """
    auctions_server.proxy_connection_pool = ConnectionPool(factory=Connection,
                                                           max_size=20,
                                                           backend="gevent")
    auctions_server.proxy_mappings = Memoizer({})
    auctions_server.event_sources_pool = deque([])
    auctions_server.config['PREFERRED_URL_SCHEME'] = preferred_url_scheme
    auctions_server.config['REDIS_URL'] = redis_url
    auctions_server.config['event_source_connection_limit'] = int(
        event_source_connection_limit)
    auctions_server.config['EXT_COUCH_DB'] = urljoin(external_couch_url,
                                                     auctions_db)
    auctions_server.add_url_rule('/' + auctions_db + '/<path:path>',
                                 'couch_server_proxy',
                                 couch_server_proxy,
                                 methods=['GET'])
    auctions_server.add_url_rule('/' + auctions_db + '/',
                                 'couch_server_proxy',
                                 couch_server_proxy,
                                 methods=['GET'],
                                 defaults={'path': ''})

    auctions_server.add_url_rule('/' + auctions_db + '_secured/<path:path>',
                                 'auth_couch_server_proxy',
                                 auth_couch_server_proxy,
                                 methods=['GET'])
    auctions_server.add_url_rule('/' + auctions_db + '_secured/',
                                 'auth_couch_server_proxy',
                                 auth_couch_server_proxy,
                                 methods=['GET'],
                                 defaults={'path': ''})

    auctions_server.config['INT_COUCH_URL'] = internal_couch_url
    auctions_server.config['PROXY_COUCH_URL'] = proxy_internal_couch_url
    auctions_server.config['COUCH_DB'] = auctions_db
    auctions_server.config['TIMEZONE'] = tz(timezone)
    auctions_server.redis = Redis(auctions_server)
    auctions_server.couch_server = Server(
        auctions_server.config.get('INT_COUCH_URL'),
        session=Session(retry_delays=range(10)))
    if auctions_server.config['COUCH_DB'] not in auctions_server.couch_server:
        auctions_server.couch_server.create(auctions_server.config['COUCH_DB'])

    auctions_server.db = auctions_server.couch_server[
        auctions_server.config['COUCH_DB']]
    auctions_server.config['HASH_SECRET_KEY'] = hash_secret_key
    sync_design(auctions_server.db)
    auctions_server.config['ASSETS_DEBUG'] = True if debug else False
    assets.auto_build = True if auto_build else False
    return auctions_server
Example no. 19
def mock_auctions_server(request, mocker):
    params = getattr(request, 'param', {})

    server_config_redis = params.get('server_config_redis', DEFAULT)
    connection_limit = params.get('connection_limit', DEFAULT)
    get_mapping = params.get('get_mapping', DEFAULT)
    proxy_path = params.get('proxy_path', DEFAULT)
    event_sources_pool = params.get('event_sources_pool', DEFAULT)
    proxy_connection_pool = params.get('proxy_connection_pool', DEFAULT)
    stream_proxy = params.get('stream_proxy', DEFAULT)
    db = params.get('db', DEFAULT)
    request_headers = params.get('request_headers', [])
    request_url = params.get('request_url', DEFAULT)
    redirect_url = params.get('redirect_url', DEFAULT)
    abort = params.get('abort', DEFAULT)

    class AuctionsServerAttributesContainer(object):
        logger = NotImplemented
        proxy_mappings = NotImplemented
        config = NotImplemented
        event_sources_pool = NotImplemented
        proxy_connection_pool = NotImplemented
        get_mapping = NotImplemented
        db = NotImplemented
        request_headers = NotImplemented

    class Request(object):
        headers = NotImplemented
        environ = NotImplemented
        url = NotImplemented

    class Config(object):
        __getitem__ = NotImplemented

    def config_getitem(item):
        if item == 'REDIS':
            return server_config_redis
        elif item == 'event_source_connection_limit':
            return connection_limit
        else:
            raise KeyError

    mock_path_info = MagicMock()

    def environ_setitem(item, value):
        if item == 'PATH_INFO':
            mock_path_info(value)
            return value
        else:
            raise KeyError

    mocker.patch.object(auctions_server_module, 'get_mapping', get_mapping)
    patch_pysse = mocker.patch.object(auctions_server_module,
                                      'PySse',
                                      spec_set=PySse)
    patch_add_message = patch_pysse.return_value.add_message

    patch_request = mocker.patch.object(auctions_server_module,
                                        'request',
                                        spec_set=Request)
    patch_request.environ.__setitem__.side_effect = environ_setitem
    patch_request.headers = request_headers
    patch_request.url = request_url

    patch_redirect = mocker.patch.object(auctions_server_module,
                                         'redirect',
                                         return_value=redirect_url)
    patch_abort = mocker.patch.object(auctions_server_module,
                                      'abort',
                                      return_value=abort)

    patch_StreamProxy = mocker.patch.object(auctions_server_module,
                                            'StreamProxy',
                                            return_value=stream_proxy)

    auctions_server = NonCallableMock(
        spec_set=AuctionsServerAttributesContainer)

    logger = MagicMock(spec_set=frontend.logger)
    proxy_mappings = MagicMock(spec_set=Memoizer({}))
    proxy_mappings.get.return_value = proxy_path
    config = MagicMock(spec_set=Config)
    config.__getitem__.side_effect = config_getitem

    auctions_server.logger = logger
    auctions_server.proxy_mappings = proxy_mappings
    auctions_server.config = config
    auctions_server.event_sources_pool = event_sources_pool
    auctions_server.proxy_connection_pool = proxy_connection_pool
    auctions_server.db = db

    mocker.patch.object(auctions_server_module, 'auctions_server',
                        auctions_server)

    return {
        'server': auctions_server,
        'proxy_mappings': proxy_mappings,
        'mock_path_info': mock_path_info,
        'patch_StreamProxy': patch_StreamProxy,
        'patch_redirect': patch_redirect,
        'patch_abort': patch_abort,
        'patch_PySse': patch_pysse,
        'patch_add_message': patch_add_message
    }
Example no. 20
from memoize import Memoizer

memoized = Memoizer()

@memoized
def fib(n):
    if n <= 1: return n
    return fib(n - 1) + fib(n - 2)

@memoized
def choose(n, k):
    if k < 0 or k > n: return 0
    if k == 0 or k == n: return 1
    return choose(n - 1, k - 1) + choose(n - 1, k)
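A quick sanity check for the two recurrences above (assuming, as the example does, a Memoizer usable as a bare decorator; with caching, both run in polynomial rather than exponential time):

assert fib(10) == 55
assert [choose(4, k) for k in range(5)] == [1, 4, 6, 4, 1]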
Example no. 21
# Imports inferred from usage below.
import datetime
import math as m

import numpy as np
import ystockquote
from memoize import Memoizer
from pandas import DataFrame


class ep_risk:
    # example folio_dict: {'JNJ': 10**6, 'AMGN': 10**6, 'UNH': 10**6, 'MYL': 10**6, 'A': 10**6, 'XLV': -(5*10**6)}
    def __init__(self, folio_dict):
        self.port = folio_dict

    memo = Memoizer({})
    #caches the data pulls for up to 60 seconds
    @memo(max_age=60)
    def get_data(self, end_date="01/15/15", window_length=700):
        last_date = datetime.datetime.strptime(end_date, "%x")
        first_date = last_date - datetime.timedelta(days=window_length)
        s = str(first_date)
        l = str(last_date)

        #Create List of Dataframes for all stocks
        df_s = [self.prices(tick, s, l) for tick in self.port.keys()]
        #Merge that List of Dataframes
        df_final = df_s[0]
        for i in range(1, len(df_s)):
            df_final = DataFrame.join(df_final, df_s[i])

        return df_final

    def prices(self, ticker, start_string, end_string):
        #holds key:Date Val:Adj Close
        data_hash = dict()
        #Create dictionary from yahoo data
        tick_test = ystockquote.get_historical_prices(ticker, start_string,
                                                      end_string)
        #fill data_hash
        for date in tick_test.keys():
            data_hash[datetime.date(int(date[:4]), int(date[5:7]), int(
                date[8:]))] = float(tick_test[date]['Adj Close'])
        #make datahash into dataframe called df
        df = DataFrame.from_dict(data_hash, orient='index', dtype=float)
        #rename first column
        df.columns = [ticker]
        #sort in date order
        df = df.sort_index(ascending=True)
        return df

    def data_stats(self, n_days=2, end_date="01/15/15", window_length=700):
        price_data = self.get_data(end_date=end_date,
                                   window_length=window_length)
        #calculate n-day returns
        returns = price_data.pct_change(periods=n_days,
                                        fill_method='pad',
                                        limit=None,
                                        freq=None)
        # apply log transformation
        returns = 1 + returns
        returns = returns.applymap(m.log)

        #calculate means and volatilities
        means = returns.mean(axis=0)
        vols = returns.std(axis=0)

        #calc correlations
        correlation_matrix = returns.corr()

        #returns lambda_plus for PCA approaches
        lambda_plus = (1 + m.sqrt(
            float(len(self.port.keys())) / max([
                returns.iloc[:, i].count()
                for i in range(0, len(self.port.keys()))
            ])))**2

        return correlation_matrix, means, vols, lambda_plus

    def pca_risk(self,
                 alpha,
                 n_days=2,
                 end_date="01/15/15",
                 window_length=700):
        correlation_matrix, means, vols, lambda_plus = self.data_stats(
            n_days=n_days, end_date=end_date, window_length=window_length)
        #return eigenvectors and values (eigenvectors are columns)
        e_vals, e_vecs = np.linalg.eigh(correlation_matrix)
        #sort (largest to smallest)
        indices = e_vals.argsort()[::-1]
        sorted_evals = [e_vals[i] for i in indices]
        sorted_evecs = e_vecs[:, indices]

        significant_num = len(
            [value for value in sorted_evals if value > lambda_plus])
        cleaned = self.clean_correlation_matrix(significant_num, sorted_evals,
                                                sorted_evecs)
        cleaned_df = DataFrame(data=cleaned,
                               index=correlation_matrix.columns.tolist(),
                               columns=correlation_matrix.columns.tolist(),
                               dtype=float,
                               copy=True)

        var, es = self.mc_risk(alpha, cleaned_df, means, vols)
        return var, es

    def unclean_risk(self,
                     alpha,
                     n_days=2,
                     end_date="01/15/15",
                     window_length=700):
        correlation_matrix, means, vols, lambda_plus = self.data_stats(
            n_days=n_days, end_date=end_date, window_length=window_length)
        var, es = self.mc_risk(alpha, correlation_matrix, means, vols)
        return var, es

    def historical_risk(self,
                        alpha,
                        n_days=2,
                        end_date="01/15/15",
                        window_length=700):
        price_data = self.get_data(end_date=end_date,
                                   window_length=window_length)
        returns = price_data.pct_change(periods=n_days,
                                        fill_method='pad',
                                        limit=None,
                                        freq=None)
        return self.port_rets(returns, alpha)

    def mc_risk(self, alpha, corr_df, means, vols):
        #generate 50,000 samples from T dist
        joint_scenarios = self.multivariatet_sampler(
            np.zeros(len(self.port.keys())), corr_df, 4, 50000)
        #scale these scenarios by multiplying by scaling vol then shifting by mean
        joint_scenarios = m.sqrt(1 / 2.) * np.dot(
            joint_scenarios, np.diag(vols)) + np.tile(means, (50000, 1))
        #calculate portfolio returns
        var_alpha, es_alpha = self.port_rets(joint_scenarios, alpha)
        return var_alpha, es_alpha

    def port_rets(self, scenarios, alpha):
        port_vec = np.array(list(self.port.values()))
        sim_returns = np.dot(scenarios, port_vec)
        var_alpha = np.percentile(sim_returns, alpha)
        es_alpha = np.mean([v for v in sim_returns.tolist() if v < var_alpha])
        return var_alpha, es_alpha

    def clean_correlation_matrix(self, significant_num, lambdas, vecs):
        dimension = len(lambdas)
        matrix = np.zeros((dimension, dimension))
        for i in range(0, dimension):
            for j in range(0, dimension):
                ro = 0
                for k in range(0, significant_num):
                    ro += lambdas[k] * vecs[i, k] * vecs[j, k]
                if (i == j):
                    eii_sq = 1
                    for k in range(0, significant_num):
                        eii_sq -= lambdas[k] * vecs[i, k] ** 2
                    ro = ro + eii_sq
                matrix[i, j] = ro
        return matrix

    #Known Function to generate samples from multivariate t in python (Kenny Chowdhary - PhD)
    def multivariatet_sampler(self, mu, Sigma, N, M):
        '''
        Output:
        Produce M samples of d-dimensional multivariate t distribution
        Input:
        mu = mean (d dimensional numpy array or scalar)
        Sigma = scale matrix (dxd numpy array)
        N = degrees of freedom
        M = # of samples to produce
        '''
        d = len(Sigma)
        g = np.tile(np.random.gamma(N / 2., 2. / N, M), (d, 1)).T
        Z = np.random.multivariate_normal(np.zeros(d), Sigma, M)
        return mu + Z / np.sqrt(g)
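A hypothetical usage sketch for the sampler above: with zero mean, identity scale, and N degrees of freedom, each of the M rows is one d-dimensional draw.

# Hypothetical usage: 5 draws from a 3-dimensional t with 4 degrees of freedom.
# sampler = ep_risk({'JNJ': 10**6})
# draws = sampler.multivariatet_sampler(np.zeros(3), np.eye(3), 4, 5)
# assert draws.shape == (5, 3)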
Example no. 22
import calendar
import datetime
import functools

from memoize import Memoizer

store = {}
memo = Memoizer(store)


def next_month(year, month):
    """
    Return the year and month of the next month
    NOTE: month is 1 based
    """
    return add_months(year, month, 1)


def add_months(year, month, months):
    """
    Return the year and month of #months after the given year and month
    NOTE: month is 1 based
    """
    month += months
    while month > 12:
        month -= 12
        year += 1
    return year, month
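
# Quick illustrative check of the month arithmetic above:
# 2023-11 plus 3 months wraps past December once, giving (2024, 2).
assert add_months(2023, 11, 3) == (2024, 2)
assert next_month(2023, 12) == (2024, 1)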


def record_runtime(func):
Example no. 23
from urllib.parse import urljoin
import requests
from cloudfoundry.apps import CloudFoundryApp
from cloudfoundry.organizations import CloudFoundryOrg
from cloudfoundry.spaces import CloudFoundrySpace
from cloudfoundry.routes import CloudFoundryRoute
from cloudfoundry.domains import CloudFoundryDomain
from utils import create_bits_zip
from collections import OrderedDict
import logging
import time
from memoize import Memoizer


cache_store = {}
memo = Memoizer(cache_store)
max_cache_time = 10


class CloudFoundryException(Exception):
    pass


class CloudFoundryAuthenticationException(CloudFoundryException):
    pass


class CloudFoundryInterface(object):

    def __init__(self, target, username=None, password=None, debug=False):
        self._apps = None