Example #1
def main() -> None:
    #    df = pd.read_json(filename)
    GsSession.use(
        Environment.PROD, '77d7c80dec0b44e9868dfaa3a7e2cb36',
        '4edbc70b2249de3ddc9f303bb373575cb06839fb6857570648fdb772ccf8e377',
        ('read_product_data', ))
    ds_ecdc = Dataset('COVID19_COUNTRY_DAILY_ECDC')
    ds_who = Dataset('COVID19_COUNTRY_DAILY_WHO')

    data_who = ds_who.get_data(datetime.date(2020, 1, 21),
                               countryId=["US", "GB", "IN", "BR", "NG", "NZ"])

    df_ecdc = ds_ecdc.get_data(start=datetime.date(2019, 12, 31),
                               end=datetime.date(2020, 6, 18),
                               countryId=["US", "GB", "BR", "NZ", "IN", "NG"])
    df_ecdc["casePopulation"] = df_ecdc["newConfirmed"] / df_ecdc["population"]
    df_ecdc['rateOfChange'] = (
        df_ecdc['newConfirmed'] -
        df_ecdc['newConfirmed'].shift()) / df_ecdc['newConfirmed'].shift()
    df_ecdc['rateOfChange'] = df_ecdc['rateOfChange'].fillna(0)

    print(data_who)

    app = render_app(df_ecdc, data_who)
    app.run_server(port=2000)
Example #2
 def get_data(self, state: Union[datetime.date, datetime.datetime] = None):
     if self._loaded_data is None:
         ds = Dataset(self._data_set)
         if self._min_date:
             self._loaded_data = ds.get_data(self._min_date,
                                             self._max_date,
                                             assetId=(self._asset_id, ))
         else:
             return ds.get_data(
                 state, state,
                 assetId=(self._asset_id, ))[self._value_header]
     return self._loaded_data[self._value_header].at[pd.to_datetime(state)]
Example #3
    def __get_direct_underliers(self, asset_id, dataset) -> pd.DataFrame:
        """
        Queries the dataset for the date passed at initialisation. If no date was passed, returns the data
        for the latest available date.
        """
        ds = Dataset(dataset)
        if self.date:
            query = ds.get_data(start=self.date, end=self.date, assetId=[asset_id]).drop_duplicates()
        else:
            query = ds.get_data(assetId=[asset_id]).drop_duplicates()

        if len(query) > 0:
            self.date = query.index.max().date()
            query = query[query.index == query.index.max()].reset_index()
        return query
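The docstring above describes a useful fallback for Dataset.get_data: when no date is supplied, pull the full history for the asset and keep only the rows for the most recent date in the index. Below is a minimal standalone sketch of that pattern; the dataset name and asset id are placeholders, and an authenticated GsSession is assumed.

from gs_quant.data import Dataset


def latest_available_rows(dataset_name: str, asset_id: str):
    # Query everything for the asset, then keep only the newest date present.
    ds = Dataset(dataset_name)
    df = ds.get_data(assetId=[asset_id]).drop_duplicates()
    if len(df) == 0:
        return df
    return df[df.index == df.index.max()].reset_index()

# usage with placeholder identifiers:
# latest_available_rows('SOME_DATASET', 'MA4B66MW5E27U8P32SB')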
Example #4
 def get_data(self, state):
     if self._loaded_data is None:
         ds = Dataset(self._data_set)
         self._loaded_data = ds.get_data(self._min_date or state,
                                         self._max_date or state,
                                         assetId=(self._asset_id, ))
     return self._loaded_data[self._value_header]
Example #5
 def get_data(self, state: Union[datetime.date, datetime.datetime] = None):
     if self._loaded_data is None:
         ds = Dataset(self._data_set)
         self._loaded_data = ds.get_data(self._min_date or state,
                                         self._max_date or state,
                                         assetId=(self._asset_id, ))
     return self._loaded_data[self._value_header]
Example #6
 def populate_values(self, dataset, value_column, underlier_column):
     ds = Dataset(dataset)
     query = ds.get_data(start=self.date, end=self.date, assetId=[self.id])
     if len(query) > 0:
         for node in self.direct_underlier_assets_as_nodes:
             value = query.loc[query[underlier_column] == node.id][value_column].iloc[0]
             node.data[value_column] = value
             node.populate_values(dataset, value_column, underlier_column)
Example #7
    def holidays(self) -> set:
        if self.__calendars and not self.__holidays_loaded:
            dataset = Dataset(Dataset.GS.HOLIDAY)
            data = dataset.get_data(exchange=self.__calendars, start=self.DATE_LOW_LIMIT, end=self.DATE_HIGH_LIMIT)
            if not data.empty:
                self.__holidays.update(data.index.values.astype('datetime64[D]'))
            self.__holidays_loaded = True

        return self.__holidays
Example #8
 def get_fx_spot_series(self) -> pd.Series:
     ds = Dataset('WMFXSPOT')
     coverage = ds.get_coverage()
     cross = self.currency + '/' + self.source_asset.currency
     asset_id = coverage[coverage['name'] == cross]['assetId'].values[0]
     time_series = ds.get_data(assetId=asset_id,
                               start=dt.date(1970, 1, 1))['midPrice']
     time_series = time_series[~time_series.index.duplicated(keep='last')]
     time_series.name = cross
     return time_series
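Example #8 hinges on Dataset.get_coverage() to translate a human-readable cross name into the Marquee assetId that get_data expects. Here is a condensed sketch of just that lookup step, with a placeholder cross and an authenticated GsSession assumed.

import datetime as dt

from gs_quant.data import Dataset

ds = Dataset('WMFXSPOT')
coverage = ds.get_coverage()  # one row of metadata per covered asset
cross = 'EUR/USD'  # placeholder cross name
asset_id = coverage[coverage['name'] == cross]['assetId'].values[0]
spot = ds.get_data(assetId=asset_id, start=dt.date(2020, 1, 1))['midPrice']
spot = spot[~spot.index.duplicated(keep='last')]  # de-duplicate fixings, as above
print(spot.tail())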
Example #9
    def _remote_load(self) -> DataFrame:
        dataset = Dataset(
            'COVID19_COUNTRY_DAILY_WHO')  # initialize the dataset
        try:
            frame = dataset.get_data(countryId='US', start=date(
                2019, 1, 1))  # pull the US data into a Pandas dataframe
        except MqRequestError:
            frame = DataFrame()

        frame.reset_index(inplace=True)
        return frame
Example #10
def test_query_data_types(mocker):
    mocker.patch("gs_quant.api.gs.data.GsDataApi.query_data",
                 return_value=test_data)
    mocker.patch("gs_quant.api.gs.data.GsDataApi.get_types",
                 return_value=test_types)
    dataset = Dataset(Dataset.TR.TREOD)
    data = dataset.get_data(dt.date(2019, 1, 2),
                            dt.date(2019, 1, 9),
                            assetId='MA4B66MW5E27U8P32SB')
    assert data.equals(
        GsDataApi.construct_dataframe_with_types(str(Dataset.TR.TREOD),
                                                 test_data))
Example #11
def build_eq_vol_scenario_eod(
    asset_name: str,
    source_dataset: str,
    ref_spot: float = None,
    asset_name_type: AssetIdentifier = AssetIdentifier.REUTERS_ID,
    vol_date: date = date.today()
) -> MarketDataVolShockScenario:

    asset = SecurityMaster.get_asset(asset_name, asset_name_type)
    vol_dataset = Dataset(source_dataset)
    vol_data = vol_dataset.get_data(assetId=[asset.get_marquee_id()],
                                    strikeReference='forward',
                                    startDate=vol_date,
                                    endDate=vol_date)
    asset_ric = asset.get_identifier(AssetIdentifier.REUTERS_ID)
    return MarketDataVolShockScenario.from_dataframe(asset_ric, vol_data,
                                                     ref_spot)
Example #12
def volatility_screen(crosses, start_date, end_date, tenor='3m', plot=True):
    fxspot_dataset, fxvol_dataset = Dataset('FXSPOT'), Dataset('FXIMPLIEDVOL')
    spot_data, impvol_data, spot_fx, data = {}, {}, {}, {}
    for cross in crosses:
        spot = fxspot_dataset.get_data(start_date, end_date, bbid=cross)[[
            'spot'
        ]].drop_duplicates(keep='last')
        spot_fx[cross] = spot['spot']
        spot_data[cross] = volatility(spot['spot'], tenor)  # realized vol
        vol = fxvol_dataset.get_data(start_date,
                                     end_date,
                                     bbid=cross,
                                     tenor=tenor,
                                     deltaStrike='DN',
                                     location='NYC')[['impliedVolatility']]
        impvol_data[cross] = vol.drop_duplicates(keep='last') * 100

    spdata, ivdata = format_df(spot_data), format_df(impvol_data)
    diff = ivdata.subtract(spdata).dropna()
    for cross in crosses:
        data[cross] = {
            'Spot': last_value(spot_fx[cross]),
            '{} Implied'.format(tenor): last_value(ivdata[cross]),
            '{} Realized'.format(tenor): last_value(spdata[cross]),
            'Diff': last_value(diff[cross]),
            'Historical Implied Low': min(ivdata[cross]),
            'Historical Implied High': max(ivdata[cross]),
            '%-ile': last_value(percentiles(ivdata[cross]))
        }
    df = pd.DataFrame(data)
    vol_screen = df.transpose()
    st.write(st.dataframe(vol_screen.style.highlight_max(axis=0)))
    if plot:
        for fx in vol_screen.index:
            plt.scatter(vol_screen.loc[fx]['%-ile'],
                        vol_screen.loc[fx]['Diff'])
            plt.legend(vol_screen.index,
                       loc='best',
                       bbox_to_anchor=(0.9, -0.13),
                       ncol=3)

        plt.xlabel('Percentile of Current Implied Vol')
        plt.ylabel('Implied vs Realized Vol')
        plt.title('Entry Point vs Richness')
        st.pyplot(plt)
    return
Example #13
def build_eq_vol_scenario_intraday(
    asset_name: str,
    source_dataset: str,
    ref_spot: float = None,
    asset_name_type: AssetIdentifier = AssetIdentifier.REUTERS_ID,
    start_time: datetime = datetime.now() - timedelta(hours=1),
    end_time: datetime = datetime.now()
) -> MarketDataVolShockScenario:

    asset = SecurityMaster.get_asset(asset_name, asset_name_type)
    vol_dataset = Dataset(source_dataset)
    vol_data = vol_dataset.get_data(assetId=[asset.get_marquee_id()],
                                    strikeReference='forward',
                                    startTime=start_time,
                                    endTime=end_time)
    asset_ric = asset.get_identifier(AssetIdentifier.REUTERS_ID)
    return MarketDataVolShockScenario.from_dataframe(asset_ric, vol_data,
                                                     ref_spot)
Example #14
def fci(country_id: str,
        measure: _FCI_MEASURE = _FCI_MEASURE.FCI,
        *,
        source: str = None,
        real_time: bool = False,
        request_id: Optional[str] = None) -> pd.Series:
    """
    Daily Financial Conditions Index (FCI) for each of the world's large economies and many smaller ones,
    as well as aggregate FCIs for regions.

    :param country_id: id of country/region
    :param measure: FCI metric to retrieve
    :param source: name of function caller
    :param real_time: whether to retrieve intraday data instead of EOD
    :param request_id: server request id
    :return: FCI metric value
    """
    if real_time:
        raise NotImplementedError('real-time FCI data is not available')

    type_ = QueryType(inflection.titleize(measure.value))
    if (measure == _FCI_MEASURE.REAL_FCI
            or measure == _FCI_MEASURE.REAL_TWI_CONTRIBUTION):
        ds = Dataset('FCI')
        df = ds.get_data(geographyId=country_id)
        if (measure == _FCI_MEASURE.REAL_FCI):
            measure = 'realFCI'
        else:
            measure = 'realTWIContribution'
        series = ExtendedSeries(dtype=float) if (
            measure not in df.columns) else ExtendedSeries(df[measure])
        series.dataset_ids = ('FCI', )
        return series

    q = GsDataApi.build_market_data_query([country_id],
                                          query_type=type_,
                                          source=source,
                                          real_time=real_time)
    df = _market_data_timed(q, request_id)
    return _extract_series_from_df(df, type_, True)
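A hedged usage sketch for the fci function above: 'US' is a placeholder country id, and an authenticated GsSession with the read_product_data scope is assumed.

# headline FCI, fetched via the market-data query path
headline = fci('US')
# real FCI, served from the 'FCI' dataset branch shown above
real = fci('US', _FCI_MEASURE.REAL_FCI)
print(headline.tail())
print(real.tail())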
Example #15
""" BROKEN """
import datetime
from credentials.config import GSConfig

from gs_quant.data import Dataset
from gs_quant.session import GsSession, Environment

GsSession.use(Environment.PROD, GSConfig.client_id, GSConfig.client_secret, ('read_product_data',))

ds = Dataset('COVID19_COUNTRY_DAILY_CDC')
data = ds.get_data(start=datetime.date(2020, 1, 21), countryId="US")
print(data.head())  # peek at first few rows of data
Example #16
def test_query_data_types(query_data, get_types):
    query_data.return_value = test_data
    get_types.return_value = test_types
    dataset = Dataset(Dataset.TR.TREOD)
    data = dataset.get_data(dt.date(2019, 1, 2), dt.date(2019, 1, 9), assetId='MA4B66MW5E27U8P32SB')
    assert data.equals(construct_dataframe_with_types(str(Dataset.TR.TREOD), test_data))
Example #17
import datetime

from gs_quant.data import Dataset
from gs_quant.session import GsSession, Environment

GsSession.use(
    Environment.PROD, '77d7c80dec0b44e9868dfaa3a7e2cb36',
    '4edbc70b2249de3ddc9f303bb373575cb06839fb6857570648fdb772ccf8e377',
    ('read_product_data', ))

ds = Dataset('COVID19_COUNTRY_DAILY_WIKI')
# data = ds.get_data()
# print(data)  # peek at first few rows of data
data = ds.get_data(start=datetime.date(2019, 1, 20),
                   countryId=["US", "GB", "BR", "NZ", "IN"])
print(data)
# data.reset_index(inplace=True)
# data.to_json(r'wiki.json')
Example #18
# test API connectivity with a raw requests session (disabled block)
# request_url = 'https://api.marquee.gs.com/v1/users/self'
# request = session.get(url=request_url)
# print(request.text)
from datetime import date
from gs_quant.data import Dataset
from gs_quant.markets.securities import SecurityMaster, AssetIdentifier 
from gs_quant.session import GsSession

client_id = ''
client_secret = ''

scopes = GsSession.Scopes.get_default()
GsSession.use(client_id=client_id, client_secret=client_secret, scopes=scopes)

ds = Dataset('USCANFPP_MINI')
print (ds)

gsids = ds.get_coverage()['gsid'].values.tolist()
df = ds.get_data(date(2012, 7, 2), date(2017, 6, 30), gsid=gsids[0:5])

print (df)

for idx, row in df.iterrows():
    marqueeAssetId = row['assetId']
    asset = SecurityMaster.get_asset(marqueeAssetId, AssetIdentifier.MARQUEE_ID)
    df.loc[df['assetId'] == marqueeAssetId, 'assetName'] = asset.name

print (df)
Example #19
for df in dfs['in'].values():
    initial_share_price = df['Close'].iloc[0]
    df['YTD Gain (%)'] = (df['Close'] -
                          initial_share_price) / initial_share_price * 100

for df in dfs['out'].values():
    initial_share_price = df['Close'].iloc[0]
    df['YTD Gain (%)'] = (df['Close'] -
                          initial_share_price) / initial_share_price * 100

CLIENT_ID = config.CLIENT_ID
CLIENT_SECRET = config.CLIENT_SECRET
START, END = date(2020, 1, 2), date(2020, 12, 11)
GsSession.use(client_id=CLIENT_ID, client_secret=CLIENT_SECRET)
who_dataset = Dataset('COVID19_COUNTRY_DAILY_WHO')
who_data_frame = who_dataset.get_data(countryId='US', start=START, end=END)

app.layout = html.Div([
    dcc.Dropdown(id='stock-ticker-input-out',
                 options=[{
                     'label': key,
                     'value': str(key)
                 } for key in dfs['out'].keys()],
                 value=[
                     'SIX',
                 ],
                 multi=True),
    dcc.Graph(id="graph-in"),
    dcc.Dropdown(id='stock-ticker-input-in',
                 options=[{
                     'label': key,
Example #20
import plotly.express as px
from dash import dash
from dash import dash_table  # dash_table.DataTable is used below; on Dash 1.x this was `import dash_table`
from gs_quant.session import GsSession, Environment
from gs_quant.data import Dataset
from datetime import date
from credentials.config import GSConfig

import numpy as np

import pandas as pd

GsSession.use(client_id=GSConfig.client_id,
              client_secret=GSConfig.client_secret,
              scopes=('read_product_data', ))

dataset = Dataset('COVID19_COUNTRY_DAILY_WHO')
df = dataset.get_data(countryId='US', start=date(2019, 1, 1))

app = dash.Dash(__name__)
app.layout = dash_table.DataTable(
    id='table',
    columns=[{
        "name": i,
        "id": i
    } for i in df.columns],
    data=df.to_dict('records'),
)

if __name__ == '__main__':
    app.run_server(debug=True)
Example #21
# update session headers
session.headers.update({'Authorization':'Bearer '+ access_token})

# test API connectivity
request_url = 'https://api.marquee.gs.com/v1/users/self'
request = session.get(url=request_url)

GsSession.use(Environment.PROD, client_id, client_secret, ('read_product_data',))

##################################################################
##################################################################

#Extract Data
ds_who = Dataset('COVID19_COUNTRY_DAILY_WHO')
data_who = ds_who.get_data(datetime.date(2020, 1, 21), countryId=["US", "GB", "IN", "BR", "NG", "NZ"])


##################################################################
##################################################################

#Line graph for percentages
countries = set(data_who['countryName'])

line_p = go.Figure()

x = data_who.index
Example #22
from gs_quant.session import GsSession, Environment
from gs_quant.data import Dataset
from datetime import date
from credentials.config import GSConfig

GsSession.use(
    client_id=GSConfig.client_id,
    client_secret=GSConfig.client_secret,
    scopes=('read_product_data', ))  # authenticate GS Session with credentials

dataset = Dataset('COVID19_COUNTRY_DAILY_WIKI')  # initialize the dataset
frame = dataset.get_data(countryId='US', start=date(
    2019, 1, 1))  # pull the US data into a Pandas dataframe

print(frame)
print(list(frame.columns.values))
Example #23
import datetime
from datetime import date

import json
import os
import pandas as pd
import heapq

from gs_quant.data import Dataset
from gs_quant.session import GsSession, Environment

GsSession.use(
    Environment.PROD, 'b16a94fab7714a61b29065f6d6bda51b',
    '2179ad8fec38bbe8995f4d07293f9b476476dbef67b99f3a4074099de3fff049',
    ('read_product_data', ))

ds = Dataset('COVID19_COUNTRY_DAILY_WHO')
today = date.today()
countries = ds.get_data(today)[['countryName', 'countryId']].drop_duplicates()
country_id_name_dict = {}
for index, row in countries.iterrows():
    country_id_name_dict[row['countryId']] = row['countryName']


def get_new_daily_record_confirmed(queryType):
    return_countries = []
    country_ids = list(country_id_name_dict.keys())
    for country in country_ids:
        highest_confirmed = ds.get_data(datetime.date(2020, 1, 21),
                                        countryId=[country])[queryType].max()
        today_confirmed = ds.get_data(today, countryId=[country])[[queryType]]
        if highest_confirmed == today_confirmed[queryType].values[0]:
            return_countries.append(country_id_name_dict[country])
    if queryType == 'newConfirmed':
Example #24
# session.headers.update({"Authorization":"Bearer "+ access_token})

# request_url = "https://api.marquee.gs.com/v1/data/USCANFPP_MINI/query"

# request_query = {
#                     "where": {
#                         "gsid": ["string1","string2","string3"]
#                     },
#                     "startDate": "2012-01-01",
#                     "limit": 50
#                }

# request = session.post(url=request_url, json=request_query)
# results = json.loads(request.text)

# GS Quant documentation available at:
# https://developer.gs.com/docs/gsquant/guides/getting-started/

import datetime

from gs_quant.data import Dataset
from gs_quant.session import GsSession, Environment

GsSession.use(Environment.PROD, 'esobimpe', 'Henny123@',
              ('read_product_data', ))

ds = Dataset('USCANFPP_MINI')
#gsid=["10516","10696","11308"]
data = ds.get_data(datetime.date(2012, 1, 1), limit=50)
print(data.head())  # peek at first few rows of data
Example #25
from datetime import date
from gs_quant.data import Dataset
from gs_quant.markets.securities import SecurityMaster, AssetIdentifier
from gs_quant.session import GsSession
import pandas as pd

itdoesntmatterjustdowhatever = open("data_raw.txt", 'w')
#sys.stdout = itdoesntmatterjustdowhatever

client_id = "b2a0a48a91854045b5c7eb12c8973901"
client_secret = "2cb4ec68204ddee53a1a892ea70ee110bc5db9c30aa04024336d69ea9e4fafc6"

scopes = GsSession.Scopes.get_default()
GsSession.use(client_id=client_id, client_secret=client_secret, scopes=scopes)

ds = Dataset("USCANFPP_MINI")

gsids = ds.get_coverage()["gsid"].values.tolist()
data = ds.get_data(date(2017, 1, 15), date(2018, 1, 15), gsid=gsids[0:5])

print(data.head())

for idx, row in data.iterrows():
    marqueeAssetId = row["assetId"]
    asset = SecurityMaster.get_asset(marqueeAssetId,
                                     AssetIdentifier.MARQUEE_ID)
    data.loc[data["assetId"] == marqueeAssetId, "assetName"] = asset.name

print(data.head())
itdoesntmatterjustdowhatever.write(data.to_string())
Example #26
import logging
import random

from gs_quant.data import Dataset
from gs_quant.markets.securities import SecurityMaster, AssetIdentifier
from gs_quant.session import GsSession

client_id = '75d63f91387b434a9c68c8100f45c372'
client_secret = '2567e2c8ad3a203660e2636d07a6a674cbb1fc84774d5870ecba43667a975e74'
print("here")
scopes = GsSession.Scopes.get_default()
GsSession.use(client_id=client_id, client_secret=client_secret, scopes=scopes)

ds = Dataset('USCANFPP_MINI')

gsids = ds.get_coverage()['gsid'].values.tolist()
id_to_names = {}
localDB = []
for gsid in gsids:
    data = ds.get_data(gsid=gsid)
    localDB.append(data)
    if not data.empty:
        id = data['assetId'][0]
        id_to_names[gsid] = (id,
                             SecurityMaster.get_asset(
                                 id, AssetIdentifier.MARQUEE_ID))

company_names = list(map(lambda x: (x[0], x[1].name), id_to_names.values()))


def print_all(conn):
    with conn.cursor() as cur:
        cur.execute("SELECT * FROM names")
        rows = cur.fetchall()
        for row in rows:
Example #27
from gs_quant.session import GsSession, Environment
from gs_quant.data import Dataset
from datetime import date
from credentials.config import GSConfig

from gs_quant.timeseries import datetime

import numpy as np
import pandas as pd
from plotly.subplots import make_subplots
import plotly.graph_objects as go

GsSession.use(client_id=GSConfig.client_id, client_secret=GSConfig.client_secret, scopes=('read_product_data',))

dataset = Dataset('COVID19_COUNTRY_DAILY_WHO')
# df = dataset.get_data(countryId='US', start=date(2019, 1, 1))

# fig = px.line(df, x=df.index.values, y="totalConfirmed", title='Total Confirmed Over Time')
# fig.show()
#
# fig = px.line(df, x=df.index.values, y="newFatalities", title='Total Confirmed Over Time')
# fig.show()

df = dataset.get_data(start=date(2019, 1, 1), limit=20)
# ds = Dataset('COVID19_COUNTRY_DAILY_CDC')
# data = ds.get_data(datetime.date(2020, 1, 21), countryId=[""], limit=50)
print(df.head())  # peek at first few rows of data

# px.scatter(AllCountries, x=AllCountries.values(np.float64).columns, y='totalConfirmed', trendline='ols',
#           facet_col="variable", facet_col_wrap=3).update_xaxes(matches=None)
Example #28
# GS Quant documentation available at:
# https://developer.gs.com/docs/gsquant/guides/getting-started/

import datetime

from gs_quant.data import Dataset
from gs_quant.session import GsSession, Environment

GsSession.use(
    Environment.PROD, '8eb297dd6ffc4278a93840bcd3199aee',
    '5d0d012c86fd0434111214f261a041e7564da029a50523a2f09013777b97d9f3',
    ('read_product_data', ))

ds = Dataset('USCANFPP_MINI')
data = ds.get_data(datetime.date(2017, 1, 15),
                   datetime.date(2018, 1, 15),
                   gsid=["75154", "193067", "194688", "902608", "85627"])
print(data.head())  # peek at first few rows of data