def _convert_pos_set_with_weights(position_set: PositionSet, currency: Currency) -> PositionSet:
    """Convert a weight-based position set into a quantity-based one via the pricing service.

    Every position must carry a weight and no quantity; the service turns weights into
    quantities using the set's reference notional and pricing date.

    :param position_set: position set whose positions are expressed as weights
    :param currency: currency used for the target notional
    :return: a new PositionSet whose positions carry quantities
    :raises MqValueError: if any position lacks a weight, has a quantity, or pricing fails
    """
    weighted_positions = []
    for pos in position_set.positions:
        if pos.weight is None:
            raise MqValueError('If you are uploading a position set with a notional value, every position in that '
                               'set must have a weight')
        if pos.quantity is not None:
            raise MqValueError('If you are uploading a position set with a notional value, no position in that '
                               'set can have a quantity')
        weighted_positions.append({'assetId': pos.asset_id, 'weight': pos.weight})
    payload = {
        'positions': weighted_positions,
        'parameters': {
            'targetNotional': position_set.reference_notional,
            'currency': currency.value,
            'pricingDate': position_set.date.strftime('%Y-%m-%d'),
            'assetDataSetId': 'GSEOD',
            'notionalType': 'Gross'
        }
    }
    try:
        price_results = GsSession.current._post('/price/positions', payload)
    except Exception as e:
        raise MqValueError('There was an error pricing your positions. Please try uploading your positions as '
                           f'quantities instead: {e}')
    priced_positions = [Position(identifier=p['assetId'], asset_id=p['assetId'], quantity=p['quantity'])
                        for p in price_results['positions']]
    return PositionSet(date=position_set.date, positions=priced_positions)
def relative_date_add(date_rule: str, strict: bool = False) -> float:
    """Change the string in date rule format to the number of days. E.g 1d to 1, 1y to 365, 1m to 30, -1w to -7

    :param date_rule: rule string, an optional leading '-', a number and a unit letter
    :param strict: if True, raise when the rule does not match the expected format
    :return: number of days as a float (0 if no match and strict is False)
    :raises MqValueError: if the unit letter is unknown, or (when strict) the rule is malformed
    """
    # search once instead of twice (the original called re.search twice with the same args)
    match = re.search(DateRuleReg, date_rule)
    if match is not None:
        date_str = match.group(1)
        # compute the sign arithmetically rather than by building a '-'-prefixed string
        if date_str[0] == '-':
            sign = -1.0
            num = float(date_str[1:-1])
        else:
            sign = 1.0
            num = float(date_str[:-1])
        rule = date_str[-1:]
        if rule in DictDayRule:
            # scale converts the unit to days (e.g. y -> 365, m -> 30, w -> 7)
            return sign * num * DictDayRule[rule]
        raise MqValueError('There are no valid day rule for the point provided.')
    if strict:
        raise MqValueError(f'invalid date rule {date_rule}')
    return 0
def _get_tdapi_fxo_assets_vol_swaps(**kwargs) -> Union[str, list]:
    """Resolve a vol swap asset id, applying the expiry-tenor filter locally.

    For vol swaps we do not restrict assets with a service-side filter, because the asset
    service isn't set up for the parameters passed in; instead all candidates are fetched
    and the expiry tenor is matched in code.

    :param kwargs: asset query parameters; 'expiry_tenor' and 'pricing_location' are stripped
    :return: the matching asset id
    :raises MqValueError: if no assets are found or none matches the expiry tenor
    """
    expiry_tenor = kwargs.get("expiry_tenor")
    query = {k: v for k, v in kwargs.items() if k not in ("expiry_tenor", "pricing_location")}
    assets = GsAssetApi.get_many_assets(**query)
    if not assets:
        raise MqValueError('No assets found matching search criteria' + str(kwargs))
    if expiry_tenor is not None:
        wanted = expiry_tenor.lower()
        for asset in assets:
            if asset.parameters["lastFixingDate"].lower() == wanted:
                return asset.id
    raise MqValueError('Specified arguments did not match any asset in the dataset' + str(kwargs))
def get(cls, user_id: str = None, name: str = None, email: str = None):
    """
    Resolve a user ID, name, email, and/or company into a single User object

    :param user_id: User's unique GS Marquee User ID
    :param name: User's name (formatted 'Last Name, First Name')
    :param email: User's email address
    :return: A Marquee User object that corresponds to requested parameters
    """
    if user_id is None and name is None and email is None:
        raise MqValueError('Please specify a user id, name, or email address')
    # strip the optional 'guid:' prefix from the id
    if user_id and user_id.startswith('guid:'):
        user_id = user_id[5:]
    results = GsUsersApi.get_users(user_ids=[user_id] if user_id else None,
                                   user_names=[name] if name else None,
                                   user_emails=[email] if email else None)
    if len(results) > 1:
        raise MqValueError('Error: This request resolves to more than one user in Marquee')
    if len(results) == 0:
        raise MqValueError('Error: No user found')
    resolved = results[0]
    return User(user_id=resolved.id,
                name=resolved.name,
                email=resolved.email,
                company=resolved.company)
def _get_mdapi_rates_assets(**kwargs) -> Union[str, list]:
    """Resolve rates asset id(s) for the given query, retrying with flipped swap legs.

    :param kwargs: asset query parameters
    :return: a single asset id, or a list of ids for term-structure queries
    :raises MqValueError: if no asset matches, or multiple match a non-term-structure query
    """
    assets = GsAssetApi.get_many_assets(**kwargs)
    # change order of basis swap legs and check if swap in dataset
    if len(assets) == 0 and ('asset_parameters_payer_rate_option' in kwargs):
        # flip legs
        kwargs['asset_parameters_payer_rate_option'], kwargs['asset_parameters_receiver_rate_option'] = \
            kwargs['asset_parameters_receiver_rate_option'], kwargs['asset_parameters_payer_rate_option']
        # bug fix: only flip designated maturities when present - the unconditional swap raised
        # KeyError for queries without maturity parameters (the crosscurrency variant guards this)
        if 'asset_parameters_payer_designated_maturity' in kwargs:
            kwargs['asset_parameters_payer_designated_maturity'], \
                kwargs['asset_parameters_receiver_designated_maturity'] = \
                kwargs['asset_parameters_receiver_designated_maturity'], \
                kwargs['asset_parameters_payer_designated_maturity']
        assets = GsAssetApi.get_many_assets(**kwargs)
    if len(assets) > 1:
        # term structure measures need multiple assets
        if ('asset_parameters_termination_date' not in kwargs) or ('asset_parameters_effective_date' not in kwargs):
            return [asset.id for asset in assets]
        else:
            raise MqValueError('Specified arguments match multiple assets')
    elif len(assets) == 0:
        raise MqValueError('Specified arguments did not match any asset in the dataset')
    else:
        return assets[0].id
def _get_tdapi_inflation_rates_assets(allow_many=False, **kwargs) -> Union[str, list]:
    """Resolve inflation rates asset id(s), retrying without a NONE clearing house.

    :param allow_many: if True, return all matching asset ids instead of raising
    :param kwargs: asset query parameters; 'pricing_location' is stripped
    :return: a single asset id, or a list for term-structure / allow_many queries
    :raises MqValueError: if no asset matches, or multiple match when disallowed
    """
    # sanitize input for asset query
    kwargs.pop("pricing_location", None)
    assets = GsAssetApi.get_many_assets(**kwargs)
    clearing_key = 'asset_parameters_clearing_house'
    if not assets and clearing_key in kwargs \
            and kwargs[clearing_key] == tm_rates._ClearingHouse.NONE.value:
        # retry the query without the clearing house
        del kwargs[clearing_key]
        assets = GsAssetApi.get_many_assets(**kwargs)
    if len(assets) > 1:
        # term structure measures legitimately resolve to multiple assets
        multiple_ok = ('asset_parameters_termination_date' not in kwargs) or \
            ('asset_parameters_effective_date' not in kwargs) or allow_many
        if multiple_ok:
            return [asset.id for asset in assets]
        raise MqValueError('Specified arguments match multiple assets')
    if not assets:
        raise MqValueError('Specified arguments did not match any asset in the dataset' + str(kwargs))
    return assets[0].id
def run(self,
        start_date: dt.date = None,
        end_date: dt.date = None,
        backcast: bool = None,
        is_async: bool = True):
    """Schedule the report and either return the job future or block until completion.

    :param start_date: schedule start date
    :param end_date: schedule end date
    :param backcast: whether to backcast the report
    :param is_async: if True, return the job future immediately; otherwise poll until done
    :return: the job future (async) or the job result (sync)
    :raises MqValueError: if the report is stuck waiting or takes too long
    """
    self.schedule(start_date, end_date, backcast)
    attempts = 5
    while attempts > 0:
        try:
            job_future = self.get_most_recent_job()
            if is_async:
                return job_future
            # poll up to 100 times at 6s intervals (~10 minutes)
            polls_remaining = 100
            while polls_remaining > 0:
                if job_future.done():
                    return job_future.result()
                sleep(6)
                # bug fix: the counter was never decremented, so this loop could never
                # terminate and the timeout error below was unreachable
                polls_remaining -= 1
            raise MqValueError(f'Your report {self.id} is taking longer than expected to finish. Please contact the '
                               'Marquee Analytics team at [email protected]')
        except IndexError:
            # no job found yet - retry a few times before giving up
            attempts -= 1
    status = Report.get(self.id).status
    if status == ReportStatus.waiting:
        raise MqValueError(f'Your report {self.id} is stuck in "waiting" status and therefore cannot be run at '
                           'this time.')
    raise MqValueError(f'Your report {self.id} is taking longer to run than expected. '
                       'Please reach out to the Marquee Analytics team at [email protected] '
                       'for assistance.')
def _get_factor_data(report_id: str, factor_name: str, query_type: QueryType) -> pd.Series:
    """Build a date-indexed series of a factor's data from a risk report.

    :param report_id: id of the factor risk report
    :param factor_name: name of the factor within the report's risk model
    :param query_type: which measure to extract
    :return: a pandas Series indexed by date
    :raises MqValueError: if the report is not a factor risk report or the factor is unknown
    """
    # Validate the report type and the factor before querying
    report = RiskReport(report_id)
    if report.get_type() not in [ReportType.Portfolio_Factor_Risk, ReportType.Asset_Factor_Risk]:
        raise MqValueError('This report is not a factor risk report')
    risk_model_id = report.get_risk_model_id()
    factor = Factor(risk_model_id, factor_name)
    if factor.factor is None:
        raise MqValueError('Factor name requested is not available in the risk model associated with this report')

    # The column name is the camelCased query type value
    col_name = query_type.value.replace(' ', '')
    col_name = col_name[0].lower() + col_name[1:]
    data_type = QUERY_TO_FIELD_MAP[query_type]
    factor_data = report.get_factor_data(factor=factor.get_name())
    rows = [{'date': item['date'], col_name: item[data_type]}
            for item in factor_data if item.get(data_type)]

    # Build the timeseries
    df = pd.DataFrame(rows)
    df.set_index('date', inplace=True)
    df.index = pd.to_datetime(df.index)
    return _extract_series_from_df(df, query_type)
def result(self):
    """
    :return: a Pandas DataFrame containing the results of the report job, or None for
        report types with no tabular results
    :raises MqValueError: if the job is cancelled, errored, or not yet done
    """
    status = self.status()
    if status == ReportStatus.cancelled:
        # message fix: was 'This report job in status ...' (missing "is"), inconsistent
        # with the "error" message below
        raise MqValueError('This report job is in status "cancelled". Cannot retrieve results.')
    if status == ReportStatus.error:
        raise MqValueError('This report job is in status "error". Cannot retrieve results.')
    if status != ReportStatus.done:
        raise MqValueError('This report job is not done. Cannot retrieve results.')
    if self.__report_type in [ReportType.Portfolio_Factor_Risk, ReportType.Asset_Factor_Risk]:
        results = GsReportApi.get_factor_risk_report_results(risk_report_id=self.__report_id,
                                                             start_date=self.__start_date,
                                                             end_date=self.__end_date)
        return pd.DataFrame(results)
    if self.__report_type == ReportType.Portfolio_Performance_Analytics:
        query = DataQuery(where={'reportId': self.__report_id},
                          start_date=self.__start_date,
                          end_date=self.__end_date)
        results = GsDataApi.query_data(query=query, dataset_id=ReportDataset.PPA_DATASET.value)
        return pd.DataFrame(results)
    # other report types have no retrievable tabular results
    return None
def schedule(self,
             start_date: dt.date = None,
             end_date: dt.date = None,
             backcast: bool = None):
    """Schedule this report, deriving missing dates from portfolio position history.

    :param start_date: schedule start date; derived from positions if omitted (portfolios only)
    :param end_date: schedule end date; derived from positions if omitted (portfolios only)
    :param backcast: whether to backcast the report
    :raises MqValueError: if ids are missing, dates are required but absent, or the
        portfolio has no positions
    """
    if self.id is None or self.__position_source_id is None:
        raise MqValueError('Can only schedule reports with valid IDs and Position Source IDs.')
    dates_missing = start_date is None or end_date is None
    if self.position_source_type != PositionSourceType.Portfolio and dates_missing:
        raise MqValueError('Must specify schedule start and end dates for report.')
    if dates_missing:
        position_dates = GsPortfolioApi.get_position_dates(self.position_source_id)
        if not position_dates:
            raise MqValueError('Cannot schedule reports for a portfolio with no positions.')
        earliest = min(position_dates)
        if start_date is None:
            # backcasts start one business day before a year prior to the earliest position date
            start_date = business_day_offset(earliest - relativedelta(years=1), -1, roll='forward') \
                if backcast else earliest
        if end_date is None:
            end_date = earliest if backcast else business_day_offset(dt.date.today(), -1, roll='forward')
    GsReportApi.schedule_report(report_id=self.id,
                                start_date=start_date,
                                end_date=end_date,
                                backcast=backcast)
def get_data_series(self,
                    field: Union[str, Fields],
                    start: Optional[Union[dt.date, dt.datetime]] = None,
                    end: Optional[Union[dt.date, dt.datetime]] = None,
                    as_of: Optional[dt.datetime] = None,
                    since: Optional[dt.datetime] = None,
                    **kwargs) -> pd.Series:
    """
    Get a time series of data for a field of a dataset

    :param field: The DataSet field to use
    :param start: Requested start date/datetime for data
    :param end: Requested end date/datetime for data
    :param as_of: Request data as_of
    :param since: Request data since
    :param kwargs: Extra query arguments, e.g. ticker='EDZ19'
    :return: A Series of the requested data, indexed by date or time, depending on the DataSet

    **Examples**

    >>> from gs_quant.data import Dataset
    >>> import datetime as dt
    >>>
    >>> weather = Dataset('WEATHER')
    >>> dew_point = weather.get_data_series('dewPoint',
    >>>                                     dt.date(2016, 1, 15),
    >>>                                     dt.date(2016, 1, 16),
    >>>                                     city=('Boston', 'Austin'))
    """
    field_value = field.value if not isinstance(field, str) else field
    query = self.provider.build_query(start=start,
                                      end=end,
                                      as_of=as_of,
                                      since=since,
                                      fields=(field_value,),
                                      **kwargs)

    # a series only makes sense for datasets keyed by exactly one symbol dimension
    symbol_dimensions = self.provider.symbol_dimensions(self.id)
    if len(symbol_dimensions) != 1:
        raise MqValueError('get_data_series only valid for symbol_dimensions of length 1')
    symbol_dimension = symbol_dimensions[0]

    data = self.provider.query_data(query, self.id)
    df = self.provider.construct_dataframe_with_types(self.id, data)

    from gs_quant.api.gs.data import GsDataApi
    if isinstance(self.provider, GsDataApi):
        # reject results spanning more than one symbol
        gb = df.groupby(symbol_dimension)
        if len(gb.groups) > 1:
            raise MqValueError('Not a series for a single {}'.format(symbol_dimension))

    if df.empty:
        return pd.Series()
    return pd.Series(index=df.index, data=df.loc[:, field_value].values)
def _convert_asset_for_mdapi_swap_rates(**kwargs) -> str:
    """Resolve exactly one swap asset id for the given query parameters.

    :param kwargs: asset query parameters
    :return: the single matching asset id
    :raises MqValueError: if zero or more than one asset matches
    """
    assets = GsAssetApi.get_many_assets(**kwargs)
    if not assets:
        raise MqValueError('Specified arguments did not match any asset in the dataset')
    if len(assets) > 1:
        raise MqValueError('Specified arguments match multiple assets')
    return assets[0].id
def _flip_crosscurrency_swap_legs(swap_kwargs: dict) -> None:
    """Swap payer/receiver leg parameters in place (the operation is its own inverse)."""
    swap_kwargs['asset_parameters_payer_rate_option'], swap_kwargs['asset_parameters_receiver_rate_option'] = \
        swap_kwargs['asset_parameters_receiver_rate_option'], swap_kwargs['asset_parameters_payer_rate_option']
    if 'asset_parameters_payer_designated_maturity' in swap_kwargs:
        swap_kwargs['asset_parameters_payer_designated_maturity'], \
            swap_kwargs['asset_parameters_receiver_designated_maturity'] = \
            swap_kwargs['asset_parameters_receiver_designated_maturity'], \
            swap_kwargs['asset_parameters_payer_designated_maturity']
    if 'asset_parameters_payer_currency' in swap_kwargs:
        swap_kwargs['asset_parameters_payer_currency'], swap_kwargs['asset_parameters_receiver_currency'] = \
            swap_kwargs['asset_parameters_receiver_currency'], swap_kwargs['asset_parameters_payer_currency']


def _get_tdapi_crosscurrency_rates_assets(allow_many=False, **kwargs) -> Union[str, list]:
    """Resolve crosscurrency rates asset id(s), retrying with flipped legs and without a
    NONE clearing house.

    The leg-flip logic was previously duplicated inline twice; it is now factored into
    _flip_crosscurrency_swap_legs, preserving the original retry sequence exactly.

    :param allow_many: if True, return all matching asset ids instead of raising
    :param kwargs: asset query parameters; 'pricing_location' is stripped
    :return: a single asset id, or a list for term-structure / allow_many queries
    :raises MqValueError: if no asset matches, or multiple match when disallowed
    """
    # sanitize input for asset query
    kwargs.pop("pricing_location", None)
    assets = GsAssetApi.get_many_assets(**kwargs)
    # change order of basis swap legs and check if swap in dataset
    if len(assets) == 0 and ('asset_parameters_payer_rate_option' in kwargs):
        _flip_crosscurrency_swap_legs(kwargs)
        assets = GsAssetApi.get_many_assets(**kwargs)
    if len(assets) == 0 and ('asset_parameters_clearing_house' in kwargs):
        # test without the clearing house
        if kwargs['asset_parameters_clearing_house'] == tm_rates._ClearingHouse.NONE.value:
            del kwargs['asset_parameters_clearing_house']
            assets = GsAssetApi.get_many_assets(**kwargs)
            # flip the legs back (helper is self-inverse) and retry once more
            if len(assets) == 0 and ('asset_parameters_payer_rate_option' in kwargs):
                _flip_crosscurrency_swap_legs(kwargs)
                assets = GsAssetApi.get_many_assets(**kwargs)
    if len(assets) > 1:
        # term structure measures need multiple assets
        if ('asset_parameters_termination_date' not in kwargs) or (
                'asset_parameters_effective_date' not in kwargs) or allow_many:
            return [asset.id for asset in assets]
        else:
            raise MqValueError('Specified arguments match multiple assets')
    elif len(assets) == 0:
        raise MqValueError('Specified arguments did not match any asset in the dataset' + str(kwargs))
    else:
        return assets[0].id
def _check_window(series_length: int, window: Window):
    """Validate a rolling Window against the length of a series.

    :param series_length: length of the series the window will be applied to
    :param window: window with integer size ``w`` and ramp ``r``; non-integer values are not checked
    :raises MqValueError: if ``w`` is not positive, or ``r`` is negative or exceeds the series length
    """
    if series_length > 0 and isinstance(window.w, int) and isinstance(window.r, int):
        if window.w <= 0:
            raise MqValueError('Window value must be greater than zero.')
        if window.r > series_length or window.r < 0:
            # message fix: the checks accept r == 0 and r == series_length, but the old
            # message claimed strict bounds ("less than" / "greater than zero")
            raise MqValueError('Ramp value must be less than or equal to the length of the series and '
                               'greater than or equal to zero.')
def query_data(cls, query: Union[DataQuery, MDAPIDataQuery], dataset_id: str = None,
               asset_id_type: Union[GsIdType, str] = None) \
        -> Union[MDAPIDataBatchResponse, DataQueryResponse, tuple]:
    """Execute a data query, resolving cross-reference (xref) identifiers to asset ids when needed.

    :param query: the query to run; MDAPI coordinate queries take a different endpoint
    :param dataset_id: id of the dataset to query (used for non-coordinate queries)
    :param asset_id_type: identifier type used in the where clause; results are mapped back to it
    :return: responses tuple for coordinate queries, otherwise the list of result row dicts
    :raises MqValueError: if more than one xref type is given, or identifier mapping is incomplete
    """
    if isinstance(query, MDAPIDataQuery) and query.market_data_coordinates:
        # Don't use MDAPIDataBatchResponse for now - it doesn't handle quoting style correctly
        results: Union[MDAPIDataBatchResponse, dict] = GsSession.current._post('/data/coordinates/query',
                                                                               payload=query)
        if isinstance(results, dict):
            return results.get('responses', ())
        else:
            return results.responses if results.responses is not None else ()
    elif isinstance(query, DataQuery) and query.where:
        where = query.where.as_dict() if isinstance(query.where, FieldFilterMap) else query.where
        xref_keys = set(where.keys()).intersection(XRef.properties())
        if xref_keys:
            # Check that assetId is a symbol dimension of this data set. If not, we need to do a separate query
            # to resolve xref -> assetId
            if len(xref_keys) > 1:
                # NOTE(review): double negative - message likely meant "Cannot specify more than one ..."
                raise MqValueError('Cannot not specify more than one type of asset identifier')
            definition = cls.get_definition(dataset_id)
            sd = definition.dimensions.symbolDimensions
            if definition.parameters.symbolStrategy == 'MDAPI' or ('assetId' not in sd and 'gsid' not in sd):
                xref_type = min(xref_keys)
                if asset_id_type is None:
                    asset_id_type = xref_type
                xref_values = where[asset_id_type]
                # normalize a single identifier to a tuple
                xref_values = (xref_values,) if isinstance(xref_values, str) else xref_values
                asset_id_map = GsAssetApi.map_identifiers(xref_type, GsIdType.id, xref_values)
                if len(asset_id_map) != len(xref_values):
                    raise MqValueError('Not all {} were resolved to asset Ids'.format(asset_id_type))
                # replace the xref filter on the query with the resolved asset ids (mutates query.where)
                setattr(query.where, xref_type, None)
                query.where.assetId = [asset_id_map[x] for x in xref_values]
    results: Union[DataQueryResponse, dict] = GsSession.current._post('/data/{}/query'.format(dataset_id),
                                                                      payload=query)
    if isinstance(results, dict):
        results = results.get('data', ())
    else:
        results = results.data if results.data is not None else ()
    if asset_id_type not in {GsIdType.id, None}:
        # map the asset ids in the results back to the identifier type the caller asked with
        asset_ids = tuple(set(filter(None, (r.get('assetId') for r in results))))
        if asset_ids:
            xref_map = GsAssetApi.map_identifiers(GsIdType.id, asset_id_type, asset_ids)
            if len(xref_map) != len(asset_ids):
                raise MqValueError('Not all asset Ids were resolved to {}'.format(asset_id_type))
            for result in results:
                result[asset_id_type] = xref_map[result['assetId']]
    return results
def backcast_reports(self, start_date: dt.date):
    """Schedule backcasted reports from start_date up to the earliest position date.

    :param start_date: backcast start; must be strictly before the earliest position date
    :raises MqValueError: if the portfolio has no positions or start_date is too late
    """
    position_dates = self.get_position_dates()
    if not position_dates:
        raise MqValueError('Cannot backcast reports on a portfolio with no positions')
    earliest_position_date = min(position_dates)
    if start_date >= earliest_position_date:
        raise MqValueError(f'Backcasting start date must be before {earliest_position_date.strftime("%d %b, %Y")}')
    self._schedule_reports(start_date=start_date,
                           end_date=earliest_position_date,
                           backcast=True)
def check_underlier_fields(underlier: EqOption) -> bool:
    """Validate that the underlier's expiration date is in a backtest-supported format.

    Absolute dates (datetime.date objects or ISO-formatted strings) are rejected; the
    backtest service expects relative expiries.

    :param underlier: option whose expiration_date field is validated
    :return: True if the field passes validation
    :raises MqValueError: if the expiration date is an absolute date
    """
    expiry = underlier.expiration_date
    if isinstance(expiry, datetime.date):
        raise MqValueError('Datetime.date format for expiration date field is not supported for backtest service')
    if re.search(ISO_FORMAT, expiry) is not None:
        # strptime returns a (truthy) datetime, so any parseable ISO date string is rejected
        if datetime.datetime.strptime(expiry, "%Y-%m-%d"):
            raise MqValueError('Date format for expiration date field is not supported for backtest service')
    return True
def _get_tdapi_fxo_assets(**kwargs) -> Union[str, list]:
    """Resolve exactly one FX option asset id for the given query parameters.

    :param kwargs: asset query parameters; 'pricing_location' is stripped before querying
    :return: the single matching asset id
    :raises MqValueError: if zero or more than one asset matches
    """
    # sanitize input for asset query
    kwargs.pop("pricing_location", None)
    assets = GsAssetApi.get_many_assets(**kwargs)
    if not assets:
        raise MqValueError('Specified arguments did not match any asset in the dataset' + str(kwargs))
    if len(assets) > 1:
        raise MqValueError('Specified arguments match multiple assets' + str(kwargs))
    return assets[0].id
def __init__(self,
             id_: str = None,
             *,
             entitlements: Union[Entitlements, Entitlements_] = None,
             dataviz_dict: dict = None):
    """Initialize a DataViz figure from an existing entity id or a dataviz dictionary.

    :param id_: id of an existing visualization entity to fetch
    :param entitlements: entitlements for the figure
    :param dataviz_dict: dictionary representation of the visualization (alternative to id_)
    :raises MqValueError: if the entity cannot be fetched, or a source lacks both id and alias
    """
    super().__init__(self.__class__.__name__, id_, entitlements=entitlements, dataviz_dict=dataviz_dict)
    self.type = SupportedFigure.PLOT
    self.__initialized = False
    sources = None
    if id_:
        if not self._viz_response:
            raise MqValueError('Unable to instantiate DataViz. Unable to fetch visualization entity.')
        sources = self._viz_response['sources']
    elif dataviz_dict:
        # bug fix: this previously tested `sources in dataviz_dict` - sources was still None
        # at this point, so dictionary-provided sources were never read
        sources = dataviz_dict['sources'] if 'sources' in dataviz_dict else None
    if sources:
        for source in sources:
            if source["type"] == DataVizSourceType.HEADER.value:
                if source["id"]:
                    self.__header_id = source["id"]
                elif source["alias"]:
                    self.__header_alias = source["alias"]
                else:
                    raise MqValueError('Unable to instantiate DataViz. Header ID or alias missing.')
            elif source["type"] == DataVizSourceType.TEMPLATE.value:
                if source["id"]:
                    self.__template_id = source["id"]
                elif source["alias"]:
                    self.__template_alias = source["alias"]
                else:
                    raise MqValueError('Unable to instantiate DataViz. Template ID or alias missing.')
    else:
        self.__header_id = None
        self.__header_alias = None
        self.__template_id = None
        self.__template_alias = None
def validate_ticker(cls, ticker: str):
    """ Validate basket ticker

    :param ticker: ticker string to validate with the indices service
    :raises MqValueError: if the service rejects the ticker
    """
    url = '/indices/validate'
    try:
        GsSession.current._post(url, payload=IndicesValidateInputs(ticker=ticker))
    except HTTPError as err:
        # chain the original HTTPError so the root cause survives in tracebacks
        raise MqValueError(f'Unable to validate ticker with {err}') from err
def get_market_data(cls, query, request_id=None) -> pd.DataFrame:
    """Run a measures query and assemble the responses into a single DataFrame.

    :param query: payload for the /data/measures service
    :param request_id: id used to correlate log entries
    :return: DataFrame indexed by date or time, with the contributing dataset_ids attached
    :raises MqValueError: if any response container reports error messages
    """
    GsSession.current: GsSession
    start = time.perf_counter()
    body = GsSession.current._post('/data/measures', payload=query)
    log_debug(request_id, _logger, 'market data query (%s) ran in %.3f ms', body.get('requestId'),
              (time.perf_counter() - start) * 1000)
    dataset_ids = []
    frames = []
    for response in body['responses']:
        container = response['queryResponse'][0]
        dataset_ids.extend(container.get('dataSetIds', ()))
        if 'errorMessages' in container:
            raise MqValueError(f"measure service request {body['requestId']} failed: {container['errorMessages']}")
        if 'response' in container:
            frame = MarketDataResponseFrame(container['response']['data'])
            # index by date for daily data, time for intraday
            frame.set_index('date' if 'date' in frame.columns else 'time', inplace=True)
            frame.index = pd.to_datetime(frame.index)
            frames.append(frame)
    log_debug(request_id, _logger, f'fetched data from {dataset_ids}')
    df = pd.concat(frames) if frames else MarketDataResponseFrame()
    df.dataset_ids = tuple(dataset_ids)
    return df
def update_positions(self, position_sets: List[PositionSet], net_positions: bool = True):
    """Update portfolio positions, pricing any weight-based sets into quantities first.

    :param position_sets: sets to upload; sets with a reference notional are priced,
        sets without one must be quantity-only
    :param net_positions: kept for interface compatibility
    :raises MqValueError: if a set without a notional contains weights or missing quantities
    :raises NotImplementedError: if the entity is not a portfolio
    """
    if self.positioned_entity_type != EntityType.PORTFOLIO:
        raise NotImplementedError
    if not position_sets:
        return
    currency = GsPortfolioApi.get_portfolio(self.id).currency
    converted_sets = []
    for pos_set in position_sets:
        if pos_set.reference_notional is None:
            if any(pos.quantity is None or pos.weight is not None for pos in pos_set.positions):
                raise MqValueError('If you would like to upload position sets without notionals, '
                                   'every position must have a quantity and cannot have a weight.')
            converted_sets.append(pos_set)
        else:
            converted_sets.append(self._convert_pos_set_with_weights(pos_set, currency))
    GsPortfolioApi.update_positions(portfolio_id=self.id,
                                    position_sets=[p.to_target() for p in converted_sets])
    # give the service time to process the new positions before callers query them
    time.sleep(3)
def __init__(self, gs_asset: GsAsset = None, **kwargs):
    """Initialize a basket, either from an existing GsAsset or from keyword defaults.

    :param gs_asset: existing asset to wrap; must be of a basket type
    :param kwargs: attributes for a new basket when no asset is given; '_finish_init'
        triggers full initialization at the end
    :raises MqValueError: if gs_asset is not a basket-type asset
    """
    self.__error_messages = None
    if gs_asset:
        if gs_asset.type.value not in BasketType.to_list():
            raise MqValueError(f'Failed to initialize. Asset {gs_asset.id} is not a basket')
        self.__id = gs_asset.id
        self.__initial_entitlements = gs_asset.entitlements
        # round-trip through JSON to obtain a plain-dict view of the asset
        asset_entity: Dict = json.loads(json.dumps(gs_asset.as_dict(), cls=JSONEncoder))
        Asset.__init__(self, gs_asset.id, gs_asset.asset_class, gs_asset.name,
                       exchange=gs_asset.exchange, currency=gs_asset.currency,
                       entity=asset_entity)
        PositionedEntity.__init__(self, gs_asset.id, EntityType.ASSET)
        self.__populate_current_attributes_for_existing_basket(gs_asset)
    else:
        self.__populate_default_attributes_for_new_basket(**kwargs)
    self.__error_messages = set([])
    if get(kwargs, '_finish_init', False):
        self.__finish_initialization()
def _get_factor_data(report_id: str, factor_name: str, query_type: QueryType) -> pd.Series:
    """Build a date-indexed series of a factor's results from a factor risk report.

    :param report_id: id of the factor risk report
    :param factor_name: 'Factor', 'Specific', 'Total', or a risk-model factor name
    :param query_type: which measure to extract
    :return: a pandas Series indexed by date
    :raises MqValueError: if a risk measure is requested for a non-aggregate factor name
    """
    report = FactorRiskReport.get(report_id)
    if factor_name not in ['Factor', 'Specific', 'Total']:
        if query_type in [QueryType.DAILY_RISK, QueryType.ANNUAL_RISK]:
            raise MqValueError('Please pick a factor name from the following: ["Total", "Factor", "Specific"]')
        # resolve the canonical factor name from the report's risk model
        model = FactorRiskModel.get(report.get_risk_model_id())
        factor = model.get_factor(factor_name)
        factor_name = factor.name

    # column name is the camelCased query type; 'factorXyz' columns map to the 'xyz' data field
    col_name = decapitalize(query_type.value.replace(' ', ''))
    data_type = decapitalize(col_name[6:]) if col_name.startswith('factor') else col_name
    factor_data = report.get_results(factors=[factor_name],
                                     start_date=DataContext.current.start_date,
                                     end_date=DataContext.current.end_date,
                                     return_format=ReturnFormat.JSON)
    rows = [{'date': item['date'], col_name: item[data_type]}
            for item in factor_data if item.get(data_type)]

    df = pd.DataFrame(rows)
    if not df.empty:
        df.set_index('date', inplace=True)
        df.index = pd.to_datetime(df.index)
    return _extract_series_from_df(df, query_type)
def get(cls, report_id: str, acceptable_types: List[ReportType] = None):
    """Fetch a report by id, returning the most specific Report subclass for its type.

    :param report_id: Marquee report id
    :param acceptable_types: if given, raise when the report's type is not in this list
    :return: a FactorRiskReport, PerformanceReport, or generic Report
    :raises MqValueError: if the report type is not in acceptable_types
    """
    # This map can't be instantiated / stored at the top of this file, because the report
    # subclasses aren't defined there. Map directly to the classes - the old code built
    # throwaway instances just to call type() on them.
    report_type_to_class_type = {
        ReportType.Portfolio_Factor_Risk: FactorRiskReport,
        ReportType.Asset_Factor_Risk: FactorRiskReport,
        ReportType.Portfolio_Performance_Analytics: PerformanceReport
    }
    report = GsReportApi.get_report(report_id=report_id)
    if acceptable_types is not None and report.type not in acceptable_types:
        raise MqValueError('Unexpected report type found.')
    report_class = report_type_to_class_type.get(report.type, Report)
    return report_class(report_id=report.id,
                        position_source_id=report.position_source_id,
                        position_source_type=report.position_source_type,
                        report_type=report.type,
                        parameters=report.parameters,
                        status=report.status)
def run_backtest(cls, backtest: Backtest, correlation_id: str = None) -> BacktestResult:
    """
    :param backtest: definition of a backtest which should be run on Marquee API
    :param correlation_id: used for logging purposes; helps in tracking all the requests
        which ultimately serve the same purpose (e.g. calculating a backtest)
    :return: result of running the backtest
    :raises MqValueError: if the service response carries no Data section
    """
    request_headers = {'Content-Type': 'application/json;charset=utf-8'}
    if correlation_id is not None:
        request_headers["X-CorrelationId"] = correlation_id
    response = GsSession.current._post('/backtests/calculate', backtest, request_headers=request_headers)
    # map the response onto a BacktestResult; Data is mandatory, risks/portfolio optional
    if "Data" not in response:
        raise MqValueError('No Data in Response Message.')
    return BacktestResult(performance=response['Data'],
                          risks=response.get('RiskData'),
                          portfolio=response.get('Portfolio'))
def __handle_rule(self, rule: str, result: date, week_mask: str,
                  currencies: List[Union[Currency, str]] = None,
                  exchanges: List[Union[ExchangeCode, str]] = None,
                  holiday_calendar: List[date] = None) -> date:
    """Apply one relative-date rule (e.g. '-3b', '2d', 'EOM') to result.

    :param rule: rule string - optional leading '-', optional digit count, then the rule name
    :param result: the date the rule is applied to
    :param week_mask: business-week mask passed through to the rule class
    :param currencies: currencies passed through to the rule class
    :param exchanges: exchanges passed through to the rule class
    :param holiday_calendar: holidays passed through to the rule class
    :return: the transformed date
    :raises MqValueError: if the rule string has no rule name
    :raises NotImplementedError: if no rule class exists for the rule name
    """
    if rule.startswith('-'):
        remainder = rule[1:]
        number_match = search(r'\d+', remainder)
        number = int(number_match.group(0)) * -1 if number_match else 0
        # bug fix: use match.end() (end of the digit run) rather than match.endpos (end of
        # the searched string), which truncated multi-character rule names to their last
        # letter; also avoid AttributeError when there are no digits at all
        rule_str = remainder[number_match.end():] if number_match else remainder
    else:
        number_match = search(r'\d+', rule)
        if number_match:
            rule_str = rule[number_match.end():]
            number = int(number_match.group(0))
        else:
            rule_str = rule
            number = 0
    if not rule_str:
        raise MqValueError(f'Invalid rule "{rule}"')
    try:
        rule_class = getattr(rules, f'{rule_str}Rule')
        return rule_class(result,
                          results=result,
                          number=number,
                          week_mask=week_mask,
                          currencies=currencies,
                          exchanges=exchanges,
                          holiday_calendar=holiday_calendar).handle()
    except AttributeError:
        raise NotImplementedError(f'Rule {rule} not implemented')
def update(self) -> Dict:
    """ Update your custom basket

    :return: dictionary containing asset id and report id

    **Usage**

    Make updates to your basket's metadata, pricing options, publishing options, or composition

    **See also**

    :func:`get_details` :func:`poll_status` :func:`create`
    """
    self.__finish_populating_attributes_for_existing_basket()
    edit_inputs, rebal_inputs = self.__get_updates()
    if edit_inputs is None and rebal_inputs is None:
        raise MqValueError('Update failed: Nothing on the basket was changed')
    # dispatch on which kinds of changes are present
    if rebal_inputs is None:
        response = GsIndexApi.edit(self.id, edit_inputs)
    elif edit_inputs is None:
        response = GsIndexApi.rebalance(self.id, rebal_inputs)
    else:
        response = self.__edit_and_rebalance(edit_inputs, rebal_inputs)
    # refresh local state from the updated asset and track the create report
    gs_asset = GsAssetApi.get_asset(self.id)
    self.__latest_create_report = GsReportApi.get_report(response.report_id)
    self.__error_messages.remove(ErrorMessage.UNMODIFIABLE)
    self.__init__(gs_asset=gs_asset)
    return response.as_dict()
def resolve_entities(reference_list: List[Dict]):
    """
    Utility function to fetch entities (assets, countries, etc.). Allows us to split functionality that requires
    data fetching.
    :param reference_list: A list of entity references (entityId and entityType dictionaries)
    :return: None
    :raises MqValueError: if a processor reference's parameter is not among its children
    """
    entity_cache = {}
    for reference in reference_list:
        # Create a hash key of the entity data so we don't fetch the same entity multiple times.
        key = hash((reference.get(ENTITY_ID), reference.get(ENTITY_TYPE)))
        if key in entity_cache:
            entity = entity_cache[key]
        else:
            entity = Entity.get(reference.get(ENTITY_ID), 'MQID', reference.get(ENTITY_TYPE))
            # bug fix: the cache was never populated, so duplicate references re-fetched
            # the same entity on every iteration
            entity_cache[key] = entity
        if reference[TYPE] == DATA_ROW:
            # If the reference is for a data row, simply set the entity of the row.
            reference[REFERENCE].entity = entity
        elif reference[TYPE] == PROCESSOR:
            # If the reference is for a processor, set the given parameter as the entity.
            setattr(reference[REFERENCE], reference[PARAMETER], entity)
            data_query_info = reference[REFERENCE].children.get(reference[PARAMETER])
            if not data_query_info:
                raise MqValueError(f'{reference[PARAMETER]} does not exist in children of '
                                   f'{reference[REFERENCE].__class__.__name__}')
            data_query_info.entity = entity
def resolve_entities(reference_list: List[Dict], entity_cache: Dict = None):
    """
    Utility function to fetch entities (assets, countries, etc.). Allows us to split functionality that requires
    data fetching.
    :param reference_list: A list of entity references (entityId and entityType dictionaries)
    :param entity_cache: Map of entity id to the entity for external cache management
    :return: None
    :raises MqValueError: if a processor reference's parameter is not among its children
    """
    entity_cache = entity_cache or {}
    for reference in reference_list:
        # Check if the entity is in the cache
        entity_id = reference.get(ENTITY_ID)
        if entity_id in entity_cache:
            entity = entity_cache[entity_id]
        else:
            try:
                entity = Entity.get(entity_id, 'MQID', reference.get(ENTITY_TYPE))
            except MqRequestError as e:
                _logger.warning(e)
                # fall back to the raw id when the entity cannot be fetched
                entity = entity_id
            # bug fix: results (including the fallback) were never stored, so duplicate
            # references re-fetched every time and the caller-supplied cache stayed empty
            entity_cache[entity_id] = entity
        if reference[TYPE] == DATA_ROW:
            # If the reference is for a data row, simply set the entity of the row.
            reference[REFERENCE].entity = entity
        elif reference[TYPE] == PROCESSOR:
            # If the reference is for a processor, set the given parameter as the entity.
            setattr(reference[REFERENCE], reference[PARAMETER], entity)
            data_query_info = reference[REFERENCE].children.get(reference[PARAMETER])
            if not data_query_info:
                raise MqValueError(
                    f'{reference[PARAMETER]} does not exist in children of '
                    f'{reference[REFERENCE].__class__.__name__}')
            data_query_info.entity = entity