def __init__(self, a: DataCoordinateOrProcessor, *,
             start: Optional[DateOrDatetimeOrRDate] = None,
             end: Optional[DateOrDatetimeOrRDate] = None,
             w: Union[Window, int] = Window(None, 0),
             returns_type: Returns = Returns.SIMPLE,
             **kwargs):
    """ VolatilityProcessor

    Applies the volatility timeseries function to the child series ``a``.

    :param a: DataCoordinate or BaseProcessor for the series to apply the volatility timeseries function
    :param start: start date or time used in the underlying data query
    :param end: end date or time used in the underlying data query
    :param w: Window or int: size of window and ramp up to use. e.g. Window(22, 10) where 22 is the window
              size and 10 the ramp up value. If w is a string, it should be a relative date like '1m', '1d',
              etc. Window size defaults to length of series.
    :param returns_type: returns type: simple, logarithmic or absolute
    """
    super().__init__(**kwargs)
    # series to operate on (coordinate or upstream processor)
    self.children['a'] = a
    # date range for the underlying data query
    self.end = end
    self.start = start
    # timeseries-function parameters
    self.returns_type = returns_type
    self.w = w
def __init__(self, a: DataCoordinateOrProcessor, benchmark: Entity,
             start: Optional[DateOrDatetimeOrRDate] = None,
             end: Optional[DateOrDatetimeOrRDate] = None,
             w: Union[Window, int] = Window(None, 0),
             type_: SeriesType = SeriesType.PRICES):
    """ Correlation Processor

    Correlates the child series ``a`` against a benchmark entity's series.

    :param a: Coordinate
    :param benchmark: Benchmark Entity to correlate coordinate data to
    :param start: start date or time used in the underlying data query
    :param end: end date or time used in the underlying data query
    :param w: Window or int: size of window and ramp up to use. e.g. Window(22, 10) where 22 is the window
              size and 10 the ramp up value. If w is a string, it should be a relative date like '1m', '1d',
              etc. Window size defaults to length of series.
    :param type_: type of both input series: prices or returns
    """
    super().__init__()
    # coordinate
    self.children['a'] = a
    # Used for additional query
    self.benchmark: Entity = benchmark
    # datetime
    self.start = start
    self.end = end
    # NOTE(review): get_benchmark_coordinate presumably derives a coordinate from
    # self.benchmark, so this must run after the benchmark assignment above — confirm.
    self.children['benchmark'] = self.get_benchmark_coordinate()
    # parameters
    self.w = w
    self.type_ = type_
def __init__(self, a: DataCoordinateOrProcessor, b: DataCoordinateOrProcessor, *,
             start: Optional[DateOrDatetimeOrRDate] = None,
             end: Optional[DateOrDatetimeOrRDate] = None,
             w: Union[Window, int] = Window(None, 0),
             **kwargs):
    """ BetaProcessor

    :param a: DataCoordinate or BaseProcessor for the first series
    :param b: DataCoordinate or BaseProcessor for the second series
    :param start: start date or time used in the underlying data query
    :param end: end date or time used in the underlying data query
    :param w: Window or int: size of window and ramp up to use. e.g. Window(22, 10) where 22 is the window
              size and 10 the ramp up value. If w is a string, it should be a relative date like '1m', '1d',
              etc. Window size defaults to length of series.

    **Usage**

    Calculate rolling `beta <https://en.wikipedia.org/wiki/Beta_(finance)>`_

    If window is not provided, computes beta over the full series
    """
    super().__init__(**kwargs)
    # the two series whose rolling beta is computed
    self.children['b'] = b
    self.children['a'] = a
    # date range for the underlying data query
    self.end = end
    self.start = start
    # rolling window (full series when size is None)
    self.w = w
def __init__(self, a: DataCoordinateOrProcessor, *, benchmark: Entity,
             start: Optional[DateOrDatetimeOrRDate] = None,
             end: Optional[DateOrDatetimeOrRDate] = None,
             w: Union[Window, int] = Window(None, 0),
             type_: SeriesType = SeriesType.PRICES):
    """ CorrelationProcessor

    Correlates the child series ``a`` against a benchmark entity's series.

    :param a: DataCoordinate or BaseProcessor for the series
    :param benchmark: benchmark to compare price series
    :param start: start date or time used in the underlying data query
    :param end: end date or time used in the underlying data query
    :param w: Window, int, or str: size of window and ramp up to use. e.g. Window(22, 10) where 22 is the
              window size and 10 the ramp up value. If w is a string, it should be a relative date like '1m',
              '1d', etc. Window size defaults to length of series.
    :param type_: type of both input series: prices or returns
    """
    super().__init__()
    # primary series to correlate
    self.children['a'] = a
    # benchmark entity; kept so a second query can be issued for its series
    self.benchmark: Entity = benchmark
    # date range for the underlying data query
    self.end = end
    self.start = start
    # benchmark coordinate is derived from self.benchmark, so register it after
    # the assignment above
    self.children['benchmark'] = self.get_benchmark_coordinate()
    # correlation parameters
    self.type_ = type_
    self.w = w
def process(self,
            w: Union[Window, int] = Window(None, 0),
            type_: SeriesType = SeriesType.PRICES):
    """ Compute the excess-returns ratio once both child results are available.

    Reads the 'a' and 'excess_returns' child results, computes excess returns and
    the ratio over the processor's stored window, and stores a ProcessorResult in
    ``self.value``.

    :param w: unused — the window configured on the processor (``self.w``) is used;
              kept for signature compatibility with sibling processors
    :param type_: unused — kept for signature compatibility with sibling processors
    """
    a_data = self.children_data.get('a')
    excess_returns_data = self.children_data.get('excess_returns')
    if isinstance(a_data, ProcessorResult) and isinstance(
            excess_returns_data, ProcessorResult):
        if a_data.success and excess_returns_data.success:
            excess_returns = excess_returns_pure(a_data.data, excess_returns_data.data)
            ratio = get_ratio_pure(excess_returns, self.w)
            self.value = ProcessorResult(True, ratio)
        else:
            # Fix: previously self.value was silently left unset when a child query
            # failed; report an explicit failure, matching the error handling of the
            # correlation processor in this file.
            self.value = ProcessorResult(
                False, "Processor does not have A and Excess Returns data yet")
    else:
        # Child data not yet populated (or wrong type) — report failure explicitly.
        self.value = ProcessorResult(
            False, "Processor does not have A and Excess Returns data yet")
def __init__(self, a: DataCoordinateOrProcessor,
             b: Optional[DataCoordinateOrProcessor] = None,
             start: Optional[DateOrDatetimeOrRDate] = None,
             end: Optional[DateOrDatetimeOrRDate] = None,
             w: Union[Window, int] = Window(None, 0)):
    """ Last Processor

    NOTE(review): the summary says "Last Processor" but the parameter docs describe
    rolling percentiles — this docstring looks copy-pasted from PercentilesProcessor;
    confirm which processor this belongs to and fix the text accordingly.

    :param a: Value series to get the rolling percentiles
    :param b: Distribution series
    :param start: start date or time used in the underlying data query
    :param end: end date or time used in the underlying data query
    :param w: Window or int: size of window and ramp up to use. e.g. Window(22, 10) where 22 is the window
              size and 10 the ramp up value. Window size defaults to length of series.
    """
    super().__init__()
    # child series: 'a' is required, 'b' is optional
    self.children['a'] = a
    self.children['b'] = b
    # date range for the underlying data query
    self.start = start
    self.end = end
    # rolling window
    self.w = w
def process(self,
            w: Union[Window, int] = Window(None, 0),
            type_: SeriesType = SeriesType.PRICES):
    """ Compute the correlation of the 'a' series against the benchmark series.

    Reads the 'a' and 'benchmark' child results, runs the correlation timeseries
    function over the processor's stored window and series type, and stores a
    ProcessorResult in ``self.value``. On missing or failed child data, stores an
    explicit failure result instead.

    :param w: unused — the window configured on the processor (``self.w``) is used;
              kept for signature compatibility with sibling processors
    :param type_: unused — the series type configured on the processor
                  (``self.type_``) is used; kept for signature compatibility
    """
    a_data = self.children_data.get('a')
    benchmark_data = self.children_data.get('benchmark')
    if isinstance(a_data, ProcessorResult) and isinstance(
            benchmark_data, ProcessorResult):
        if a_data.success and benchmark_data.success:
            # Fix: type_ was hardcoded to SeriesType.PRICES, silently ignoring the
            # series type stored by __init__ (self.type_); honor the configuration.
            result = correlation(a_data.data, benchmark_data.data,
                                 w=self.w, type_=self.type_)
            self.value = ProcessorResult(True, result)
        else:
            self.value = ProcessorResult(
                False, "Processor does not have A and Benchmark data yet")
    else:
        self.value = ProcessorResult(
            False, "Processor does not have A and Benchmark data yet")
def __init__(self, a: DataCoordinateOrProcessor, *,
             b: Optional[DataCoordinateOrProcessor] = None,
             start: Optional[DateOrDatetimeOrRDate] = None,
             end: Optional[DateOrDatetimeOrRDate] = None,
             w: Union[Window, int] = Window(None, 0)):
    """ PercentilesProcessor

    :param a: DataCoordinate or BaseProcessor for the first series
    :param b: DataCoordinate or BaseProcessor for the second series
    :param start: start date or time used in the underlying data query
    :param end: end date or time used in the underlying data query
    :param w: Window or int: size of window and ramp up to use. e.g. Window(22, 10) where 22 is the window
              size and 10 the ramp up value. If w is a string, it should be a relative date like '1m', '1d',
              etc. Window size defaults to length of series.
    """
    super().__init__()
    # child series: 'a' is required, 'b' is optional
    self.children['b'] = b
    self.children['a'] = a
    # date range for the underlying data query
    self.end = end
    self.start = start
    # rolling window
    self.w = w
import gs_quant.timeseries as ts
from gs_quant.timeseries import Window

# Generate a random timeseries with 1000 observations.
prices = ts.generate_series(1000)

# Compute realized volatility using a window of 22 and a ramp up value of 0.
realized_vol = ts.volatility(prices, Window(22, 0))

# Show the last few values.
realized_vol.tail()