def test_get_date_params_with_time_zone():
    """Date ranges computed for customers in far-apart time zones must differ."""
    # Chatham Islands run ahead of UTC (+12:45) while Honolulu lags it (-10:00),
    # so "yesterday" is a different calendar date between them for most of the day.
    tz_ahead = Timezone("Pacific/Chatham")
    tz_behind = Timezone("Pacific/Honolulu")
    customer_ahead = Customer(id="id", time_zone=tz_ahead, is_manager_account=False)
    customer_behind = Customer(id="id_2", time_zone=tz_behind, is_manager_account=False)
    yesterday_ahead = pendulum.today(tz=tz_ahead).subtract(days=1).to_date_string()
    yesterday_behind = pendulum.today(tz=tz_behind).subtract(days=1).to_date_string()

    config = {
        "conversion_window_days": 14,
        "start_date": yesterday_ahead,
        "api": MockGoogleAdsClient(SAMPLE_CONFIG),
        "customers": [customer_ahead],
    }
    stream_ahead = IncrementalGoogleAdsStream(**config)
    start_date_chatham, end_date_chatham = get_date_params(
        start_date=yesterday_ahead,
        time_zone=customer_ahead.time_zone,
        range_days=stream_ahead.range_days,
    )

    config.update({"start_date": yesterday_behind, "customers": [customer_behind]})
    stream_behind = IncrementalGoogleAdsStream(**config)
    start_date_honolulu, end_date_honolulu = get_date_params(
        start_date=yesterday_behind,
        time_zone=customer_behind.time_zone,
        range_days=stream_behind.range_days,
    )

    assert start_date_honolulu != start_date_chatham and end_date_honolulu != end_date_chatham
def _deserialize(cls, encoded_var: Any) -> Any:  # pylint: disable=too-many-return-statements
    """Helper function of depth first search for deserialization.

    Reconstructs the original object from its encoded form: primitives pass
    through, lists/sets/tuples/dicts recurse element-wise, and tagged dicts
    (``Encoding.TYPE`` / ``Encoding.VAR``) dispatch on their ``DAT`` tag.

    :param encoded_var: the serialized representation to decode.
    :return: the deserialized object.
    :raises ValueError: if a non-primitive, non-list value is not a dict.
    :raises TypeError: if the ``DAT`` tag is not recognised.
    """
    # JSON primitives (except for dict) are not encoded.
    if cls._is_primitive(encoded_var):
        return encoded_var
    elif isinstance(encoded_var, list):
        return [cls._deserialize(v) for v in encoded_var]

    if not isinstance(encoded_var, dict):
        raise ValueError(f"The encoded_var should be dict and is {type(encoded_var)}")
    var = encoded_var[Encoding.VAR]
    type_ = encoded_var[Encoding.TYPE]

    if type_ == DAT.DICT:
        return {k: cls._deserialize(v) for k, v in var.items()}
    elif type_ == DAT.DAG:
        return SerializedDAG.deserialize_dag(var)
    elif type_ == DAT.OP:
        return SerializedBaseOperator.deserialize_operator(var)
    elif type_ == DAT.DATETIME:
        return pendulum.from_timestamp(var)
    elif type_ == DAT.TIMEDELTA:
        return datetime.timedelta(seconds=var)
    elif type_ == DAT.TIMEZONE:
        return Timezone(var)
    elif type_ == DAT.RELATIVEDELTA:
        # relativedelta serializes its weekday as a (number, n) pair; restore it.
        if 'weekday' in var:
            var['weekday'] = relativedelta.weekday(*var['weekday'])  # type: ignore
        return relativedelta.relativedelta(**var)
    elif type_ == DAT.SET:
        return {cls._deserialize(v) for v in var}
    elif type_ == DAT.TUPLE:
        # Feed the generator straight to tuple() — no intermediate list needed.
        return tuple(cls._deserialize(v) for v in var)
    else:
        # Same message as before, expressed as an f-string for consistency
        # with the sibling deserializer in this codebase.
        raise TypeError(f'Invalid type {type_!s} in deserialization.')
def test_should_works_with_datetime_formatter(self):
    """The datetime formatter should hand the endpoint a parsed, UTC-aware value."""
    endpoint = mock.MagicMock()
    wrapped = format_parameters({"param_a": format_datetime})(endpoint)

    wrapped(param_a='2020-01-01T0:0:00+00:00')

    # The raw ISO-8601 string must arrive at the endpoint as a DateTime object.
    endpoint.assert_called_once_with(param_a=DateTime(2020, 1, 1, 0, tzinfo=Timezone('UTC')))
def from_accounts(cls, accounts: Iterable[Iterable[Mapping[str, Any]]]):
    """Build one instance per account record found in the nested account lists.

    :param accounts: an iterable of account batches, each batch an iterable of
        account mappings keyed by Google Ads field names.
    :return: a flat list of constructed instances.
    """
    instances = []
    for batch in accounts:
        for record in batch:
            zone_name = record.get("customer.time_zone")
            # Fall back to the machine-local zone when the API omits one.
            zone = Timezone(zone_name) if zone_name else "local"
            instances.append(
                cls(
                    id=str(record["customer.id"]),
                    time_zone=zone,
                    is_manager_account=bool(record.get("customer.manager")),
                )
            )
    return instances
def test_get_date_params_with_time_zone():
    """Slices computed for streams in opposite-offset time zones should not match."""
    tz_east = Timezone("Pacific/Chatham")  # UTC+12:45
    tz_west = Timezone("Pacific/Honolulu")  # UTC-10:00
    yesterday_east = pendulum.today(tz=tz_east).subtract(days=1).to_date_string()
    yesterday_west = pendulum.today(tz=tz_west).subtract(days=1).to_date_string()

    config = dict(
        conversion_window_days=14,
        start_date=yesterday_east,
        api=MockGoogleAdsClient(SAMPLE_CONFIG),
        time_zone=tz_east,
    )
    start_date_chatham, end_date_chatham = IncrementalGoogleAdsStream(**config).get_date_params(
        stream_slice={"segments.date": yesterday_east}, cursor_field="segments.date"
    )

    # Re-point the same config at the Honolulu stream and recompute.
    config["start_date"] = yesterday_west
    config["time_zone"] = tz_west
    start_date_honolulu, end_date_honolulu = IncrementalGoogleAdsStream(**config).get_date_params(
        stream_slice={"segments.date": yesterday_west}, cursor_field="segments.date"
    )

    assert start_date_honolulu != start_date_chatham and end_date_honolulu != end_date_chatham
def _deserialize(cls, encoded_var: Any) -> Any:
    """Helper function of depth first search for deserialization.

    Primitives pass through untouched, lists recurse element-wise, and any
    other value must be a tagged dict whose ``Encoding.TYPE`` selects how its
    ``Encoding.VAR`` payload is reconstructed.
    """
    # JSON primitives (except for dict) are not encoded.
    if cls._is_primitive(encoded_var):
        return encoded_var
    if isinstance(encoded_var, list):
        return [cls._deserialize(item) for item in encoded_var]
    if not isinstance(encoded_var, dict):
        raise ValueError(f"The encoded_var should be dict and is {type(encoded_var)}")

    payload = encoded_var[Encoding.VAR]
    kind = encoded_var[Encoding.TYPE]

    if kind == DAT.DICT:
        return {key: cls._deserialize(value) for key, value in payload.items()}
    if kind == DAT.DAG:
        return SerializedDAG.deserialize_dag(payload)
    if kind == DAT.OP:
        return SerializedBaseOperator.deserialize_operator(payload)
    if kind == DAT.DATETIME:
        return pendulum.from_timestamp(payload)
    if kind == DAT.POD:
        # POD support is optional; fail loudly rather than half-deserialize.
        if not HAS_KUBERNETES:
            raise RuntimeError(
                "Cannot deserialize POD objects without kubernetes libraries installed!"
            )
        return PodGenerator.deserialize_model_dict(payload)
    if kind == DAT.TIMEDELTA:
        return datetime.timedelta(seconds=payload)
    if kind == DAT.TIMEZONE:
        return Timezone(payload)
    if kind == DAT.RELATIVEDELTA:
        # relativedelta serializes its weekday as a (number, n) pair; restore it.
        if 'weekday' in payload:
            payload['weekday'] = relativedelta.weekday(*payload['weekday'])  # type: ignore
        return relativedelta.relativedelta(**payload)
    if kind == DAT.SET:
        return {cls._deserialize(item) for item in payload}
    if kind == DAT.TUPLE:
        return tuple(cls._deserialize(item) for item in payload)
    raise TypeError(f'Invalid type {kind!s} in deserialization.')
def __init__(self, cron: str, timezone: str | Timezone) -> None:
    """Resolve a cron preset or expression and derive a human-readable description.

    :param cron: a preset name (looked up in ``cron_presets``) or a raw cron
        expression.
    :param timezone: the schedule's time zone, as a name or ``Timezone`` object.
    """
    self._expression = cron_presets.get(cron, cron)
    self._timezone = Timezone(timezone) if isinstance(timezone, str) else timezone

    descriptor = ExpressionDescriptor(
        expression=self._expression,
        casing_type=CasingTypeEnum.Sentence,
        use_24hour_time_format=True,
    )
    try:
        # Croniter evaluates expressions with more than 5 fields inconsistently
        # with other libraries, so skip describing those instead of guessing.
        if len(croniter(self._expression).expanded) > 5:
            raise FormatException()
        self.description = descriptor.get_description()
    except (CroniterBadCronError, FormatException, MissingFieldException):
        self.description = ""