def execute(self, context: t.Dict, session=None):
    """Trigger one DagRun of ``trigger_dag_id`` per config yielded by the callable.

    Merges ``op_kwargs`` into the template context, re-resolves the callable's
    kwargs against that context, then iterates the configs the callable
    produces, creating a manually-triggered DagRun for each truthy config.
    The ids of all created runs are pushed to XCom under
    ``CREATED_DAGRUN_KEY`` so downstream tasks can locate them.

    :param context: Airflow task-instance template context.
    :param session: accepted for Airflow's session-injection convention.
        # NOTE(review): appears unused in this body — confirm.
    """
    context.update(self.op_kwargs)
    # Re-derive kwargs so the callable only receives arguments it accepts,
    # now that op_kwargs have been folded into the context.
    self.op_kwargs = determine_kwargs(self.python_callable, self.op_args, context)
    created_dr_ids = []
    for conf in self.python_callable(*self.op_args, **self.op_kwargs):
        # A falsy conf stops iteration entirely (break, not continue) —
        # the callable signals "no more runs" by yielding a falsy value.
        if not conf:
            break
        execution_date = timezone.utcnow()
        run_id = DagRun.generate_run_id(DagRunType.MANUAL, execution_date)
        dag_run = trigger_dag(
            dag_id=self.trigger_dag_id,
            run_id=run_id,
            conf=conf,
            execution_date=execution_date,
            replace_microseconds=False,
        )
        created_dr_ids.append(dag_run.id)
        self.log.info("Created DagRun %s, %s - %s", dag_run, self.trigger_dag_id, run_id)
    if created_dr_ids:
        # Expose created run ids to downstream tasks via XCom.
        context['ti'].xcom_push(self.CREATED_DAGRUN_KEY, created_dr_ids)
    else:
        self.log.info("No DagRuns created")
def test_load_bad_data(agency_data_update: typing.Dict, agency_data: typing.Dict):
    """Schema load must reject agency data corrupted by the bad-field fixture."""
    # Arrange: inject the invalid fields into an otherwise valid payload.
    agency_data.update(agency_data_update)
    # Act / Assert: deserialization raises a marshmallow validation error.
    with pytest.raises(mm.ValidationError):
        schemas.AgencySchema().load(agency_data)
def _get_region_params(region: typing.Dict, params: typing.Dict, interface_config: typing.Dict): region_type = region["type"] if region_type == "region": interface_config["region"] = "Region" v = region["admin_codes"] if isinstance(v, typing.List): v = ",".join(v) elif isinstance(v, int): v = str(v) params["adminCodes"] = v elif region_type == "rect": interface_config["region"] = "Rect" start_lat = region["start_latitude"] end_lat = region["end_latitude"] start_lon = region["start_longitude"] end_lon = region["end_longitude"] min_lat, max_lat = sorted([start_lat, end_lat]) min_lon, max_lon = sorted([start_lon, end_lon]) params.update({ "minLat": f"{min_lat}", "minLon": f"{min_lon}", "maxLat": f"{max_lat}", "maxLon": f"{max_lon}", }) elif region_type == "basin": interface_config["region"] = "Basin" v = region["basin_codes"] if isinstance(v, typing.List): v = ",".join(v) params["basinCodes"] = v else: raise ValueError(f"region type is not supported: {region_type}")
def test_load_bad_data(line_data_update: typing.Dict, line_data: typing.Dict):
    """Schema load must reject line data corrupted by the bad-field fixture."""
    # Arrange: inject the invalid fields into an otherwise valid payload.
    line_data.update(line_data_update)
    # Act / Assert: deserialization raises a marshmallow validation error.
    with pytest.raises(mm.ValidationError):
        schemas.LineSchema().load(line_data)
def test_load_bad_data(trip_data_update: typing.Dict, trip_data: typing.Dict):
    """Schema load must reject trip data corrupted by the bad-field fixture."""
    # Arrange: inject the invalid fields into an otherwise valid payload.
    trip_data.update(trip_data_update)
    # Act / Assert: deserialization raises a marshmallow validation error.
    with pytest.raises(mm.ValidationError):
        schemas.TripSchema().load(trip_data)
def test_load_bad_data(stop_time_data_update: typing.Dict, stop_time_data: typing.Dict):
    """Schema load must reject stop-time data corrupted by the bad-field fixture."""
    # Arrange: inject the invalid fields into an otherwise valid payload.
    stop_time_data.update(stop_time_data_update)
    # Act / Assert: deserialization raises a marshmallow validation error.
    with pytest.raises(mm.ValidationError):
        schemas.StopTimeSchema().load(stop_time_data)
def test_load_bad_data(direction_data_update: typing.Dict, direction_data: typing.Dict):
    """Schema load must reject direction data corrupted by the bad-field fixture."""
    # Arrange: inject the invalid fields into an otherwise valid payload.
    direction_data.update(direction_data_update)
    # Act / Assert: deserialization raises a marshmallow validation error.
    with pytest.raises(mm.ValidationError):
        schemas.DirectionSchema().load(direction_data)
def test_load_bad_data(multi_route_trip_data_update: typing.Dict, multi_route_trip_data: typing.Dict):
    """Schema load must reject multi-route-trip data corrupted by the bad-field fixture."""
    # Arrange: inject the invalid fields into an otherwise valid payload.
    multi_route_trip_data.update(multi_route_trip_data_update)
    # Act / Assert: deserialization raises a marshmallow validation error.
    with pytest.raises(mm.ValidationError):
        schemas.MultiRouteTripSchema().load(multi_route_trip_data)
def test_load_bad_data(route_pattern_data_update: typing.Dict, route_pattern_data: typing.Dict):
    """Schema load must reject route-pattern data corrupted by the bad-field fixture."""
    # Arrange: inject the invalid fields into an otherwise valid payload.
    route_pattern_data.update(route_pattern_data_update)
    # Act / Assert: deserialization raises a marshmallow validation error.
    with pytest.raises(mm.ValidationError):
        schemas.RoutePatternSchema().load(route_pattern_data)
def test_load_bad_data(calendar_data_update: typing.Dict, calendar_data: typing.Dict):
    """Schema load must reject calendar data corrupted by the bad-field fixture."""
    # Arrange: inject the invalid fields into an otherwise valid payload.
    calendar_data.update(calendar_data_update)
    # Act / Assert: deserialization raises a marshmallow validation error.
    with pytest.raises(mm.ValidationError):
        schemas.CalendarSchema().load(calendar_data)
def _fill_level_value( level: int or float, filter_by_keys: typing.Dict, read_keys: typing.List ) -> typing.Tuple[typing.Dict, typing.List]: filter_by_keys.update({ "level": level, }) return filter_by_keys, read_keys
def test_load_bad_data(
    checkpoint_data_update: typing.Dict, checkpoint_data: typing.Dict
):
    """Schema load must reject checkpoint data corrupted by the bad-field fixture."""
    # Arrange: inject the invalid fields into an otherwise valid payload.
    checkpoint_data.update(checkpoint_data_update)
    # Act / Assert: deserialization raises a marshmallow validation error.
    with pytest.raises(mm.ValidationError):
        schemas.CheckpointSchema().load(checkpoint_data)
def from_record(record: typing.Dict) -> HighScore:
    """Build a HighScore from a raw record dict, inflating nested skill dicts.

    The record is consumed: 'skills' is popped, any derived 'xp_sum' field is
    dropped, and the inflated Skill objects are merged back in before the
    remaining fields are handed to the HighScore constructor.
    """
    raw_skills = record.pop('skills')
    # 'xp_sum' is derived data; make sure it never reaches the constructor.
    record.pop('xp_sum', None)
    record.update({name: Skill(**data) for name, data in raw_skills.items()})
    return HighScore(**record)
def test_missing_end_date(template: typing.Dict) -> None:
    """With only a period start set, every date field should be filled from it."""
    start = datetime.date(2017, 1, 1)
    template.update({'contract_period_start': start})
    frame = pd.DataFrame([template])
    cleaned = contract_date_cleaner.ContractDateCleaner().apply(frame)
    expected = {
        'contract_date': start,
        'contract_period_start': start,
        'contract_period_end': start,
        'source_fiscal': start,
    }
    assert cleaned.to_dict('records') == [expected]
def post(self: Resource, args: typing.Dict) -> typing.Tuple[typing.Dict, int]:
    """Create a doc type (including its terms) for the current NLP task route.

    :param args: request payload; task id and caller's group id are injected.
    :return: (response payload, 201)
    """
    args['nlp_task_id'] = Common().get_nlp_task_id_by_route()
    args['group_id'] = self.get_current_user().user_groups[0]
    created = DocTypeService().create_doc_type(self.get_current_user(), args)
    return {
        "message": "创建成功",
        "result": created,
    }, 201
def _fill_level( level_type: str, level: int, filter_by_keys: typing.Dict, read_keys: typing.List ) -> typing.Tuple[typing.Dict, typing.List]: filter_by_keys.update({ "typeOfLevel": level_type, "level": level, }) return filter_by_keys, read_keys
def post(self: Resource, args: typing.Dict) -> typing.Tuple[typing.Dict, int]:
    """Create a relation doc type (including its terms).

    :param args: request payload; relation task id and group id are injected.
    :return: (response payload, 201)
    """
    args["nlp_task_id"] = NlpTaskEnum.relation.value
    args["group_id"] = self.get_current_user().user_groups[0]
    created = DocTypeService.create_relation_doc_type(args)
    return {
        "message": "创建成功",
        "result": created,
    }, 201
def _add_rnn_uninferable(uninferable: typing.Dict, module) -> typing.Dict: if module.__name__ in ["RNN", "LSTM", "GRU"]: uninferable.update({ "input_size": "?", "num_layers": 1, "bias": False, "batch_first": False, "dropout": 0.0, "bidirectional": False, }) return uninferable
def apply(self, headers: typing.Dict, token: str = None):
    """Apply the token to the authentication header, as well as the API Key
    (specific to IG Market).

    Args:
        headers (Mapping): The HTTP request headers, mutated in place.
        token (Optional[str]): If specified, overrides the current access token.
    """
    bearer = helpers.from_bytes(token or self.token)
    headers['Authorization'] = f"Bearer {bearer}"
def update_source(self, source: t.Dict):
    """Push this object's non-null digests into *source*.

    Only digest kinds that are both set on self and already present as keys
    in *source* are copied over.

    :raises SourceUpdateError: when self and *source* share no digest kind.
    """
    common = {}
    for kind, digest in self._asdict().items():  # pylint: disable=no-member
        if digest is not None and kind in source:
            common[kind] = digest
    if not common:
        # No overlapping digest type — refuse rather than silently no-op.
        raise SourceUpdateError(f"No matching digest type for {self} in {source}")
    log.debug("Updating %s in %s", common.keys(), source)
    source.update(common)
def _fill_level_type( level_type: str or dict, filter_by_keys: typing.Dict, read_keys: typing.List ) -> typing.Tuple[typing.Dict, typing.List]: if isinstance(level_type, dict): filter_by_keys.update(level_type) read_keys.extend([key for key in level_type if key not in read_keys]) else: filter_by_keys.update({ "typeOfLevel": level_type, }) return filter_by_keys, read_keys
def _update_day_times(forecast_data: typing.Dict, forecast: typing.Dict) -> None: sunrise = datetime.fromtimestamp(forecast_data['sys']['sunrise'], tz=timezone.utc) sunset = datetime.fromtimestamp(forecast_data['sys']['sunset'], tz=timezone.utc) forecast_time = datetime.fromtimestamp(forecast_data['dt'], tz=timezone.utc) forecast.update({ 'forecast_time': forecast_time, 'sunrise_time': sunrise, 'sunset_time': sunset })
def get(self: Resource, args: typing.Dict) -> typing.Tuple[typing.Dict, int]:
    """List all terms, paginated; supports excluding ids via exclude_terms_ids.

    :param args: query args; 'nlp_task_id' is injected from the route.
    :return: (response payload with result list and count, 200)
    """
    args['nlp_task_id'] = Common().get_nlp_task_id_by_route()
    items, total = DocTermService().get_doc_term_list(args)
    return {
        "message": "请求成功",
        "result": items,
        "count": total,
    }, 200
def _get_level_params(level_type, level, interface_config: typing.Dict, params: typing.Dict): if level is None: return def get_level(level_type, level): if level_type == "pl": interface_level_config = "Press" level_params_name = "pLayers" elif level_type in ("hgt", "gh"): interface_level_config = "Height" level_params_name = "hLayers" elif level_type == "vertical": interface_level_config = "Vertical" level_params_name = "verticals" elif level_type in ("fl", "flight_height"): interface_level_config = "Height" interface_config["name"] = "getUparArdEle" level_params_name = "fLayer" else: raise ValueError(f"level_type is not supported: {level_type}") level_params = dict() if isinstance(level, typing.List): level_params[level_params_name] = ",".join(level) if isinstance(level, pd.Interval): level_params[f"min{level_params_name.upper()}"] = level.left level_params[f"max{level_params_name.upper()}"] = level.right interface_level_config += "Range" else: params[level_params_name] = str(level) return interface_level_config, level_params if isinstance(level_type, str): interface_level_config, level_params = get_level(level_type, level) elif isinstance(level_type, typing.Tuple): interface_level_config = [] level_params = dict() for lt, l in zip(level_type, level): level_config, ps = get_level(lt, l) level_params.update(ps) interface_level_config.append(level_config) interface_level_config = "And".join(interface_level_config) else: raise TypeError(f"level_type is not supported: {level_type}") interface_config["level"] = interface_level_config params.update(level_params)
def get(self: Resource, args: typing.Dict) -> typing.Tuple[typing.Dict, int]:
    """Fetch doc types visible to the current user for the current NLP route.

    :param args: query args; 'nlp_task_id' is injected from the route.
    :return: (response payload with result list and count, 200)
    """
    args['nlp_task_id'] = Common().get_nlp_task_id_by_route()
    items, total = DocTypeService().get_doc_type(self.get_current_user(), args)
    return {
        "message": "请求成功",
        "result": items,
        "count": total,
    }, 200
def create_tokens(self, payload: typing.Dict) -> typing.Dict:
    """Create and return auth JWT tokens.

    :param payload: user-info claims; note it is mutated in place ('exp' is
        set twice, once per token).
    :return: Bearer tokens dict(access_token, refresh_token)
    """
    payload.update({'exp': datetime.utcnow() + self.__expired_time})
    access = jwt.encode(
        payload, self.__secret_key, algorithm=self.__algorithm
    ).decode('utf-8')
    # Refresh token gets its own (longer) expiry and its own signing key.
    payload.update({'exp': datetime.utcnow() + self.__expired_time_refresh})
    refresh = jwt.encode(
        payload, self.__secret_key_refresh, algorithm=self.__algorithm
    ).decode('utf-8')
    return {
        'access_token': access,
        'refresh_token': refresh,
    }
def lat_lon_to_solr(coreMetadata: typing.Dict, latitude: typing.SupportsFloat, longitude: typing.SupportsFloat):
    """Merge the Solr spatial fields for a (lon, lat) point into coreMetadata."""
    point = shapely.geometry.Point(longitude, latitude)
    coreMetadata.update(shapely_to_solr(point))
def set_trace_id(self, obj: typing.Dict) -> typing.Dict:
    """Stamp this instance's trace id onto *obj* (in place) and return it."""
    obj[X_APPLICATION_TRACE_ID] = self.trace_id
    return obj
def _active_response(payload: typing.Dict) -> typing.Dict: payload.update({'active': True}) return payload