def write_output(path: str, stat_set: BaseModel, is_local: bool = False) -> int:
    if settings.export_local:
        is_local = True

    if hasattr(stat_set, "json"):
        indent = None

        if settings.debug:
            indent = 4

        write_content = stat_set.json(exclude_unset=True, indent=indent)
    else:
        write_content = json.dumps(stat_set)

    byte_count = 0

    if is_local:
        byte_count = write_to_local(path, write_content)
    else:
        byte_count = write_to_s3(path, write_content)

    return byte_count
async def invoke_api_method(
    method_name: str,
    data: BaseModel,
    output_type: Type[OutputDataT],
) -> OutputDataT:
    """
    Generic function which invokes an arbitrary method from Telegram Bot API

    :param method_name: name of the API method
    :param data: some data for the method
    :param output_type: type of response data
    :return: object of output type
    """
    url = f"{TELEGRAM_BOT_API}/{method_name}"
    async with aiohttp.ClientSession() as session:
        response: ClientResponse
        async with session.post(url, json=data.dict()) as response:
            payload = await response.json()
            if response.status != status.HTTP_200_OK:
                debug(response)
                debug(payload)
                errmsg = (
                    f"method {method_name!r}"
                    f" failed with status {response.status}"
                )
                raise RuntimeError(errmsg)

    result = output_type.parse_obj(payload)
    return result
def save(self, model: BaseModel, section: str) -> None:
    subsections = section.split('/')
    value = self.__config
    for subsection in subsections:
        if subsection not in value:
            value[subsection] = {}
        value = value[subsection]
    value.update(model.dict())
    with open(self.config_file, "w") as f:
        f.write(dump(self.__config, Dumper=Dumper))
def gen_args_from_params(name: str, params_type: BaseModel) -> GraphQLArgument:
    schema = params_type.schema()
    fields = {}
    required_field_names = schema.get("required", [])
    for field_name, field_schema in schema["properties"].items():
        fields[field_name] = transform_validator_field(
            field_name, field_schema, field_name in required_field_names)
    return GraphQLArgument(
        GraphQLNonNull(InputObjectType(
            f"params_{name}",
            fields=fields,
        )))
def _add_parameter(self, sess: Session, task: Task, param: BaseModel):
    if task is None:
        raise PPLException(ErrorCode.TASK_NOT_EXISTS, None)
    data = Data()
    data.data = param.json()
    sess.add(data)
    if task.parameter is not None:
        db_param = task.parameter
    else:
        db_param = Parameter()
    db_param.data = data
    db_param.task = task
    sess.add(db_param)
def _dump_result(self, sess: Session, task_id: int, result: BaseModel):
    task = self._get_task(sess, task_id)
    if task is None:
        raise PPLException(ErrorCode.TASK_NOT_EXISTS, task_id)
    data = Data()
    data.data = result.json()
    sess.add(data)
    if task.result is not None:
        db_result = task.result
    else:
        db_result = Result()
    db_result.task = task
    db_result.data = data
    sess.add(db_result)
def create(collection_name: str, instance, base_model: BaseModel):
    table_instance = get_table_model(collection_name)()
    instance_dict: dict = convert_to_dict(instance)
    for key, value in instance_dict.items():
        setattr(table_instance, key, value)
    session = Session(engine, future=True)
    try:
        session.add(table_instance)
        session.commit()
    except:
        session.rollback()
        raise
    finally:
        session.close()
    return base_model.parse_obj(instance)
def _save(service_name: str, filename: Path, model: BaseModel):
    output_path = filename
    if has_multiple_services:
        output_path = filename.parent / service_name / filename.name
    output_path.parent.mkdir(parents=True, exist_ok=True)

    click.echo(f"Creating {output_path} ...", nl=False)
    with output_path.open("wt") as fh:
        data = json.loads(
            model.json(exclude_unset=True, by_alias=True, exclude_none=True))
        yaml.safe_dump(data, fh, sort_keys=False)
    click.echo("DONE")
def write_output(
    path: str,
    stat_set: BaseModel,
    is_local: bool = False,
    exclude_unset: bool = True,
    exclude: set = None,
) -> int:
    if settings.export_local:
        is_local = True

    if hasattr(stat_set, "json"):
        indent = None

        if settings.debug:
            indent = 4

        write_content = stat_set.json(
            exclude_unset=exclude_unset, indent=indent, exclude=exclude)
    else:
        write_content = json.dumps(stat_set)

    byte_count = 0

    if is_local:
        byte_count = write_to_local(path, write_content)
    elif isinstance(stat_set, str):
        byte_count = write_to_s3(stat_set, path)
    elif isinstance(stat_set, OpennemDataSet):
        byte_count = write_statset_to_s3(
            stat_set, path, exclude_unset=exclude_unset, exclude=exclude)
    elif isinstance(stat_set, BaseModel):
        byte_count = write_to_s3(write_content, path)
    else:
        raise Exception(
            "Do not know how to write content of this type to output")

    return byte_count
def test_generated_schema_same_as_original(
    pydantic_model: BaseModel,
    original_json_schema: str,
    diff_json_schemas: Callable,
    json_schema_dict: Callable,
):
    # TODO: create instead a fixture that returns a Callable and do these checks
    # on separate test_* files that follow the same package submodule's hierarchy
    #
    generated_schema = json.loads(pydantic_model.schema_json(indent=2))
    original_schema = json_schema_dict(original_json_schema)

    # NOTE: A change is considered an addition when the destination schema has
    # become more permissive relative to the source schema. For example
    # {"type": "string"} -> {"type": ["string", "number"]}.
    # A change is considered a removal when the destination schema has become
    # more restrictive relative to the source schema. For example
    # {"type": ["string", "number"]} -> {"type": "string"}.
    # The addition and removal changes detected are returned in JsonSchema format.
    # These schemas represent the set of values that have been added or removed.

    # run one direction: the original schema encompasses the generated one
    process_completion = diff_json_schemas(original_schema, generated_schema)

    assert (
        process_completion.returncode == 0
    ), f"Exit code {process_completion.returncode}\n{process_completion.stdout.decode('utf-8')}"

    # https://www.npmjs.com/package/json-schema-diff returns true (at least in WSL)
    # whatever the result; ```false``` is returned at the end of the stdout
    assert "No differences found" in process_completion.stdout.decode(
        "utf-8"), process_completion.stdout.decode("utf-8")

    # run the other direction: the generated schema encompasses the original one
    process_completion = diff_json_schemas(generated_schema, original_schema)

    assert (
        process_completion.returncode == 0
    ), f"Exit code {process_completion.returncode}\n{process_completion.stdout.decode('utf-8')}"

    # https://www.npmjs.com/package/json-schema-diff returns true (at least in WSL)
    # whatever the result; ```false``` is returned at the end of the stdout
    assert "No differences found" in process_completion.stdout.decode(
        "utf-8"), process_completion.stdout.decode("utf-8")
def serialize_payload(self, payload: BaseModel):
    serialized = payload.json()
    return serialized.encode("utf-8")
def publish(self, payload: BaseModel, topic_name: str):
    topic_path = self.get_topic_path(topic_name)
    data = payload.json()
    Logger.info(f"Published virtual pub/sub message to '{topic_path}'")
    Logger.info(data)
def create(self, item: BaseModel):
    return self.save(item.dict())
def on_execute(self, command: EndOfDayCommand) -> EndOfDayResult:
    @firestore.transactional
    def end_of_day(transaction: Transaction) -> EndOfDayResult:
        scoreboard = self.public_repo.get_scoreboard(transaction)
        state = self.state_repo.get(transaction)

        if not state.waivers_active:
            Logger.info("Waivers not active")

            week_complete = scoreboard.all_games_complete()
            if not week_complete:
                Logger.info("Week not yet complete")
                return EndOfDayResult(command=command)

            now = datetime.now(tz=pytz.UTC)
            hours_since_last_game = hours_since(
                scoreboard.last_game_start_time(), now)

            if hours_since_last_game < self.settings.min_stat_correction_hours:
                Logger.info(
                    "Week complete but not enough time since last game",
                    extra={
                        "hours_since": hours_since_last_game,
                        "min_hours": self.settings.min_stat_correction_hours
                    })
                return EndOfDayResult(command=command)

            Logger.info(
                "Week is complete, enabling waivers and publishing end of week")
            state.waivers_active = True
            state.waivers_end = datetime.now().today() + timedelta(days=1)
            state.locks = Locks.reset()

            completed_week_number = scoreboard.week()
            state.current_week = completed_week_number + 1

            self.state_repo.set(state, transaction)

            return EndOfDayResult(
                command=command,
                state=state,
                waivers_enabled=True,
                completed_week_number=completed_week_number)
        else:
            Logger.info("Waivers are active, initializing waiver processing")
            state.waivers_active = False
            state.waivers_end = None
            self.state_repo.set(state, transaction)

            return EndOfDayResult(
                command=command, state=state, waivers_complete=True)

    transaction = self.state_repo.firestore.create_transaction()
    result: EndOfDayResult = end_of_day(transaction)

    if result.success and result.waivers_enabled:
        self.publisher.publish(BaseModel(), END_OF_WEEK_TOPIC)

        command = CalculateResultsCommand(
            week_number=result.completed_week_number)
        payload = LeagueCommandPushData(
            command_type=LeagueCommandType.CALCULATE_RESULTS,
            command_data=command.dict())
        self.publisher.publish(payload, LEAGUE_COMMAND_TOPIC)

    if result.success and result.waivers_complete:
        self.publisher.publish(BaseModel(), END_OF_WAIVERS_TOPIC)

        command = ProcessWaiversCommand()
        payload = LeagueCommandPushData(
            command_type=LeagueCommandType.PROCESS_WAIVERS,
            command_data=command.dict())
        self.publisher.publish(payload, LEAGUE_COMMAND_TOPIC)

    return result
def save_data(source: str, city: str, data: BaseModel) -> Path:
    file_path = make_filepath(source=source, city=city)
    with open(file_path, "w") as json_file:
        json.dump(data.json(), json_file)
    return file_path
def encode(model: BaseModel) -> str:
    """Convert the given model to a string"""
    return model.json()
def on_execute(self, command: UpdateGamesCommand) -> UpdateGamesResult:
    Logger.info(f"Updating games for week {command.week}")

    season = command.season

    if self.public_repo.get_switches().enable_score_testing:
        season = 2019
        Logger.warn("SCORE TESTING SWITCH IS ENABLED")

    current_games = self.get_current_games(season, command.week)
    stored_games = self.get_stored_games(season, command.week)

    roster_added = False
    for game_id in current_games:
        current_game = current_games[game_id]
        stored_game = stored_games.get(game_id, None)

        if current_game.away_roster and (not stored_game
                                         or not stored_game.away_roster):
            roster_added = True

        if current_game.home_roster and (not stored_game
                                         or not stored_game.home_roster):
            roster_added = True

    game_updates = get_changed_games(current_games, stored_games)
    player_updates = get_changed_players(game_updates, stored_games)

    transaction = self.game_repo.firestore.create_transaction()

    locked_teams: List[str] = []
    active_games_count = 0
    opponents: Dict[str, str] = {}

    for game in current_games.values():
        if game.event_status.event_status_id == EVENT_STATUS_POSTPONED:
            continue

        opponents[game.teams.away.abbreviation] = game.teams.home.abbreviation
        opponents[game.teams.home.abbreviation] = game.teams.away.abbreviation

        if game.event_status.has_started():
            active_games_count += 1
            locked_teams.append(game.teams.away.abbreviation)
            locked_teams.append(game.teams.home.abbreviation)

    all_games_active = active_games_count == len(current_games)
    new_locks_state = Locks.create(locked_teams, all_games_active)
    new_scoreboard = Scoreboard.create(current_games.values())
    new_opponents = Opponents.create(opponents)

    @firestore.transactional
    def update_games(transaction, games: Dict[str, Game],
                     players: List[GamePlayerStats]):
        state = self.state_repo.get()
        new_state = state.copy()
        new_state.locks = new_locks_state

        current_scoreboard = self.public_repo.get_scoreboard()
        current_opponents = self.public_repo.get_opponents()

        pending_player_updates = []
        for player_update in players:
            player = self.player_repo.get(season, player_update.player.id,
                                          transaction)
            game_id = player_update.game_id

            if not player:
                player = from_game_player(player_update.player,
                                          player_update.team)

            if not player.game_stats:
                player.game_stats = {}

            player.game_stats[game_id] = PlayerGameStats(
                team=player.team, **player_update.stats.dict())
            player.recalc_season_stats()  # Should this be here?
            pending_player_updates.append(player)

        for game_id in games:
            game = game_updates[game_id]
            self.game_repo.set(season, game, transaction)

        for player in pending_player_updates:
            self.player_repo.set(season, player, transaction)

        if new_state.changed(state):
            self.state_repo.set(new_state, transaction)

        if new_scoreboard.changed(current_scoreboard):
            self.public_repo.set_scoreboard(new_scoreboard, transaction)

        if new_opponents.changed(current_opponents):
            self.public_repo.set_opponents(new_opponents, transaction)

        return pending_player_updates

    update_games(transaction, game_updates, player_updates)

    payloads = self.publish_changed_players(player_updates)

    if roster_added:
        self.publisher.publish(BaseModel(), UPDATE_PLAYERS_TOPIC)

    return UpdateGamesResult(
        command=command,
        changed_games=[game_updates[game_id] for game_id in game_updates],
        changed_players=payloads)