def patch_channel(
    channel_data: rest_models.Channel,
    dao: Dao = Depends(get_dao),
    auth: authorization.Rules = Depends(get_rules),
    channel: db_models.Channel = Depends(get_channel_or_fail),
    db=Depends(get_db),
):
    """Partially update a channel's mutable attributes.

    Only ``private``, ``size_limit`` and ``metadata`` may be changed.
    ``metadata`` is merged into the stored metadata JSON rather than
    replacing it; the other attributes are set directly on the ORM object.

    Raises:
        HTTPException: 422 if the request tries to change any other
            attribute. Authorization failures propagate from the
            ``auth.assert_*`` calls.
    """
    auth.assert_update_channel_info(channel.name)

    # Only fields explicitly provided by the caller (exclude_unset) are
    # considered; absent fields are left untouched.
    user_attrs = channel_data.dict(exclude_unset=True)

    # Changing the size limit requires an extra privilege beyond plain
    # channel-info update rights.
    if "size_limit" in user_attrs:
        auth.assert_set_channel_size_limit()

    changeable_attrs = {"private", "size_limit", "metadata"}

    # Validate the whole request before mutating anything, so a bad
    # request leaves the channel in its original state.
    for attr_ in user_attrs:
        if attr_ not in changeable_attrs:
            raise HTTPException(
                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                detail=f"attribute '{attr_}' of channel can not be changed",
            )

    for attr_, value_ in user_attrs.items():
        if attr_ == "metadata":
            # Merge incoming keys into the existing metadata dict instead
            # of overwriting the stored JSON blob wholesale.
            metadata = channel.load_channel_metadata()
            metadata.update(value_)
            channel.channel_metadata = json.dumps(metadata)
        else:
            setattr(channel, attr_, value_)

    db.commit()
    return channel
def execute_channel_action(
    self,
    action: str,
    channel: db_models.Channel,
    start_at: Optional[datetime] = None,
    repeat_every_seconds: Optional[int] = None,
):
    """Dispatch a channel maintenance action to the jobs queue.

    Checks that *action* is valid for *channel*, enforces the matching
    authorization rule, then creates one job (two for cleanup actions:
    a ``db_…`` job and a ``pkgstore_…`` job) via ``self.jobs_dao``.

    Args:
        action: a ``ChannelActionEnum`` value naming the maintenance task.
        channel: the target channel ORM object.
        start_at: optional scheduled start time for the job.
        repeat_every_seconds: optional repeat interval for the job.

    Returns:
        The created job/task record (for cleanup actions, the pkgstore
        job — the last one created, matching the original behavior).

    Raises:
        HTTPException: 501 if the action is not implemented.
    """
    auth = self.auth
    channel_name = channel.name
    channel_metadata = channel.load_channel_metadata()
    assert_channel_action(action, channel)

    user_id = auth.assert_user()

    def _submit(job_name, extra_args):
        # Single place for the identical create_job call that was
        # previously repeated in every branch.
        return self.jobs_dao.create_job(
            job_name.encode('ascii'),
            user_id,
            extra_args=extra_args,
            start_at=start_at,
            repeat_every_seconds=repeat_every_seconds,
        )

    if action in (
        ChannelActionEnum.synchronize,
        ChannelActionEnum.synchronize_repodata,
    ):
        auth.assert_synchronize_mirror(channel_name)
        extra_args = dict(
            channel_name=channel_name,
            includelist=channel_metadata.get('includelist', None),
            excludelist=channel_metadata.get('excludelist', None),
        )
        if action == ChannelActionEnum.synchronize_repodata:
            extra_args['use_repodata'] = True
        task = _submit(action, extra_args)
    elif action == ChannelActionEnum.validate_packages:
        auth.assert_validate_package_cache(channel_name)
        task = _submit(action, dict(channel_name=channel.name))
    elif action in (
        ChannelActionEnum.generate_indexes,
        ChannelActionEnum.reindex,
        ChannelActionEnum.synchronize_metrics,
    ):
        # These three actions had identical bodies; they share the same
        # authorization rule and job arguments.
        auth.assert_reindex_channel(channel_name)
        task = _submit(action, dict(channel_name=channel.name))
    elif action in [
        ChannelActionEnum.cleanup,
        ChannelActionEnum.cleanup_dry_run,
    ]:
        auth.assert_channel_db_cleanup(channel_name)
        dry_run = action == ChannelActionEnum.cleanup_dry_run
        extra_args = dict(
            channel_name=channel_name,
            dry_run=dry_run,
        )
        # Cleanup spawns two jobs; the pkgstore job is returned, matching
        # the original's last assignment to `task`.
        task = _submit(f"db_{action}", extra_args)
        task = _submit(f"pkgstore_{action}", extra_args)
    else:
        raise HTTPException(
            status_code=status.HTTP_501_NOT_IMPLEMENTED,
            detail=(
                f"Action {action} on channel {channel.name} is not implemented"
            ),
        )
    return task