class Action(db.Model):
    """A named, toggleable action whose behavior is delegated to an importable class."""
    __tablename__ = 'actions'

    # (code, human-readable label) pairs for the `type` column.
    TYPES = (
        ('path', _('Path')),
        ('code', _('Code')),
    )

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    name = db.Column(db.String(128), nullable=False)
    description = db.Column(db.String(512), nullable=False)
    # Dotted import path of the concrete action class (see `value_object`).
    value = db.Column(db.Text, nullable=False)
    type = db.Column(ChoiceType(TYPES))
    enabled = db.Column(db.Boolean, nullable=False, default=True)
    formatter_id = db.Column(db.Integer, db.ForeignKey('formatters.id'))

    def __repr__(self):
        return "<Action(name=%s, description=%s)>" % (self.name,
                                                      self.description)

    def __getattr__(self, item):
        # Proxy attributes not found by normal lookup to the instantiated
        # action object; mapped columns are unaffected since __getattr__ only
        # fires on lookup failure.
        return getattr(self.value_object, item)

    @cached_property
    def value_object(self) -> BaseAction:
        # Import the class named by `value` and instantiate it once per instance.
        return import_string(self.value)()
def paginate_queryset(self, queryset, page_size):
    """Paginate `queryset`, returning (paginator, page, object_list, has_other_pages)."""
    paginator = self.get_paginator(
        queryset,
        page_size,
        orphans=self.get_paginate_orphans(),
        allow_empty_first_page=self.get_allow_empty())
    kwarg = self.page_kwarg
    raw_page = self.path_kwargs.get(kwarg) or self.get_argument(kwarg, 1)
    try:
        number = int(raw_page)
    except ValueError:
        # Only the literal 'last' is accepted as a non-numeric page value.
        if raw_page != 'last':
            raise Http404(
                _("Page is not 'last', nor can it be converted to an int."))
        number = paginator.num_pages
    try:
        page_obj = paginator.page(number)
    except InvalidPage as exc:
        raise Http404(
            _('Invalid page (%(page_number)s): %(message)s') % {
                'page_number': number,
                'message': str(exc)})
    return paginator, page_obj, page_obj.object_list, page_obj.has_other_pages()
class IntervalSchedule(Model):
    """Schedule that fires every `every` `period` units (e.g. every 10 seconds)."""
    __tablename__ = "interval_schedule"

    # (code, label) choices; each code doubles as a datetime.timedelta kwarg name.
    PERIOD_CHOICES = (('days', _('Days')),
                      ('hours', _('Hours')),
                      ('minutes', _('Minutes')),
                      ('seconds', _('Seconds')),
                      ('microseconds', _('Microseconds')))

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    every = db.Column(db.Integer, nullable=False)
    period = db.Column(ChoiceType(PERIOD_CHOICES))
    periodic_tasks = db.relationship('PeriodicTask')

    @property
    def schedule(self):
        # `period.code` is a valid timedelta keyword by construction of
        # PERIOD_CHOICES above.
        return schedules.schedule(
            datetime.timedelta(**{self.period.code: self.every}))

    @classmethod
    def from_schedule(cls, session, schedule, period='seconds'):
        """Find an instance matching `schedule.run_every`, or build an unsaved one."""
        every = max(schedule.run_every.total_seconds(), 0)
        obj = cls.filter_by(session, every=every, period=period).first()
        if obj is None:
            return cls(every=every, period=period)
        else:
            return obj

    def __str__(self):
        if self.every == 1:
            return _('every {0.period_singular}').format(self)
        return _('every {0.every} {0.period}').format(self)

    @property
    def period_singular(self):
        # Strip the trailing 's' ('days' -> 'day'); all period codes are plural.
        return self.period[:-1]
class Group(InternalAPIMixin, db.Model):
    """A chat group: personal dialog, multi-user chat, or broadcast channel."""
    __tablename__ = 'groups'

    TYPES = (
        ('p', _('Personal')),
        ('m', _('Multiple')),
        ('c', _('Channel')),
    )

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    name = db.Column(db.String(128), nullable=False, unique=True)
    type = db.Column(ChoiceType(TYPES))
    created = db.Column(db.DateTime, default=timezone.now)
    updated = db.Column(db.DateTime, onupdate=timezone.now)
    active = db.Column(db.Boolean, nullable=False, default=True)

    @as_future
    def get_messages(self, user_id=None, **kwargs) -> dict:
        """Fetch active messages of this group from the message service.

        When `user_id` is given, restrict to that sender. Extra kwargs are
        forwarded as additional filters and may override the defaults.
        """
        default_kwargs = dict(active=True)
        if user_id is not None:
            default_kwargs.update(sender_id=user_id)
        default_kwargs.update(kwargs)
        data = {'model_name': 'Message', 'filter_data': default_kwargs}
        return self.internal_request('message', 'get_models', **data)

    @as_future
    def get_memberships(self, user_id=None, **kwargs):
        """Query active memberships, optionally limited to `user_id`."""
        default_kwargs = dict(active=True)
        if user_id is not None:
            default_kwargs.update(user_id=user_id)
        default_kwargs.update(kwargs)
        return self.memberships.filter_by(**default_kwargs)
class Bundle(db.Model):
    """An uploadable file bundle tracked through its delivery lifecycle."""
    __tablename__ = 'bundles'

    STATUSES = (
        ('created', _('Created')),
        ('uploaded', _('Uploaded')),
        ('delivering', _('Delivering')),
        ('delivered', _('Delivered')),
        ('error', _('Error')),
    )

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    name = db.Column(db.String(128), nullable=False)
    key = db.Column(db.String(64), nullable=False)
    filename = db.Column(db.String(128), nullable=False, unique=True)
    # Mapping of hash type name -> digest, produced by `make_hash`.
    hash = db.Column(JSONType, nullable=False)
    filter = db.Column(JSONType, nullable=False)
    status = db.Column(ChoiceType(STATUSES), nullable=False, default='created')
    group_id = db.Column(
        db.Integer, db.ForeignKey('bundle_groups.id'),
        nullable=False, index=True)

    @property
    def application_version(self):
        return self.group.application_version

    @property
    def application(self):
        return self.application_version.application

    @property
    def deployment_method(self):
        return self.application.deployment_method

    @property
    def size(self):
        # Size of the stored file, in bytes.
        return default_storage.size(self.filename)

    @property
    def url(self):
        # Download URL as produced by the application's deployment method.
        deployment_method = self.deployment_method.get_method()
        return deployment_method.url(self.filename)

    def make_hash(self, filename=None, group=None):
        """Compute digests of the stored file for every hash type the group enables.

        Returns a dict mapping lower-cased hash type name to its digest.
        NOTE(review): reads the whole file into memory via fd.read() — confirm
        this is acceptable for large bundles.
        """
        newhash = {}
        group = group or self.group
        filename = filename or self.filename
        with default_storage.open(filename) as fd:
            hasher = Hasher(fd.read())
            for hash_type, hash_is_active in group.hash_types.items():
                if not hash_is_active:
                    # hash type not active, so skip
                    continue
                hash_type = hash_type.lower()
                # Only hash types the Hasher actually implements are computed.
                hashing_method = getattr(hasher, hash_type, None)
                if callable(hashing_method):
                    newhash[hash_type] = hashing_method()
        return newhash

    def update_hash(self):
        """Recompute and store the digests of the bundle's own file."""
        self.hash = self.make_hash()
def authenticate_credentials(self, key):
    """Resolve a token key to a (user, token) pair or raise AuthenticationFailed."""
    token = self.get_model().query.get(key)
    if not token:
        raise exceptions.AuthenticationFailed(_('Invalid token.'))
    user = token.user
    if not user.is_active:
        raise exceptions.AuthenticationFailed(
            _('User inactive or deleted.'))
    return user, token
async def moderate(cls, action_type: str, reason: str, moderator: RemoteUser,
                   user: RemoteUser, extra_data: Optional[dict] = None,
                   finish_at=None, commit=True):
    """Create a moderation action against `user` and notify them.

    Pass commit=False to let the caller own the transaction boundary.
    NOTE(review): the `cls` first parameter suggests a classmethod; the
    decorator is outside this view — confirm.
    """
    data = dict(action_type=action_type,
                reason=reason,
                moderator_id=moderator.id,
                user_id=user.id,
                finish_at=finish_at,
                extra_data=extra_data)
    obj = cls(**data)
    db.session.add(obj)
    if commit:
        db.session.commit()
    # Notify the moderated user via both email and direct message.
    await cls.send_email(user, subject=_('You are moderated'), message=reason,
                         from_email=settings.DEFAULT_FROM_EMAIL,
                         fail_silently=False, html_message=None)
    await cls.send_message(user, message=reason)
def task_create(self):
    """Register a periodic task that runs this pool on its crontab plan."""
    crontab = CrontabSchedule.get_or_create(**self.plan)
    periodic = PeriodicTask.create(
        crontab=crontab,
        name=_('Start events generators pool'),
        task='event.tasks.events_generators_pool_run',
        args=json.dumps([self.id]),
        enabled=self.active)
    self.task_id = periodic.id
class AuditLog(InternalAPIMixin, db.Model):
    """Immutable record of a create/update/delete performed on some service's model."""
    __tablename__ = 'audit_log'

    ACTIONS = (
        ('create', _('Create')),
        ('update', _('Update')),
        ('delete', _('Delete')),
    )

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    created = db.Column(db.DateTime, default=timezone.now)
    service_name = db.Column(db.String(512), nullable=False)
    model_name = db.Column(db.String(512), nullable=False)
    object_id = db.Column(db.Integer, nullable=False)
    author_id = db.Column(db.Integer, nullable=False)
    action = db.Column(ChoiceType(ACTIONS), nullable=False)
    current_version = db.Column(db.Integer, nullable=False)
    previous_version = db.Column(db.Integer, nullable=False)

    async def get_author(self):
        """Fetch the user who performed the action from the login service."""
        return await self.internal_request('login', 'get_user',
                                           user_id=self.author_id)

    async def _get_version_object(self, version):
        """Fetch the audited object at `version` from its home service."""
        # BUG FIX: `service_name` was an unresolved global — use the row's own
        # column — and the coroutine was returned without awaiting, so callers
        # received a coroutine object instead of the data.
        kwargs = {
            'model_name': self.model_name,
            'object_id': self.object_id,
            'version': version
        }
        return await self.internal_request(self.service_name,
                                           'object_version', **kwargs)

    async def get_current_object(self):
        return await self._get_version_object(version=self.current_version)

    async def get_previous_object(self):
        return await self._get_version_object(version=self.previous_version)

    async def recover(self):
        """Roll the audited object back to `previous_version` on its home service."""
        kwargs = {
            'model_name': self.model_name,
            'object_id': self.object_id,
            'version': self.previous_version
        }
        # BUG FIX: `service_name` was an unresolved global here as well.
        await self.internal_request(self.service_name, 'object_recover',
                                    **kwargs)
class EventParticipation(InternalAPIMixin, db.Model):
    """Link between a user and an event, tracking joined/leaved status."""
    __tablename__ = 'event_participations'
    __table_args__ = (
        # One participation row per user per event.
        db.UniqueConstraint('user_id', 'event_id'),
    )

    STATUSES = (
        ('joined', _('Joined')),
        ('leaved', _('Leaved'))
    )

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    created_at = db.Column(db.DateTime, nullable=False, default=timezone.now)
    status = db.Column(ChoiceType(STATUSES))
    # BUG FIX: `default={}` shared a single mutable dict across all rows;
    # a callable default yields a fresh dict per row.
    payload = db.Column(JSONType, nullable=False, default=dict)
    user_id = db.Column(db.Integer, nullable=False)
    event_id = db.Column(db.Integer, db.ForeignKey('events.id'),
                         nullable=False)

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.Schema = self.get_schema_class()

    @classmethod
    def get_schema_class(cls):
        """Build a marshmallow schema bound to this model class."""
        class _Schema(ma.Schema):
            class Meta:
                model = cls
                fields = ('payload', 'created_at', 'status', 'event')
        return _Schema

    def dumps(self) -> dict:
        """Serialize this participation with the bound schema."""
        return self.Schema().dump(self).data

    async def on_status_changed(self) -> None:
        """Push a JSON notification about the status change to the owning user."""
        user = await self.get_user()
        msg = {
            'type': EVENT_PARTICIPATION_STATUS_CHANGED,
            'data': self.dumps()
        }
        await user.send_message(message=json.dumps(msg),
                                content_type='application/json')

    async def get_user(self):
        """Fetch the participating user from the login service."""
        return await self.internal_request('login', 'get_user',
                                           user_id=self.user_id)
class BundlesGroup(db.Model):
    """A publishable group of bundles tied to one application version."""
    __tablename__ = 'bundle_groups'

    STATUSES = (
        ('created', _('Created')),
        ('publishing', _('Publishing')),
        ('published', _('Published')),
        ('error', _('Error')),
    )

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    status = db.Column(ChoiceType(STATUSES), nullable=False, default='created')
    # Mapping of hash type name -> enabled flag, consumed by Bundle.make_hash.
    hash_types = db.Column(JSONType, nullable=False)
    # Deleting a group deletes its bundles (delete-orphan cascade).
    bundles = db.relationship(
        'Bundle', backref=db.backref('group'), lazy='dynamic',
        cascade='all, delete-orphan')
    version_id = db.Column(
        db.Integer, db.ForeignKey('application_versions.id'),
        nullable=False, index=True)
class Server(InternalAPIMixin, db.Model):
    """A game server instance with heartbeat-driven health status."""
    __tablename__ = 'servers'

    STATUSES = (
        ('active', _('Active')),
        ('failed', _('Failed')),
        ('overload', _('Overload')),
    )

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64), unique=True)
    location = db.Column(URLType, nullable=False, unique=True)
    geo_location_id = db.Column(db.Integer, db.ForeignKey('geo_locations.id'))
    last_heartbeat = db.Column(db.DateTime)
    status = db.Column(ChoiceType(STATUSES))
    last_failure_tb = db.Column(db.Text)
    enabled = db.Column(db.Boolean, nullable=False, default=True)
    rooms = db.relationship('Room', backref='server', lazy='dynamic')
    cpu_load = db.Column(db.Float, nullable=False, default=0.0)
    ram_usage = db.Column(db.Float, nullable=False, default=0.0)

    @hybrid_property
    def active(self):
        # A server is usable when it is both enabled and reported healthy.
        return self.enabled and self.status == 'active'

    @classmethod
    async def get_optimal(cls, region_id):
        # TODO: pick by load/geo within `region_id`; currently first active server.
        return cls.query.filter_by(active=True).first()

    @as_future
    def heartbeat(self, report: Union[HeartbeatReport, RequestError]):
        """Record one heartbeat result and update the health columns.

        A RequestError marks the server failed and stores the traceback;
        a HeartbeatReport refreshes load figures and the active/overload state.
        Raises ValueError for any other report type.
        """
        if isinstance(report, RequestError):
            self.status = 'failed'
            # BUG FIX: traceback.format_tb() returns a list of strings; join it
            # before storing into the Text column.
            self.last_failure_tb = ''.join(
                traceback.format_tb(report.__traceback__))
        elif isinstance(report, HeartbeatReport):
            self.last_heartbeat = timezone.now()
            self.cpu_load = report.cpu_load
            self.ram_usage = report.ram_usage
            self.status = 'overload' if report.is_overload() else 'active'
        else:
            # BUG FIX: the concatenated message lacked a space
            # ("...instance ofHeartbeatReport...").
            raise ValueError('`report` argument should be either instance of '
                             'HeartbeatReport or RequestError')
        self.save()
class AuthenticationForm(Form):
    """
    Base class for authenticating users. Extend this to get a form
    that accepts username/password logins.
    """
    username = StringField(
        _('Enter your username'),
        [validators.DataRequired(), validators.Length(min=4, max=25)],
        render_kw={'placeholder': _('Username')})
    password = PasswordField(
        _('Enter your password'),
        [validators.DataRequired()],
        render_kw={'placeholder': _('Password')})

    async def authenticate(self, request):
        """Authenticate the form's credentials against the backend.

        Raises ValidationError (via `invalid_login_error` or
        `confirm_login_allowed`) on failure; returns the user on success.
        """
        user = await authenticate(request, **self.get_credentials())
        if user is None:
            self.invalid_login_error()
        else:
            self.confirm_login_allowed(user)
        return user

    def get_credentials(self):
        # Raw form data passed as keyword arguments to the authenticate() backend.
        return {'username': self.username.data,
                'password': self.password.data}

    # noinspection PyMethodMayBeStatic
    def confirm_login_allowed(self, user):
        """
        Controls whether the given User may log in. This is a policy setting,
        independent of end-user authentication. This default behavior is to
        allow login by active users, and reject login by inactive users.

        If the given user cannot log in, this method should raise a
        ``ValidationError``.

        If the given user may log in, this method should return None.
        """
        if not user.is_active:
            raise ValidationError(_('Inactive user'))

    def invalid_login_error(self) -> None:
        """Signal that no user matched the supplied credentials."""
        raise ValidationError(_('Invalid user'))
def confirm_login_allowed(self, user):
    """Policy hook deciding whether an authenticated user may log in.

    The default policy admits active users and rejects inactive ones by
    raising ``ValidationError``; returns None when login is allowed.
    """
    if user.is_active:
        return None
    raise ValidationError(_('Inactive user'))
class Transaction(db.Model):
    """A multi-step transaction driven step by step by a master service."""
    __tablename__ = 'transactions'

    STATUSES = (
        ('new', _('New')),
        ('started', _('Started')),
        ('successful', _('Successful')),
        ('failed', _('Failed')),
    )

    id = db.Column(UUIDType(binary=False), primary_key=True)
    started = db.Column(db.DateTime, nullable=False, default=timezone.now)
    finished = db.Column(db.DateTime)
    status = db.Column(ChoiceType(STATUSES), nullable=False, default='new')
    # Timeout in seconds; 0 disables the timeout check.
    timeout = db.Column(db.Integer, nullable=False, default=0)
    master = db.Column(db.String(128))  # Name of master service

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._steps = []
        self._steps_iterator = None

    @property
    def is_finished(self):
        return self.finished is not None

    def check_for_timeout(self):
        """Raise TransactionTimeoutError if the unfinished transaction ran too long."""
        # BUG FIX: `timezone.now() - self.started` is a timedelta and cannot be
        # compared with the integer `timeout`; compare elapsed seconds instead.
        if not self.is_finished:
            elapsed = (timezone.now() - self.started).total_seconds()
            if 0 < self.timeout < elapsed:
                raise TransactionTimeoutError

    def append(self, step, *args, **kwargs):
        """Queue a step (an awaitable callable plus its arguments)."""
        self._steps.append([step, args, kwargs])
        # BUG FIX: referenced nonexistent `self.steps`; iterate the private list.
        self._steps_iterator = iter(self._steps)

    async def start(self):
        """Run the next queued step; raise TransactionFinished when exhausted."""
        try:
            func, args, kwargs = next(self._steps_iterator)
        except StopIteration:
            raise TransactionFinished
        return await func(*args, **kwargs)
async def send_email_on_incoming_message(self, data, group, my_client):
    """Email every participant of `group` except the sender about a new message.

    `data` supplies the template context for both the text and HTML bodies.
    """
    participants = self.get_participants(group)
    clients = [await self.get_client(s) for s in participants]
    # Drop the author so they are not notified about their own message.
    # NOTE(review): raises ValueError if `my_client` is not among the
    # participants' clients — confirm callers guarantee membership.
    clients.remove(my_client)
    recipient_list = (c.user.email for c in clients)
    loader = template.Loader(settings.TEMPLATE_PATH)
    subject = _('New incoming message')
    message = loader.load("incoming_message_email.txt").generate(**data)
    html_message = loader.load("incoming_message_email.html").generate(**data)
    from_email = settings.DEFAULT_FROM_EMAIL
    await send_mail(
        subject, message, from_email, recipient_list,
        fail_silently=False, html_message=html_message)
class UpdateLog(InternalAPIMixin, db.Model):
    """History of update runs for versioned items, with timing and failure traceback."""
    __tablename__ = 'update_log'

    # NOTE(review): STATUS choices are declared but no status column is
    # visible in this chunk — confirm where they are used.
    STATUS = (
        ('success', _('Success')),
        ('error', _('Error')),
        ('running', _('Running')),
    )

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    started = db.Column(db.DateTime, default=timezone.now)
    finished = db.Column(db.DateTime)
    item_name = db.Column(db.String(128), nullable=False)
    author_id = db.Column(db.Integer, nullable=False)
    current_version = db.Column(db.Integer, nullable=False)
    previous_version = db.Column(db.Integer, nullable=False)
    # Traceback text of the last failed update, if any.
    last_failure_tb = db.Column(db.Text)

    async def get_author(self):
        """Fetch the user who initiated the update from the login service."""
        return await self.internal_request('login', 'get_user',
                                           user_id=self.author_id)
class UtilsSessionHandler(UserHandlerMixin, JsonRPCSessionHandler):
    """JSON-RPC session handler exposing service maintenance utilities."""
    SUCCESSFUL_MESSAGE = _('Completed successfully.')

    @jsonrpc_method()
    @_util_internal_wrapper
    async def update(self, service_name, version=None):
        """Ask `service_name` to update itself, optionally to a specific version."""
        await connector.internal_request(service_name, 'update',
                                         version=version)

    @jsonrpc_method()
    @_util_internal_wrapper
    async def reload(self, service_name):
        """Ask `service_name` to reload itself."""
        await connector.internal_request(service_name, 'reload')
async def authenticate(self, request):
    """Extract a token from the Authorization header and validate it.

    Returns None when the header is absent or carries a different keyword,
    otherwise defers to `authenticate_credentials`.
    """
    parts = get_authorization_header(request).split()
    if not parts or parts[0].lower() != self.keyword.lower().encode():
        return None
    if len(parts) == 1:
        raise exceptions.AuthenticationFailed(
            _('Invalid token header. No credentials provided.'))
    if len(parts) > 2:
        raise exceptions.AuthenticationFailed(
            _('Invalid token header. Token string should not contain spaces.'))
    try:
        token = parts[1].decode()
    except UnicodeError:
        raise exceptions.AuthenticationFailed(
            _('Invalid token header. Token string should not contain invalid characters.'))
    return await self.authenticate_credentials(token)
async def get(self, *args, **kwargs):
    """Render the list view, raising 404 for an empty list when empties are disallowed."""
    self.object_list = self.get_queryset()
    if not self.get_allow_empty():
        # When pagination is enabled and object_list is a queryset, a cheap
        # EXISTS query beats loading the unpaginated queryset into memory.
        paginated = self.get_paginate_by(self.object_list) is not None
        if paginated and hasattr(self.object_list, 'exists'):
            empty = not self.object_list.exists()
        else:
            empty = not self.object_list
        if empty:
            raise Http404(
                _("Empty list and '%(class_name)s.allow_empty' is False.") % {
                    'class_name': self.__class__.__name__,
                })
    context = await self.get_context_data()
    return self.render(context)
async def warn(cls, action_type: str, reason: str, moderator: RemoteUser,
               user: RemoteUser, finish_at=None,
               extra_data: Optional[dict] = None):
    """Record a warning for `user`; escalate to moderation at the threshold.

    Counts existing warnings of this `action_type` and, once the configured
    threshold is reached, escalates via `cls.moderate` (uncommitted, so the
    warning row and escalation commit atomically); otherwise the user is
    notified by email and direct message. On any error the session is rolled
    back and the exception re-raised.
    """
    data = dict(action_type=action_type,
                reason=reason,
                moderator_id=moderator.id,
                user_id=user.id,
                extra_data=extra_data)
    obj = cls(**data)
    db.session.add(obj)
    try:
        warns_count = await cls.actions_query(
            user.id, action_type=action_type).count()
        threshold = cls.threshold_model.query.filter_by(
            action_type=action_type).first()
        # BUG FIX: a missing threshold row for this action type used to crash
        # with AttributeError on `threshold.value`; fall back to the module
        # default instead.
        threshold_value = (threshold.value if threshold is not None
                           else DEFAULT_MODERATION_WARNING_THRESHOLD)
        if warns_count >= threshold_value:
            await cls.moderate(action_type, reason, moderator, user,
                               extra_data, finish_at, commit=False)
        else:
            await cls.send_email(user,
                                 subject=_('You are warned'),
                                 message=reason,
                                 from_email=settings.DEFAULT_FROM_EMAIL,
                                 fail_silently=False,
                                 html_message=None)
            await cls.send_message(user, message=reason)
        db.session.commit()
    except Exception:
        db.session.rollback()
        raise
async def get(self, *args, **kwargs):
    """Write the object list as JSON, raising 404 for an empty list when disallowed."""
    # noinspection PyAttributeOutsideInit
    self.object_list = self.get_queryset()
    allow_empty = self.get_allow_empty()
    if not allow_empty:
        # When pagination is enabled and object_list is a queryset,
        # it's better to do a cheap query than to load the unpaginated
        # queryset in memory.
        if self.get_paginate_by(self.object_list) is not None:
            is_empty = not (await future_exec(self.object_list.exists))
        else:
            is_empty = not self.object_list
        if is_empty:
            raise Http404(
                _("Empty list and '%(class_name)s.allow_empty' is False.") % {
                    'class_name': self.__class__.__name__,
                })
    data = await self.get_context_data()
    self.write_json(data=data)
ADMINS = (('Lysenko Vladimir', '*****@*****.**'), ) SQLALCHEMY_DATABASE_URI = 'postgres://anthill_report@/anthill_report' LOCATION = 'http://*****:*****@localhost:5672' # ROUTES_CONF = 'report.routes' TEMPLATE_PATH = os.path.join(BASE_DIR, 'ui', 'templates') LOCALE_PATH = os.path.join(BASE_DIR, 'locale') # APPLICATION_CLASS = 'report.apps.AnthillApplication' APPLICATION_NAME = 'report' APPLICATION_VERBOSE_NAME = _('Report') APPLICATION_DESCRIPTION = _('User-submitted reporting service') APPLICATION_ICON_CLASS = 'icon-flag3' APPLICATION_COLOR = 'grey' # SERVICE_CLASS = 'report.services.Service' # UI_MODULE = 'report.ui' EMAIL_SUBJECT_PREFIX = '[Anthill: report] ' CACHES["default"]["LOCATION"] = "redis://localhost:6379/30" CACHES["default"]["KEY_PREFIX"] = "report.anthill" LOGGING = { 'version': 1,
class EventGeneratorPool(db.Model):
    """A named pool of event generators driven by one crontab-scheduled task."""
    __tablename__ = 'event_generator_pools'

    RUN_SCHEMES = (
        ('all', _('All')),
        ('any', _('Any')),
    )

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    name = db.Column(db.String(128), nullable=False, unique=True)
    description = db.Column(db.String(512), nullable=False)
    generators = db.relationship('EventGenerator', backref='pool',
                                 lazy='dynamic')
    is_active = db.Column(db.Boolean, nullable=False, default=True)
    run_scheme = db.Column(ChoiceType(RUN_SCHEMES), default='any')
    last_run_at = db.Column(db.DateTime)
    total_run_count = db.Column(db.Integer, nullable=False, default=0)
    plan = db.Column(CrontabType)
    task_id = db.Column(db.Integer, db.ForeignKey('periodic_task.id'))
    task = db.relationship('PeriodicTask')

    @as_future
    def run(self):
        """Activate generators for this run according to `run_scheme`.

        'any' activates one random enabled generator, 'all' activates every
        enabled one; everything else is deactivated.
        """
        # BUG FIX: `generators` is a dynamic relationship and is itself a
        # query object; the old `self.generators.query.filter_by(...)`
        # attribute access was invalid.
        enabled_generators = self.generators.filter_by(enabled=True).all()
        # BUG FIX: `is 'any'` compared identity against a string literal and
        # could never match a ChoiceType value; use equality. Also guard
        # random.choice against an empty list.
        if self.run_scheme == 'any' and enabled_generators:
            prepared_generators = [random.choice(enabled_generators)]
        elif self.run_scheme == 'all':
            prepared_generators = enabled_generators
        else:
            prepared_generators = []
        for gen in enabled_generators:
            gen.is_active = False
            gen.save()
        for gen in prepared_generators:
            gen.is_active = True
            gen.save()

    @as_future
    def task_create(self):
        """Create and attach a periodic task that runs this pool on its plan."""
        schedule = CrontabSchedule.get_or_create(**self.plan)
        task = PeriodicTask.create(
            crontab=schedule,
            name=_('Start events generators pool'),
            task='event.tasks.events_generators_pool_run',
            args=json.dumps([self.id]),
            enabled=self.active)
        self.task_id = task.id
        # self.save()

    @as_future
    def task_disable(self):
        """Turn the attached periodic task off."""
        self.task.enabled = False
        self.task.save()

    @as_future
    def task_enable(self):
        """Turn the attached periodic task on."""
        self.task.enabled = True
        self.task.save()

    @as_future
    def task_update(self):
        """Sync the attached task's crontab and enabled flag with this pool."""
        schedule = CrontabSchedule.get_or_create(**self.plan)
        self.task.crontab = schedule
        self.task.enabled = self.active
        self.task.save()

    @as_future
    def task_delete(self):
        """Remove the attached periodic task."""
        self.task.delete()

    @hybrid_property
    def active(self) -> bool:
        return self.is_active
from anthill.framework.utils import timezone
from anthill.framework.utils.asynchronous import as_future
from anthill.framework.utils.translation import translate_lazy as _
from anthill.platform.api.internal import InternalAPIMixin
from anthill.platform.auth import RemoteUser
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy_utils.types.json import JSONType
from sqlalchemy_utils.types.choice import ChoiceType
from datetime import timedelta
from functools import partial
from typing import Optional

# Fallback number of warnings before escalation when no per-action-type
# threshold row is configured.
DEFAULT_MODERATION_WARNING_THRESHOLD = 3

# (code, label) choices for moderation action kinds.
ACTION_TYPES = (
    ('ban_account', _('Ban account')),
    ('hide_message', _('Hide message')),
    ('ban_game', _('Ban in game')),
)


class BaseModerationAction(InternalAPIMixin, db.Model):
    """Abstract base for moderation actions taken by a moderator against a user."""
    # NOTE(review): `db` is not imported in this chunk — presumably brought
    # into scope earlier in the file; confirm.
    __abstract__ = True

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    action_type = db.Column(ChoiceType(ACTION_TYPES), nullable=False)
    moderator_id = db.Column(db.Integer, nullable=False)
    user_id = db.Column(db.Integer, nullable=False)
    created_at = db.Column(db.DateTime, nullable=False, default=timezone.now)
    reason = db.Column(db.String(512), nullable=False)
    is_active = db.Column(db.Boolean, nullable=False, default=True)
('Lysenko Vladimir', '*****@*****.**'), ) SQLALCHEMY_DATABASE_URI = 'postgres://anthill_blog@/anthill_blog' LOCATION = 'http://*****:*****@localhost:5672' # ROUTES_CONF = 'blog.routes' TEMPLATE_PATH = os.path.join(BASE_DIR, 'ui', 'templates') LOCALE_PATH = os.path.join(BASE_DIR, 'locale') # APPLICATION_CLASS = 'blog.apps.AnthillApplication' APPLICATION_NAME = 'blog' APPLICATION_VERBOSE_NAME = _('Blog') APPLICATION_DESCRIPTION = _('Deliver news and patch notes feeds to the users') APPLICATION_ICON_CLASS = 'icon-feed' APPLICATION_COLOR = 'pink' # SERVICE_CLASS = 'blog.services.Service' # UI_MODULE = 'blog.ui' EMAIL_SUBJECT_PREFIX = '[Anthill: blog] ' LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'filters': { 'require_debug_false': {
DEBUG = False ADMINS = (('Lysenko Vladimir', '*****@*****.**'), ) # Database uri example: SQLALCHEMY_DATABASE_URI = 'postgres://anthill_store@/anthill_store' LOCATION = 'http://*****:*****@localhost:5672' # ROUTES_CONF = 'store.routes' # APPLICATION_CLASS = 'store.apps.AnthillApplication' APPLICATION_NAME = 'store' APPLICATION_VERBOSE_NAME = _('Store') APPLICATION_DESCRIPTION = _('In-App purchasing with server validation') APPLICATION_ICON_CLASS = 'icon-cart' APPLICATION_COLOR = 'primary' # SERVICE_CLASS = 'store.services.Service' TEMPLATE_PATH = os.path.join(BASE_DIR, 'ui', 'templates') LOCALE_PATH = os.path.join(BASE_DIR, 'locale') EMAIL_SUBJECT_PREFIX = '[Anthill: store] ' CACHES["default"]["LOCATION"] = "redis://localhost:6379/23" CACHES["default"]["KEY_PREFIX"] = "store.anthill" LOGGING = { 'version': 1,
DEBUG = False ADMINS = (('Lysenko Vladimir', '*****@*****.**'), ) REGISTERED_SERVICES = os.path.join(BASE_DIR, '../registry.json') SQLALCHEMY_DATABASE_URI = 'postgres://anthill_discovery@/anthill_discovery' LOCATION = 'http://*****:*****@localhost:5672' # ROUTES_CONF = 'discovery.routes' # APPLICATION_CLASS = 'discovery.apps.AnthillApplication' APPLICATION_NAME = 'discovery' APPLICATION_VERBOSE_NAME = _('Discovery') APPLICATION_DESCRIPTION = _('Map each service location dynamically') APPLICATION_ICON_CLASS = 'icon-direction' APPLICATION_COLOR = 'danger' # SERVICE_CLASS = 'discovery.services.Service' EMAIL_SUBJECT_PREFIX = '[Anthill: discovery] ' TEMPLATE_PATH = os.path.join(BASE_DIR, 'ui', 'templates') LOCALE_PATH = os.path.join(BASE_DIR, 'locale') CACHES["default"]["LOCATION"] = "redis://localhost:6379/12" CACHES["default"]["KEY_PREFIX"] = "discovery.anthill" CACHES['services'] = {
('Lysenko Vladimir', '*****@*****.**'), ) SQLALCHEMY_DATABASE_URI = 'postgres://anthill_blog@/anthill_blog' LOCATION = 'http://*****:*****@localhost:5672' # ROUTES_CONF = 'blog.routes' TEMPLATE_PATH = os.path.join(BASE_DIR, 'ui', 'templates') LOCALE_PATH = os.path.join(BASE_DIR, 'locale') # APPLICATION_CLASS = 'blog.apps.AnthillApplication' APPLICATION_NAME = 'blog' APPLICATION_VERBOSE_NAME = _('Blog') APPLICATION_DESCRIPTION = _('Service description') APPLICATION_ICON_CLASS = 'icon-feed' APPLICATION_COLOR = 'pink' # SERVICE_CLASS = 'blog.services.Service' # UI_MODULE = 'blog.ui' EMAIL_SUBJECT_PREFIX = '[Anthill: blog] ' LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'filters': { 'require_debug_false': {
ADMINS = (('Lysenko Vladimir', '*****@*****.**')) SQLALCHEMY_DATABASE_URI = 'postgres://anthill_game_master@/anthill_game_master' LOCATION = 'http://*****:*****@localhost:5672' # ROUTES_CONF = 'game_master.routes' TEMPLATE_PATH = os.path.join(BASE_DIR, 'ui', 'templates') LOCALE_PATH = os.path.join(BASE_DIR, 'locale') # APPLICATION_CLASS = 'game_master.apps.AnthillApplication' APPLICATION_NAME = 'game_master' APPLICATION_VERBOSE_NAME = _('Game') APPLICATION_DESCRIPTION = _('Manage game server instances') APPLICATION_ICON_CLASS = 'icon-steam' APPLICATION_COLOR = 'purple' # SERVICE_CLASS = 'game_master.services.Service' # UI_MODULE = 'game_master.ui' EMAIL_SUBJECT_PREFIX = '[Anthill: game_master] ' CACHES["default"]["LOCATION"] = "redis://localhost:6379/28" CACHES["default"]["KEY_PREFIX"] = "game_master.anthill" CACHES["controllers"] = { "BACKEND": "anthill.framework.core.cache.backends.redis.cache.RedisCache",