def run(self, runtime):
    """Deploy the configured schema, optionally dropping it first.

    Reads the target schema name and the drop flag from this command's
    options, reports progress through the runtime, and delegates the
    actual work to the spire schema interface.
    """
    from spire.schema import Schema

    schema_name = self['schema']
    iface = Schema.interface(schema_name)

    # Destroy any existing schema first when 'drop' was requested.
    if self['drop']:
        runtime.report('dropping schema %r' % schema_name)
        iface.drop_schema()

    runtime.report('deploying schema %r to %r' % (schema_name, iface.url))
    iface.deploy_schema()
def run(self, runtime):
    """Create (or recreate) the requested schemas.

    Schema names come from the 'schemas' option; when none are specified,
    every schema declared in the assembly configuration is used instead.
    Unless 'incremental' is set, each schema is dropped before creation.
    """
    schemas = self['schemas']
    if schemas is None:
        schemas = []
        # Fall back to the schemas declared in the assembly configuration.
        # .items() instead of the Python-2-only .iteritems() keeps this
        # working on Python 3 and is behaviorally equivalent on Python 2.
        for token, conf in self.assembly.filter_configuration('schema').items():
            schemas.append(conf['schema'])

    if not schemas:
        runtime.report('no schemas specified or configured; aborting')
        return

    from spire.schema import Schema
    for name in schemas:
        interface = Schema.interface(name)
        # Non-incremental runs start from a clean slate.
        if not self['incremental']:
            interface.drop_schema()
        runtime.report('creating %r schema' % name)
        interface.create_schema()
def run(self, runtime):
    """Create (or recreate) the requested schemas.

    Schema names come from the 'schemas' option; when none are specified,
    every schema declared in the assembly configuration is used instead.
    Unless 'incremental' is set, each schema is dropped before creation.
    """
    schemas = self['schemas']
    if schemas is None:
        schemas = []
        # Fall back to the schemas declared in the assembly configuration.
        # .items() instead of the Python-2-only .iteritems() keeps this
        # working on Python 3 and is behaviorally equivalent on Python 2.
        for token, conf in self.assembly.filter_configuration(
                'schema').items():
            schemas.append(conf['schema'])

    if not schemas:
        runtime.report('no schemas specified or configured; aborting')
        return

    from spire.schema import Schema
    for name in schemas:
        interface = Schema.interface(name)
        # Non-incremental runs start from a clean slate.
        if not self['incremental']:
            interface.drop_schema()
        runtime.report('creating %r schema' % name)
        interface.create_schema()
from scheme import current_timestamp
from spire.core import Component, Dependency
from spire.exceptions import TemporaryStartupError
from spire.mesh import MeshDependency, MeshServer
from spire.runtime import current_runtime, onstartup
from spire.schema import Schema, SchemaDependency

from docket import models
from docket.bindings import platoon
from docket.bundles import BUNDLES
from docket.engine.archetype_registry import ArchetypeRegistry
from docket.engine.registry import EntityRegistry
from docket.resources import *

# Primary schema handle for the docket service.
schema = Schema('docket')

# Mesh bindings against the platoon scheduling service.
# NOTE(review): `bind` and `datetime` are not imported explicitly in this
# module; presumably they arrive via `from docket.resources import *` —
# confirm against docket.resources.
RecurringTask = bind(platoon, 'platoon/1.0/recurringtask')
Schedule = bind(platoon, 'platoon/1.0/schedule')

# Fixed-interval schedule: fires every 21600 seconds (six hours),
# anchored at midnight 2000-01-01.
EVERY_SIX_HOURS = Schedule(id='c53628ff-7b48-4f60-ba56-bea431fc7da2',
    name='every six hours', schedule='fixed',
    anchor=datetime(2000, 1, 1, 0, 0, 0), interval=21600)

# Recurring platoon task that re-synchronizes all entities on the
# schedule above; retry_limit=0 means failed runs are not retried.
SYNC_ALL_ENTITIES = RecurringTask(id='7d715e10-0f00-476d-ace1-dc896d7da3e5',
    tag='synchronize-all-entities', schedule_id=EVERY_SIX_HOURS.id,
    retry_limit=0)
from alembic import context
from spire.schema import Schema

# Schema interface shared by both migration modes.
interface = Schema.interface('narrative')


def run_migrations_offline():
    """Emit migration SQL from the bare URL, without a DBAPI connection."""
    context.configure(url=interface['url'])
    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Apply migrations over a live engine connection."""
    conn = interface.get_engine().connect()
    context.configure(
        connection=conn,
        target_metadata=interface.schema.metadata,
        compare_type=True,
        sqlalchemy_module_prefix=None)
    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        # Release the connection whether or not migrations succeeded.
        conn.close()


# Dispatch on the mode alembic was invoked in.
(run_migrations_offline if context.is_offline_mode() else run_migrations_online)()
from alembic import context
from spire.schema import Schema

# Schema interface for the 'platoon' database.
interface = Schema.interface('platoon')

def run_migrations_offline():
    """Run migrations in offline mode.

    Offline mode must not open a DBAPI connection: the context is
    configured with the bare database URL so alembic emits SQL instead
    of executing it. The original body mistakenly duplicated the online
    path (it opened an engine connection), which defeats offline mode;
    this now matches the sibling env.py modules for the other schemas.
    """
    context.configure(url=interface['url'])
    with context.begin_transaction():
        context.run_migrations()

def run_migrations_online():
    """Run migrations against a live engine connection."""
    engine = interface.get_engine()
    connection = engine.connect()
    context.configure(connection=connection, compare_type=True,
        target_metadata=interface.schema.metadata,
        sqlalchemy_module_prefix=None)
    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        # Always release the connection, even if migrations fail.
        connection.close()

# NOTE(review): the sibling env.py files end with an
# `if context.is_offline_mode(): ...` dispatch; it is absent from the
# visible text here — confirm whether this file is truncated or
# genuinely never invokes either function.
from alembic import context
from spire.schema import Schema

# Schema interface for the 'platoon' database.
interface = Schema.interface('platoon')

def run_migrations_offline():
    """Run migrations in offline mode.

    Offline mode must not open a DBAPI connection: the context is
    configured with the bare database URL so alembic emits SQL instead
    of executing it. The original body mistakenly duplicated the online
    path (it opened an engine connection); this now matches the sibling
    env.py modules for the other schemas.
    """
    context.configure(url=interface['url'])
    with context.begin_transaction():
        context.run_migrations()

def run_migrations_online():
    """Run migrations against a live engine connection."""
    engine = interface.get_engine()
    connection = engine.connect()
    context.configure(connection=connection, compare_type=True,
        target_metadata=interface.schema.metadata,
        sqlalchemy_module_prefix=None)
    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        # The visible text was cut off inside this `try`; the companion
        # platoon env.py completes it with this cleanup, so the
        # connection is released even when migrations fail.
        connection.close()

# NOTE(review): the sibling env.py files end with an
# `if context.is_offline_mode(): ...` dispatch; it is absent from the
# visible (truncated) text here — confirm against the full file.
from alembic import context
from spire.schema import Schema

# Interface to the 'docket' schema used by both migration modes.
interface = Schema.interface('docket')


def run_migrations_offline():
    """Generate migration SQL from the URL alone; no database connection."""
    context.configure(url=interface['url'])
    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Apply migrations through a live connection to the docket database."""
    engine = interface.get_engine()
    cxn = engine.connect()
    context.configure(connection=cxn, compare_type=True,
        sqlalchemy_module_prefix=None,
        target_metadata=interface.schema.metadata)
    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        # Ensure the connection is closed on both success and failure.
        cxn.close()


# Choose the migration mode alembic was started in.
if not context.is_offline_mode():
    run_migrations_online()
else:
    run_migrations_offline()
from alembic import context
from spire.schema import Schema

# Interface to the 'tap' schema used by both migration modes.
interface = Schema.interface('tap')


def run_migrations_offline():
    """Emit migration SQL using only the database URL."""
    context.configure(url=interface['url'])
    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations over a real connection to the tap database."""
    conn = interface.get_engine().connect()
    context.configure(
        connection=conn,
        compare_type=True,
        target_metadata=interface.schema.metadata,
        sqlalchemy_module_prefix=None)
    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        # Close the connection regardless of migration outcome.
        conn.close()


# Dispatch on alembic's invocation mode.
(run_migrations_offline if context.is_offline_mode() else run_migrations_online)()
from alembic import context
from spire.schema import Schema

# Schema interface for the 'flux' database.
interface = Schema.interface('flux')

def run_migrations_offline():
    """Emit migration SQL using only the database URL (no connection)."""
    context.configure(url=interface['url'])
    with context.begin_transaction():
        context.run_migrations()

def run_migrations_online():
    """Run migrations against a live engine connection."""
    engine = interface.get_engine()
    connection = engine.connect()
    context.configure(connection=connection, compare_type=True,
        target_metadata=interface.schema.metadata,
        sqlalchemy_module_prefix=None)
    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        # Always release the connection, even if migrations fail.
        connection.close()

if context.is_offline_mode():
    run_migrations_offline()
else:
    # The visible text ended at a dangling `else:`; the identical
    # sibling flux env.py completes it with this call.
    run_migrations_online()
from datetime import datetime
from scheme import Integer, UTC
from spire.core import Component, Configuration
from spire.mesh import MeshServer
from spire.schema import Schema

from platoon import models
from platoon.bundles import API
from platoon.queue import TaskQueue

# Primary schema handle for the platoon service.
schema = Schema('platoon')

class APIServer(MeshServer):
    # Mesh server subclass serving the platoon API bundles; no extra
    # behavior beyond the base MeshServer.
    pass

class Platoon(Component):
    """Top-level platoon service component."""

    # Lifetimes are integer values (per the Integer fields below);
    # presumably days — confirm against the purge logic that consumes them.
    configuration = Configuration({
        'completed_event_lifetime': Integer(nonnull=True, default=30),
        'completed_task_lifetime': Integer(nonnull=True, default=30),
    })

    # Deploy the API server at the root path with the API bundle.
    api = APIServer.deploy(bundles=[API], path='/')

@schema.constructor()
def bootstrap_purge_task(session):
    # Seeds a fixed-id purge schedule when the schema is constructed.
    schedule = models.Schedule(
        id='00000000-0000-0000-0000-000000000001',
        name='Purge Schedule',
        schedule='fixed',
        anchor=datetime(2000, 1, 1, 2, 0, 0, tzinfo=UTC),
        # NOTE(review): the visible source is truncated here — the
        # models.Schedule(...) call is cut off mid-arguments; the
        # remainder of this constructor is outside the reviewed text.
from alembic import context
from spire.schema import Schema

# Interface to the 'keg' schema used by both migration modes.
interface = Schema.interface('keg')


def run_migrations_offline():
    """Generate migration SQL from the URL alone; no database connection."""
    context.configure(url=interface['url'])
    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Apply migrations through a live connection to the keg database."""
    engine = interface.get_engine()
    cxn = engine.connect()
    context.configure(connection=cxn,
        target_metadata=interface.schema.metadata,
        sqlalchemy_module_prefix=None,
        compare_type=True)
    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        # Close the connection whether migrations succeeded or not.
        cxn.close()


# Select the migration mode alembic was started in.
if not context.is_offline_mode():
    run_migrations_online()
else:
    run_migrations_offline()
from alembic import context
from spire.schema import Schema

# Interface to the 'flux' schema used by both migration modes.
interface = Schema.interface('flux')


def run_migrations_offline():
    """Emit migration SQL using only the database URL."""
    context.configure(url=interface['url'])
    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations over a real connection to the flux database."""
    conn = interface.get_engine().connect()
    context.configure(
        connection=conn,
        compare_type=True,
        target_metadata=interface.schema.metadata,
        sqlalchemy_module_prefix=None)
    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        # Release the connection regardless of migration outcome.
        conn.close()


# Dispatch on alembic's invocation mode.
(run_migrations_offline if context.is_offline_mode() else run_migrations_online)()