def upgrade():
    """Add a mutation-tracked JSON ``meta`` column to both tables."""
    # Each table needs its own Column object; Alembic binds it to the table.
    for table_name in ('catalog_star', 'observation'):
        meta_column = sa.Column('meta', MutableDict.as_mutable(JSON), default={})
        op.add_column(table_name, meta_column)
def downgrade():
    """Revert the catalog table: drop 'metajson', restore the HSTORE 'meta'."""
    op.drop_column('catalog', 'metajson')
    hstore_meta = sa.Column(
        'meta',
        MutableDict.as_mutable(HSTORE),
        nullable=False,
        default={},
        index=True,
    )
    op.add_column('catalog', hstore_meta)
def define_tables(cls, metadata):
    """Create the ``foo`` test table backed by a JSON-serialising VARCHAR type."""
    import json

    class JSONEncodedDict(TypeDecorator):
        # Stores Python dicts as JSON text in a VARCHAR(50) column.
        impl = VARCHAR(50)

        def process_bind_param(self, value, dialect):
            # Serialise to JSON on the way into the database; None passes through.
            if value is not None:
                value = json.dumps(value)
            return value

        def process_result_value(self, value, dialect):
            # Deserialise from JSON on the way out; None passes through.
            if value is not None:
                value = json.loads(value)
            return value

    MutableDict = cls._type_fixture()
    # 'data' is change-tracked via MutableDict; 'non_mutable_data' uses the
    # same encoding but without mutation tracking, for comparison in tests.
    Table('foo', metadata,
          Column('id', Integer, primary_key=True,
                 test_needs_autoincrement=True),
          Column('data', MutableDict.as_mutable(JSONEncodedDict)),
          Column('non_mutable_data', JSONEncodedDict),
          Column('unrelated_data', String(50))
          )
def define_tables(cls, metadata):
    """Build the ``foo`` table with one change-tracked pickled-dict column."""
    tracked_pickle = cls._type_fixture().as_mutable(PickleType)
    Table(
        'foo',
        metadata,
        Column('id', Integer, primary_key=True,
               test_needs_autoincrement=True),
        Column('data', tracked_pickle, default={}),
    )
def define_tables(cls, metadata):
    """Define ``foo`` with tracked and untracked pickled-dict columns."""
    dict_fixture = cls._type_fixture()
    tracked = dict_fixture.as_mutable(PickleType)
    # 'skip' and 'data' share the tracked type; 'non_mutable_data' stays plain.
    Table(
        'foo',
        metadata,
        Column('id', Integer, primary_key=True,
               test_needs_autoincrement=True),
        Column('skip', tracked),
        Column('data', tracked),
        Column('non_mutable_data', PickleType),
        Column('unrelated_data', String(50)),
    )
def define_tables(cls, metadata):
    """Create the ``foo`` table mixing tracked and plain pickle columns."""
    fixture = cls._type_fixture()
    pickled_dict = fixture.as_mutable(PickleType)
    Table(
        "foo",
        metadata,
        Column("id", Integer, primary_key=True,
               test_needs_autoincrement=True),
        # Both columns use the tracked type; tests exercise each separately.
        Column("skip", pickled_dict),
        Column("data", pickled_dict),
        Column("non_mutable_data", PickleType),
        Column("unrelated_data", String(50)),
    )
class NapalmRollbackService(Service):
    """Service that rolls back the last committed configuration via NAPALM."""

    __tablename__ = "NapalmRollbackService"
    id = Column(Integer, ForeignKey("Service.id"), primary_key=True)
    # Runs against selected target devices.
    has_targets = True
    driver = Column(String)
    driver_values = NAPALM_DRIVERS
    # When True, the device's own driver setting overrides `driver`
    # (presumably — confirm against napalm_connection()).
    use_device_driver = Column(Boolean, default=True)
    # Extra keyword arguments forwarded to the NAPALM driver.
    optional_args = Column(MutableDict.as_mutable(PickleType), default={})
    __mapper_args__ = {"polymorphic_identity": "NapalmRollbackService"}

    def job(self, payload: dict, device: Device) -> dict:
        """Open a NAPALM session to `device`, roll back, and close it."""
        napalm_driver = self.napalm_connection(device)
        napalm_driver.open()
        napalm_driver.rollback()
        napalm_driver.close()
        return {"success": True, "result": "Rollback successful"}
class Article(Base): __tablename__ = 'articles_with_extra' uuid = Column(UUID(as_uuid=True), primary_key=True, default=uuid4) date = Column(TIMESTAMP(timezone=True), nullable=False, index=True) title = Column(String, nullable=False) content = Column(String, nullable=False) extra = Column(MutableDict.as_mutable(JSONB), nullable=False, default={}) __table_args__ = ( Index('ix_article_with_extra_extra_slug', extra['slug'].astext), Index('ix_article_with_extra_extra_tag', extra['tags'], postgresql_using='gin' ), ) def __repr__(self): return f"""# {self.title}
class UserDetail(db.Model):
    """Detailed information about a specific user."""

    __tablename__ = 'user_details'

    user_id = db.Column(db.Uuid, db.ForeignKey('users.id'), primary_key=True)
    # One-to-one: each user has at most one detail record.
    user = db.relationship('User', backref=db.backref('detail', uselist=False))
    first_names = db.Column(db.UnicodeText)
    last_name = db.Column(db.UnicodeText)
    date_of_birth = db.Column(db.Date)
    country = db.Column(db.UnicodeText)
    zip_code = db.Column(db.UnicodeText)
    city = db.Column(db.UnicodeText)
    street = db.Column(db.UnicodeText)
    phone_number = db.Column(db.UnicodeText)
    internal_comment = db.Column(db.UnicodeText)
    # Free-form, mutation-tracked JSONB payload.
    extras = db.Column(MutableDict.as_mutable(db.JSONB))

    @property
    def full_name(self) -> str:
        # NOTE(review): returns None (not str) when both name parts are
        # empty, despite the `-> str` annotation — callers should handle None.
        names = [self.first_names, self.last_name]
        return ' '.join(filter(None, names)) or None

    @property
    def age(self) -> int:
        """Return the user's current age."""
        return calculate_age(self.date_of_birth, date.today())

    @property
    def days_until_next_birthday(self) -> int:
        """Return the number of days until the user's next birthday."""
        return calculate_days_until(self.date_of_birth, date.today())

    @property
    def is_birthday_today(self) -> bool:
        """Return `True` if today is the user's birthday."""
        return MonthDay.of(self.date_of_birth).matches(date.today())

    def __repr__(self) -> str:
        return ReprBuilder(self) \
            .add_with_lookup('user_id') \
            .add_with_lookup('first_names') \
            .add_with_lookup('last_name') \
            .build()
class AnsibleScript(Script):
    """Script that runs an Ansible playbook against a device."""

    __tablename__ = 'AnsibleScript'
    id = Column(Integer, ForeignKey('Script.id'), primary_key=True)
    vendor = Column(String)
    operating_system = Column(String)
    playbook_path = Column(String)
    # Extra command-line arguments, whitespace-separated.
    arguments = Column(String)
    # Pattern (or literal substring) the output must contain to succeed.
    content_match = Column(String)
    content_match_regex = Column(Boolean)
    options = Column(MutableDict.as_mutable(PickleType), default={})
    pass_device_properties = Column(Boolean)
    inventory_from_selection = Column(Boolean)
    device_multiprocessing = True
    __mapper_args__ = {
        'polymorphic_identity': 'ansible_playbook',
    }

    @multiprocessing
    def job(self, task, device, results, payloads):
        """Run ansible-playbook for `device`; success is decided by matching
        `content_match` (regex or substring) against the process output."""
        try:
            arguments = self.arguments.split()
            command = ['ansible-playbook']
            if self.pass_device_properties:
                # Pass device attributes to the playbook as extra vars.
                command.extend(['-e', dumps(device.properties)])
            if self.inventory_from_selection:
                # Ad-hoc single-host inventory; trailing comma is required.
                command.extend(['-i', device.ip_address + ','])
            command.append(self.playbook_path)
            result = check_output(command + arguments)
            try:
                result = result.decode('utf-8')
            except AttributeError:
                # Already a str (e.g. on older Python versions).
                pass
            if self.content_match_regex:
                success = bool(search(self.content_match, str(result)))
            else:
                success = self.content_match in str(result)
        except Exception as e:
            # Any failure (bad command, non-zero exit) is reported, not raised.
            success, result = False, str(e)
        return success, result, None
class MDMConfig(Base):
    """Configuration record for one MDM (Mobile Device Management) instance."""

    __tablename__ = 'mdm_config'

    id = Column(Integer, primary_key=True)
    prefix = Column(String, nullable=False, unique=True)
    # Additional free-form configuration, stored as JSON text.
    addl_config = Column(MutableDict.as_mutable(JSONEncodedDict), nullable=True)
    topic = Column(String, nullable=False)  # APNs Push Topic
    access_rights = Column(Integer, default=MDM_AR__ALL, nullable=False)
    mdm_url = Column(String, nullable=False)
    checkin_url = Column(String, nullable=False)
    mdm_name = Column(String, nullable=False)
    description = Column(String, nullable=True)
    ca_cert_id = Column(ForeignKey('certificate.id'))
    ca_cert = relationship('Certificate', foreign_keys=[ca_cert_id
                                                       ])  # , backref='ca_cert_mdm_config'
    push_cert_id = Column(ForeignKey('certificate.id'), nullable=False)
    push_cert = relationship(
        'Certificate',
        foreign_keys=[push_cert_id])  # , backref='push_cert_mdm_config'

    # note: we default to 'provide' here despite its lower security because
    # it requires no other dependencies, i.e. a better user experience
    device_identity_method = Column(Enum('ourscep', 'scep', 'provide'),
                                    default='provide', nullable=False)
    scep_url = Column(String, nullable=True)
    scep_challenge = Column(String, nullable=True)

    def base_url(self):
        """Return the base URL derived from `mdm_url`, or '' if unexpected."""
        # yuck, since we don't actually save the base URL in our MDMConfig we'll
        # have to compute it from the MDM URL by stripping off the trailing "/mdm"
        if self.mdm_url[-4:] == '/mdm':
            return self.mdm_url[:-4]
        else:
            return ''
class Task(CustomBase):
    """Base polymorphic task record; subclasses set their own identity."""

    __tablename__ = 'Task'

    id = Column(Integer, primary_key=True)
    name = Column(String, unique=True)
    # Stored as a string, not a DateTime column (set in __init__).
    creation_time = Column(String)
    status = Column(String)
    type = Column(String)
    user_id = Column(Integer, ForeignKey('User.id'))
    user = relationship('User', back_populates='tasks')
    # Mutation-tracked dict of per-run log output.
    logs = Column(MutableDict.as_mutable(PickleType), default={})
    __mapper_args__ = {'polymorphic_identity': 'Task', 'polymorphic_on': type}

    def __init__(self, **data):
        # New tasks start in the 'active' state; 'name' and 'user' are required.
        self.status = 'active'
        self.name = data['name']
        self.user = data['user']
        self.creation_time = str(datetime.now())
class NetworkGroup(Base):
    """A logical network (CIDR, gateway, VLAN, IP ranges) within a node group."""

    __tablename__ = 'network_groups'

    id = Column(Integer, primary_key=True)
    name = Column(String(50), nullable=False)
    # can be nullable only for fuelweb admin net
    release = Column(Integer, ForeignKey('releases.id'))
    # can be nullable only for fuelweb admin net
    group_id = Column(Integer, ForeignKey('nodegroups.id'), nullable=True)
    vlan_start = Column(Integer)
    cidr = Column(psql.CIDR)
    gateway = Column(psql.INET)
    # Ranges are owned by the group: deleting the group deletes its ranges.
    ip_ranges = relationship("IPAddrRange", backref="network_group",
                             cascade="all, delete, delete-orphan")
    # Many-to-many through the IPAddr table.
    nodes = relationship("Node", secondary=IPAddr.__table__,
                         backref="networks", passive_deletes=True)
    # Free-form, mutation-tracked metadata.
    meta = Column(MutableDict.as_mutable(JSON), default={})
class Model(NameMixin, ORMBase): """ Abstract ORM base class for models. .. note:: Inheritance is implemented following the `SQLAlchemy Joined Table Inheritance <https://docs.sqlalchemy.org/en/latest/orm/inheritance.html#joined-table-inheritance>`_ paradigm. """ # XXX(damb): default model configuration parameters config = Column(MutableDict.as_mutable(JSONEncodedDict)) enabled = Column(Boolean, default=True) _type = Column(Enum(EModel)) __mapper_args__ = { 'polymorphic_identity': 'model', 'polymorphic_on': _type, }
class Worst(BaseModel):
    """Per-team 'worst' statistic, unique per (team, label, date, period)."""

    __tablename__ = "worst"
    __table_args__ = (UniqueConstraint("team_id", "label", "date", "period",
                                       name="team_label_date_period_uc"), )
    __repr_fields__ = ("team", "label", "date", "period")

    team_id = db.Column(UUIDType(binary=False), db.ForeignKey("teams.id"),
                        nullable=False)
    team = db.relationship("Team", back_populates="worst")
    date = db.Column(db.DateTime(timezone=True), nullable=False)
    label = db.Column(db.String(255), nullable=False)
    period = db.Column(db.Enum(Periods), nullable=False, unique=False)
    # Mutation-tracked JSON payload with the actual statistic data.
    data = db.Column(MutableDict.as_mutable(JSONType), default={},
                     nullable=False)
class Launch(db.Model):
    """A named test launch belonging to a project, with a status and data blob."""

    __tablename__ = "launch"

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(300), nullable=False, unique=True)
    # Mutation-tracked JSON payload.
    data = db.Column(MutableDict.as_mutable(db.JSON))
    launch_status_id = db.Column(
        db.Integer, db.ForeignKey("launch_status.id"), nullable=False
    )
    # NOTE(review): the backref is named "launch_status" (same as the
    # relationship attribute) — presumably intentional; confirm it does not
    # clash on the LaunchStatus side.
    launch_status = db.relationship(
        "LaunchStatus", backref=db.backref("launch_status", lazy=True)
    )
    project_id = db.Column(db.Integer, db.ForeignKey("project.id"),
                           nullable=False)
    project = db.relationship(
        "Project",
        backref=db.backref("project", lazy=True,
                           cascade=Constants.cascade_relations),
    )

    def __repr__(self):
        return "<Launch {}>".format(self.name)
class NapalmRollbackService(Service):
    """Service that rolls back a device's configuration through NAPALM."""

    __tablename__ = 'NapalmRollbackService'
    id = Column(Integer, ForeignKey('Service.id'), primary_key=True)
    # Runs against selected target devices.
    has_targets = True
    driver = Column(String)
    driver_values = NAPALM_DRIVERS
    # Extra keyword arguments forwarded to the NAPALM driver.
    optional_args = Column(MutableDict.as_mutable(PickleType), default={})
    __mapper_args__ = {
        'polymorphic_identity': 'NapalmRollbackService',
    }

    def job(self, device, _):
        """Open a NAPALM session to `device`, roll back, and close it."""
        napalm_driver = napalm_connection(self, device)
        napalm_driver.open()
        napalm_driver.rollback()
        napalm_driver.close()
        return {'success': True, 'result': 'Rollback successful'}
class AnsibleScript(Script):
    """Script wrapping an Ansible playbook plus its selected options."""

    __tablename__ = 'AnsibleScript'
    id = Column(Integer, ForeignKey('Script.id'), primary_key=True)
    playbook_path = Column(String)
    # Mutation-tracked dict of recognised ansible options.
    options = Column(MutableDict.as_mutable(PickleType), default={})
    __mapper_args__ = {
        'polymorphic_identity': 'AnsibleScript',
    }

    def __init__(self, playbook_path, **data):
        # `data` values are lists (presumably raw HTML-form data — confirm
        # against callers); only the first element of each is kept.
        name = data['name'][0]
        super(AnsibleScript, self).__init__(name)
        self.playbook_path = playbook_path
        self.options = {}
        for key, value in data.items():
            # Keep only keys whitelisted in `ansible_options`.
            if key in ansible_options:
                self.options[key] = value[0] if value else None
class OpenstackConfig(Base):
    """Stored OpenStack configuration scoped to a cluster/node/role."""

    __tablename__ = 'openstack_configs'

    id = Column(Integer, primary_key=True)
    # Only the active record of a given scope is applied.
    is_active = Column(Boolean, nullable=False, default=True)
    config_type = Column(
        Enum(*consts.OPENSTACK_CONFIG_TYPES,
             name='openstack_config_types'),
        nullable=False)

    # asaprykin: In case there will be global configuration
    # nullable should be set to 'True'
    cluster_id = Column(Integer, ForeignKey('clusters.id'), nullable=False)
    node_id = Column(Integer, ForeignKey('nodes.id'), nullable=True)
    node_role = Column(String(consts.ROLE_NAME_MAX_SIZE), nullable=True)

    created_at = Column(DateTime, nullable=False, default=datetime.now)
    # Mutation-tracked JSON blob; empty object both client- and server-side.
    configuration = Column(
        MutableDict.as_mutable(JSON),
        nullable=False,
        default={},
        server_default='{}')
class Event(db.Model):
    """An SES delivery event (bounce, complaint, …) tied to a sent email."""

    __tablename__ = "ses_events"

    created = Column(DateTime, nullable=False, server_default=sql.func.now())
    # Deferred FK: the email row may be committed in the same transaction.
    email_id = Column(
        UUID(as_uuid=True),
        ForeignKey("ses_emails.id", deferrable=True, initially="DEFERRED"),
        nullable=False,
    )
    # Provider-assigned id; unique so duplicate notifications are rejected.
    event_id = Column(Text, nullable=False, unique=True, index=True)
    # Store the enum's *values* (not names) in the database.
    event_type = Column(
        Enum(EventTypes, values_callable=lambda x: [e.value for e in x]),
        nullable=False)
    # Raw event payload; defaults to an empty JSON object server-side.
    data = Column(MutableDict.as_mutable(JSONB), nullable=False,
                  server_default=sql.text("'{}'"))
class IterationService(Service):
    """Service that runs another job once per value in a computed list."""

    __tablename__ = "IterationService"
    id = Column(Integer, ForeignKey("Service.id"), primary_key=True)
    has_targets = Column(Boolean, default=False)
    iterated_job_id = Column(Integer, ForeignKey("Job.id"))
    iterated_job = relationship(
        "Job", primaryjoin="Job.id == IterationService.iterated_job_id"
    )
    # Either 'user_provided_values' (static, per-device) or a YAQL query.
    origin_of_values = Column(
        String(SMALL_STRING_LENGTH), default="user_provided_values"
    )
    yaql_query_values = Column(String(SMALL_STRING_LENGTH), default="")
    # Maps device name (or the key "all") to its list of iteration values.
    user_provided_values = Column(MutableDict.as_mutable(PickleType), default={})
    # Name under which each value is injected into the iterated job's payload.
    variable_name = Column(String(SMALL_STRING_LENGTH), default="value")
    __mapper_args__ = {"polymorphic_identity": "IterationService"}

    def get_properties(self, *args):
        return {"iterated_job": self.iterated_job.name,
                **super().get_properties(*args)}

    def job(
        self,
        payload: dict,
        device: Optional[Device] = None,
        parent: Optional[Job] = None,
    ) -> dict:
        """Run `iterated_job` for each value and merge the results."""
        if self.origin_of_values == "user_provided_values":
            # NOTE(review): dereferences device.name although `device` is
            # Optional — presumably this branch requires targets; confirm.
            if device.name in self.user_provided_values:
                values = self.user_provided_values[device.name]
            else:
                values = self.user_provided_values["all"]
        else:
            # Evaluate the YAQL query against the incoming payload.
            query = self.sub(self.yaql_query_values, locals())
            values = factory.YaqlFactory().create()(query).evaluate(data=payload)
        results = {
            value: self.iterated_job.job({self.variable_name: value, **payload},
                                         device)
            for value in values
        }
        return {"success": True, "Iteration values": values, **results}
class Source(Entity):
    """A configured data source feeding one or more pipelines."""

    __tablename__ = 'sources'

    id = Column(Integer, primary_key=True)
    name = Column(String)
    type = Column(String)
    # Mutation-tracked JSON configuration.
    config = Column(MutableDict.as_mutable(JSON))
    created_at = Column(TIMESTAMP(timezone=True), default=func.now())
    last_edited = Column(TIMESTAMP(timezone=True), default=func.now(),
                         onupdate=func.now())

    pipelines = relationship('Pipeline', back_populates='source_')

    def __init__(self, name: str, source_type: str, config: dict):
        # Non-column attributes; only set when constructed directly,
        # not when loaded from the database (SQLAlchemy skips __init__).
        self._previous_config = {}
        self.config = config
        self.type = source_type
        self.name = name
        self.sample_data = None

    def config_changed(self) -> bool:
        """Return True if config differs from the snapshot taken at init."""
        return self.config != self._previous_config

    # todo refactor
    def __getattr__(self, attr):
        # Fallback for 'sample_data' on DB-loaded instances (no __init__ ran).
        if attr == 'sample_data':
            return []
        raise AttributeError(
            f'type object {type(self)} has no attribute {attr}')

    def to_dict(self) -> dict:
        return {'name': self.name, 'type': self.type, 'config': self.config}

    # todo refactor children
    def set_config(self, config):
        self.config = config

    @property
    def query_timeout(self) -> int:
        # Default timeout of 300 seconds when not configured.
        return int(self.config.get('query_timeout', 300))
class DEPProfile(Base):
    """An Apple DEP profile linking an MDM config to a DEP config."""

    __tablename__ = 'dep_profile'

    id = Column(Integer, primary_key=True)
    mdm_config_id = Column(ForeignKey('mdm_config.id'), nullable=False)
    mdm_config = relationship('MDMConfig', backref='dep_profiles')
    dep_config_id = Column(ForeignKey('dep_config.id'), nullable=False)
    dep_config = relationship('DEPConfig', backref='dep_profiles')

    # DEP-assigned UUID for this DEP profile
    uuid = Column(
        String(36), index=True, nullable=True
    )  # should be unique but it's assigned to us so can't be null

    # Full profile payload, JSON-encoded and mutation-tracked.
    profile_data = Column(MutableDict.as_mutable(JSONEncodedDict),
                          nullable=False)

    def profile_name(self):
        """Return the human-readable name stored in the profile payload."""
        return self.profile_data['profile_name']
class Project(db.Model):
    """A project owning test suites and smart links, with a status."""

    __tablename__ = "project"

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(300), nullable=False, unique=True)
    # Mutation-tracked JSON payload.
    data = db.Column(MutableDict.as_mutable(db.JSON))
    project_status_id = db.Column(
        db.Integer, db.ForeignKey("project_status.id"), nullable=False
    )
    project_status = db.relationship(
        "ProjectStatus", backref=db.backref("project_status", lazy=True)
    )
    # Children are deleted along with the project (cascade).
    test_suites = db.relationship(
        "TestSuite", cascade=Constants.cascade_relations, backref="project"
    )
    smart_links = db.relationship(
        "SmartLinks", cascade=Constants.cascade_relations, backref="project"
    )

    def __repr__(self):
        return "<Project {}>".format(self.name)
class Blogpost(db.Model, DomainObject):
    """A blog post associated to a given project"""

    __tablename__ = 'blogpost'

    #: Blogpost ID
    id = Column(Integer, primary_key=True)
    #: UTC timestamp when the blogpost is created
    created = Column(Text, default=make_timestamp)
    #: UTC timestamp when the blogpost is updated
    updated = Column(Text, default=make_timestamp)
    #: Project.ID for the Blogpost
    project_id = Column(Integer, ForeignKey('project.id',
                                            ondelete='CASCADE'),
                        nullable=False)
    #: User.ID for the Blogpost
    user_id = Column(Integer, ForeignKey('user.id'))
    #: Title of the Blogpost
    title = Column(Unicode(length=255), nullable=False)
    #: Body of the Blogpost
    body = Column(UnicodeText, nullable=False)
    #: media_url Heading picture or cover for blogpost
    # Fix: `default=dict()` built ONE dict at class-definition time, shared
    # as the default for every insert; passing the callable `dict` makes
    # SQLAlchemy create a fresh dict per row.
    info = Column(MutableDict.as_mutable(JSONB), default=dict)
    #: Media URL with cover photo for the blog post
    media_url = Column(Text)
    #: Published flag
    published = Column(Boolean, nullable=False, default=False)

    @classmethod
    def public_attributes(self):
        """Return a list of public attributes."""
        return [
            'created', 'updated', 'project_id', 'id', 'user_id', 'title',
            'body', 'media_url', 'published'
        ]

    @classmethod
    def public_info_keys(self):
        """Return a list of public info keys."""
        return []
class NapalmPingService(Service):
    """Service running a NAPALM ``ping`` from a device and reporting success.

    Fix: the two adjacent f-string fragments in the log message lacked a
    separating space, producing "...from <source>to <destination>...".
    """

    __tablename__ = "NapalmPingService"
    id = Column(Integer, ForeignKey("Service.id"), primary_key=True)
    has_targets = True
    count = Column(Integer)
    driver = Column(String(5000))
    driver_values = NAPALM_DRIVERS
    use_device_driver = Column(Boolean, default=True)
    # Extra keyword arguments forwarded to the NAPALM driver.
    optional_args = Column(MutableDict.as_mutable(PickleType), default={})
    size = Column(Integer)
    destination_ip = Column(String(5000))
    source_ip = Column(String(5000))
    timeout = Column(Integer)
    ttl = Column(Integer)
    vrf = Column(String(5000))
    __mapper_args__ = {"polymorphic_identity": "NapalmPingService"}

    def job(self, payload: dict, device: Device) -> dict:
        """Ping `destination_ip` from `source_ip` on `device`; success means
        the driver's response contains a 'success' key."""
        napalm_driver = self.napalm_connection(device)
        napalm_driver.open()
        # Substitute runtime variables into the configured endpoints.
        destination = self.sub(self.destination_ip, locals())
        source = self.sub(self.source_ip, locals())
        self.logs.append(
            f"Running napalm ping from {source} "
            f"to {destination} on {device.ip_address}"
        )
        ping = napalm_driver.ping(
            destination=destination,
            source=source,
            vrf=self.vrf,
            # Unset (0/None) fields fall back to NAPALM-style defaults.
            ttl=self.ttl or 255,
            timeout=self.timeout or 2,
            size=self.size or 100,
            count=self.count or 5,
        )
        napalm_driver.close()
        return {"success": "success" in ping, "result": ping}
class CatalogStar(Base):
    """SQLAlchemy table for representing an object in a `catalog`.

    Observations are associated with the `observation` table.
    """

    __tablename__ = 'catalog_star'

    id = Column(Integer, primary_key=True)
    # Pixel coordinates.
    x = Column(Float)
    y = Column(Float)
    # Sky coordinates in degrees.
    ra = Column(Float)
    dec = Column(Float)
    cfrac = Column(Float)
    # Reference catalog we belong to (Catalog defines relationship)
    catalog_id = Column(Integer, ForeignKey('catalog.id', ondelete='CASCADE'))
    # Reference the star we associate to
    star_id = Column(Integer, ForeignKey('star.id'))
    star = relationship("Star", foreign_keys="[CatalogStar.star_id]",
                        backref=backref('catalog_stars', order_by=id))
    # Relationship to Observation
    observations = relationship("Observation", backref="catalog_star",
                                passive_deletes=True)
    # Free-form, mutation-tracked metadata.
    meta = Column(MutableDict.as_mutable(JSON), default={})

    def __init__(self, x, y, ra, dec, cfrac):
        self.x = x
        self.y = y
        # NOTE(review): these range checks are `assert`s and therefore
        # disabled under `python -O`.
        assert ra >= 0. and ra <= 360.
        assert dec >= -90. and dec <= 90.
        self.ra = ra
        self.dec = dec
        self.cfrac = cfrac

    def __repr__(self):
        return "<CatalogStar(%i)>" % self.id
class Trakt(base):
    """Settings row for the Trakt integration (one row of toggles + token)."""

    __tablename__ = 'trakt'

    id = Column(Integer, primary_key=True, autoincrement=True)
    username = Column(Text, default='')
    blacklist_name = Column(Text, default='')
    # OAuth token stored encrypted at rest, tracked for in-place mutation.
    oauth_token = Column(MutableDict.as_mutable(
        CustomStringEncryptedType(JSONType, key=encryption_key)), default={})
    remove_watchlist = Column(Boolean, default=False)
    remove_serieslist = Column(Boolean, default=False)
    remove_show_from_sickrage = Column(Boolean, default=False)
    sync_watchlist = Column(Boolean, default=False)
    method_add = Column(Enum(TraktAddMethod), default=TraktAddMethod.SKIP_ALL)
    start_paused = Column(Boolean, default=False)
    use_recommended = Column(Boolean, default=False)
    sync = Column(Boolean, default=False)
    sync_remove = Column(Boolean, default=False)
    series_provider_default = Column(Enum(SeriesProviderID),
                                     default=SeriesProviderID.THETVDB)
    # Request timeout in seconds.
    timeout = Column(Integer, default=30)
    enable = Column(Boolean, default=False)
class Order(Base):
    """A sales order, including its product payload and import/audit fields."""

    __tablename__ = 'orders'

    id = Column(Integer, primary_key=True)
    set_of_book = Column(Unicode(20))
    # External transaction identifier.
    trans_id = Column(Unicode(100))
    status = Column(Integer)
    e_id = Column(Unicode(50))
    dev_id = Column(Integer)
    create_time = Column(DateTime)
    update_time = Column(DateTime)
    total_amount = Column(Float)
    # Mutation-tracked JSON mapping of the ordered products.
    products = Column(MutableDict.as_mutable(JSON))
    # Audit trail: who created/updated the row and when.
    created_at = Column(DateTime)
    created_user = Column(Unicode(20))
    updated_at = Column(DateTime)
    updated_user = Column(Unicode(20))
    # Import bookkeeping.
    is_imported = Column(Boolean, default=False)
    imported_at = Column(DateTime)
    imported_user = Column(Unicode(50))
    retail_id = Column(Integer)
    retail_code = Column(Unicode(50))
class Proxy(Base):
    """A scraped proxy server with protocol, anonymity, speed and score data."""

    __tablename__ = 'proxy'

    id = Column(Integer, primary_key=True)
    ip = Column(String(64))
    port = Column(String(64))
    # http: 0, https: 1, http/https: 2
    protocol = Column(Integer, default=-1)
    # anonymity level — elite: 0, anonymous: 1, transparent: 2
    nick_type = Column(Integer, default=-1)
    # speed: -1 means the IP is unusable
    speed = Column(Float, default=-1)
    area = Column(String(255), default='')
    # Mutation-tracked score record starting at the configured maximum.
    score = Column(MutableDict.as_mutable(JSON), default={
        'score': settings.MAX_SCORE,
        'power': 0
    })
    # domains for which this proxy IP does not work
    disable_domain = Column(MutableList.as_mutable(JSON), default=[])
    # Source site the proxy was scraped from.
    origin = Column(String(128), default='')
    create_time = Column(DateTime(timezone=True), default=func.now())
class SearchProvidersMixin(object):
    """Shared columns for search-provider settings tables."""

    id = Column(Integer, primary_key=True, autoincrement=True)
    provider_id = Column(Text, unique=True)
    sort_order = Column(Integer, default=0)
    # 'eponly' = search by episode only (presumably — confirm against usage).
    search_mode = Column(Text, default='eponly')
    search_separator = Column(Text, default=' ')
    cookies = Column(Text, default='')
    # Comma-separated tags recognised as proper releases.
    proper_strings = Column(Text, default=','.join(
        ['PROPER', 'REPACK', 'REAL', 'RERIP']))
    private = Column(Boolean, default=False)
    supports_backlog = Column(Boolean, default=True)
    supports_absolute_numbering = Column(Boolean, default=False)
    anime_only = Column(Boolean, default=False)
    search_fallback = Column(Boolean, default=False)
    enable_daily = Column(Boolean, default=True)
    enable_backlog = Column(Boolean, default=True)
    enable_cookies = Column(Boolean, default=False)
    # Provider-specific settings, encrypted at rest and mutation-tracked.
    custom_settings = Column(MutableDict.as_mutable(
        CustomStringEncryptedType(JSONType, key=encryption_key)), default={})
    enable = Column(Boolean, default=False)
class AssetData(db.Model):
    """Key-value (HSTORE) data attached to a uniquely named asset."""

    __tablename__ = "asset_data_hstore"

    id = db.Column(db.Integer, primary_key=True)
    asset_name = db.Column(db.String(64), index=True, unique=True)
    # PostgreSQL HSTORE payload, tracked for in-place mutation.
    asset_data = db.Column(MutableDict.as_mutable(HSTORE))

    def __repr__(self):
        return f"<Data Stream {self.asset_name}, {self.asset_data}>"

    @classmethod
    def return_all(cls):
        """Return every row serialised as {'DataStreams': [...]}."""
        def to_json(x):
            return {"asset name": x.asset_name, "asset data": x.asset_data}

        return {
            "DataStreams": list(map(lambda x: to_json(x), AssetData.query.all()))
        }
def upgrade():
    """Create the 'job' and 'plugin_settings' tables."""
    op.create_table(
        "job",
        sa.Column("id", sa.Integer, primary_key=True),
        sa.Column("job_id", sa.String),
        sa.Column("state", sa.Enum(State, name="job_state")),
        sa.Column("started_at", sa.DateTime),
        sa.Column("ended_at", sa.DateTime),
        # JSON-encoded payload, tracked for in-place mutation.
        sa.Column("payload", MutableDict.as_mutable(JSONEncodedDict)),
        sa.Column("payload_flags", IntFlag, default=0),
    )
    op.create_table(
        "plugin_settings",
        sa.Column("label", sa.String(), nullable=True),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("name", sa.String(), nullable=False),
        sa.Column("namespace", sa.String(), nullable=True),
        sa.Column("value", sa.PickleType(), nullable=True),
        sa.Column("enabled", sa.Boolean(), nullable=True),
        # Settings are addressed by (name, namespace).
        sa.PrimaryKeyConstraint("name", "namespace"),
    )
class ClusterPlugins(Base):
    """Association of a plugin with a cluster, plus its per-cluster settings."""

    __tablename__ = 'cluster_plugins'

    id = Column(Integer, primary_key=True)
    plugin_id = Column(Integer,
                       ForeignKey('plugins.id', ondelete='CASCADE'),
                       nullable=False)
    cluster_id = Column(Integer,
                        ForeignKey('clusters.id', ondelete='CASCADE'),
                        nullable=False)
    enabled = Column(Boolean,
                     nullable=False,
                     default=False,
                     server_default='false')
    # Initially, 'attributes' is a copy of 'Plugin.attributes_metadata'.
    # We need this column in order to store in there the modified (by user)
    # version of attributes, because we don't want to store them in cluster
    # attributes with no chance to remove.
    attributes = Column(MutableDict.as_mutable(JSON),
                        nullable=False,
                        server_default='{}')
class NapalmPingService(Service):
    """Service running a NAPALM ``ping`` from a device and reporting success.

    Fix: the two adjacent f-string fragments in the log message lacked a
    separating space, producing "...from <source>to <destination>...".
    """

    __tablename__ = "NapalmPingService"
    id = Column(Integer, ForeignKey("Service.id"), primary_key=True)
    has_targets = True
    count = Column(Integer, default=0)
    driver = Column(String(SMALL_STRING_LENGTH), default="")
    use_device_driver = Column(Boolean, default=True)
    # Extra keyword arguments forwarded to the NAPALM driver.
    optional_args = Column(MutableDict.as_mutable(PickleType), default={})
    packet_size = Column(Integer, default=0)
    destination_ip = Column(String(SMALL_STRING_LENGTH), default="")
    source_ip = Column(String(SMALL_STRING_LENGTH), default="")
    timeout = Column(Integer, default=0)
    ttl = Column(Integer, default=0)
    vrf = Column(String(SMALL_STRING_LENGTH), default="")
    __mapper_args__ = {"polymorphic_identity": "NapalmPingService"}

    def job(self, payload: dict, device: Device,
            parent: Optional[Job] = None) -> dict:
        """Ping `destination_ip` from `source_ip` on `device`; success means
        the driver's response contains a 'success' key."""
        napalm_connection = self.napalm_connection(device, parent)
        # Substitute runtime variables into the configured endpoints.
        destination = self.sub(self.destination_ip, locals())
        source = self.sub(self.source_ip, locals())
        self.logs.append(
            f"Running napalm ping from {source} "
            f"to {destination} on {device.ip_address}")
        ping = napalm_connection.ping(
            destination=destination,
            source=source,
            vrf=self.vrf,
            # Unset (0) fields fall back to NAPALM-style defaults.
            ttl=self.ttl or 255,
            timeout=self.timeout or 2,
            size=self.packet_size or 100,
            count=self.count or 5,
        )
        return {"success": "success" in ping, "result": ping}
class TestRun(db.Model):
    """A single execution of tests within a launch, with status and timing."""

    __tablename__ = "test_run"

    id = db.Column(db.Integer, primary_key=True)
    # Mutation-tracked JSON payload.
    data = db.Column(MutableDict.as_mutable(db.JSON))
    start_datetime = db.Column(db.DateTime)
    end_datetime = db.Column(db.DateTime)
    test_type = db.Column(db.String(100), nullable=False)
    environment = db.Column(db.String(2000))
    test_run_status_id = db.Column(
        db.Integer, db.ForeignKey("test_run_status.id"), nullable=False
    )
    test_run_status = db.relationship(
        "TestRunStatus", backref=db.backref("test_run_status", lazy=True)
    )
    launch_id = db.Column(db.Integer, db.ForeignKey("launch.id"),
                          nullable=False)
    launch = db.relationship(
        "Launch",
        backref=db.backref("launch", lazy=True,
                           cascade=Constants.cascade_relations),
    )

    def __repr__(self):
        return "<TestRun {}>".format(self.id)
impl = VARCHAR def process_bind_param(self, value, dialect): if value is not None: value = str(value) return value def process_result_value(self, value, dialect): if value is not None: value = ast.literal_eval(value) return value ARRAY_TYPE = Array() JSON_TYPE = MutableDict.as_mutable(JSONEncodedDict) BASE = declarative_base() class Product(BASE): """docstring for Product.""" __tablename__ = "products" id = Column('id', Integer, primary_key=True, autoincrement=False) style_no = Column('style_no', String) title = Column('title', String) keywords = Column('keywords', ARRAY_TYPE) owner = Column('owner', String) modify_time = Column('modify_time', Date) update = Column('update', Date, default=date.today)
def hstore_table_for(self, name):
    """Return a table with an integer PK and a mutation-tracked HSTORE column."""
    return sa.Table(
        name,
        self.Base.metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('data', MutableDict.as_mutable(pg.HSTORE)),
    )
def get_data_element_postgres_extensions(task, indexes):
    """Fetch task.data (optionally indexed, e.g. a JSON path) server-side."""
    if indexes:
        q = task.__class__.data[indexes]
    else:
        q = task.__class__.data
    s = object_session(task)
    # Query only the requested expression for this task's row.
    tup = s.query(q).filter_by(id=task.id).one()
    return tup[0]


class json_array_length(GenericFunction):
    # SQL function wrapper: renders as json_array_length(...), typed Integer.
    type = Integer


def get_data_size_postgres_extensions(task, indexes):
    """Compute the JSON array length of task.data server-side."""
    if indexes:
        q = task.__class__.data[indexes]
    else:
        q = task.__class__.data
    s = object_session(task)
    tup = s.query(json_array_length(q)).filter_by(id=task.id).one()
    return tup[0]


# Module-level aliases selecting the PostgreSQL-backed implementations.
MutableJSONDict = MutableDict.as_mutable(psqlJSON)
JSON = psqlJSON
get_data_element = get_data_element_postgres_extensions
get_data_size = get_data_size_postgres_extensions
#XXX NOTICE XXX DO NOT NAME THINGS types.py it breaks EVERYTHING
from sqlalchemy.types import PickleType
from sqlalchemy.dialects import postgres,postgresql
from sqlalchemy.ext.mutable import MutableDict


def array_base(column_type):
    """Return a PickleType that uses a native ARRAY(column_type) on PostgreSQL.

    Fix: ``with_variant()`` returns a NEW type object; the previous code
    discarded that return value, so the PostgreSQL variants never applied.
    """
    array = PickleType()
    array = array.with_variant(postgres.ARRAY(column_type), 'postgres')
    array = array.with_variant(postgres.ARRAY(column_type), 'postgresql')
    return array


Array = array_base

# Dict type: pickled by default, HSTORE on PostgreSQL.  Same fix as above:
# the variant result must be re-assigned (as_mutable() returns a type
# *instance*, so with_variant is a bound call here).
_DictType = MutableDict.as_mutable(PickleType)
_DictType = _DictType.with_variant(
    MutableDict.as_mutable(postgres.HSTORE), 'postgres')
#_DictType.with_variant(MutableDict.as_mutable(postgresql.HSTORE), 'postgresql')
#_DictType.with_variant(MutableDict.as_mutable(postgresql.HSTORE), 'psycopg2')
#_DictType.with_variant(MutableDict.as_mutable(postgresql.HSTORE), 'postgresql+psycopg2')
DictType = _DictType

__all__ = [
    'Array',
    'DictType',
]

#ArrayFloat = PickleType()
#ArrayFloat.with_variant(postgresql.ARRAY(Float), 'postgresql')

#ArrayString = PickleType()
#ArrayString.with_variant(postgresql.ARRAY(String), 'postgresql')
@author: peterb ''' from blueshed.model_helpers.sqla_views import view from blueshed.model_helpers.sql_extensions import JSONEncodedDict from sqlalchemy.ext.mutable import MutableDict from sqlalchemy.sql.expression import select, join from sqlalchemy.sql.functions import func from examples.simple.model import * from blueshed.model_helpers.access_model import Person, Permission,\ person_permissions_permission Person._token = Column(String(80)) Person._preferences = Column(MutableDict.as_mutable(JSONEncodedDict(255))) Person.firstname = Column(String(80)) Person.lastname = Column(String(80)) Person.photo = Column(String(128)) ''' An example View ''' q = select([Person.id.label('id'), Person.email.label('email'), func.count(Permission.id).label('permission_count')]).\ select_from(join(Person, person_permissions_permission, Person.id==person_permissions_permission.c.permissions_id).\ join(Permission, Permission.id==person_permissions_permission.c.permission_id)).\
def upgrade():
    """Add a mutation-tracked JSON 'metajson' column to the catalog table."""
    op.add_column(
        'catalog',
        sa.Column('metajson', MutableDict.as_mutable(JSON), default={}),
    )