def define_tables(cls, metadata):
    Table('foo', metadata,
          Column('id', Integer, sa.Sequence('foo_id_seq'), primary_key=True),
          Column('bar', Integer),
          Column('range', Integer))
    :param engine: the sql engine
    """
    meta.engine = engine
    meta.Session.configure(bind=engine)
    return


token_table = sa.Table(
    'Token', meta.metadata,
    sa.Column('LinOtpTokenId', sa.types.Integer(),
              sa.Sequence('token_seq_id', optional=True),
              primary_key=True, nullable=False),
    sa.Column('LinOtpTokenDesc', sa.types.Unicode(80), default=u''),
    sa.Column('LinOtpTokenSerialnumber', sa.types.Unicode(40), default=u'',
              unique=True, nullable=False, index=True),
    sa.Column('LinOtpTokenType', sa.types.Unicode(30), default=u'HMAC',
              index=True),
    sa.Column('LinOtpTokenInfo', sa.types.Unicode(2000), default=u''),
    # # encrypt
""" log = __import__("logging").getLogger("bungeni.models.schema") import re import sqlalchemy as rdb from fields import FSBlob from sqlalchemy.sql import text, functions from datetime import datetime import domain import interfaces metadata = rdb.MetaData() # bills, motions, questions ItemSequence = rdb.Sequence("item_sequence") # users and groups because of the zope users and groups PrincipalSequence = rdb.Sequence("principal_sequence") # !+PARAMETRIZABLE_DOCTYPES def un_camel(name): """Convert a CamelCase name to lowercase underscore-separated. """ s1 = un_camel.first_cap_re.sub(r"\1_\2", name) return un_camel.all_cap_re.sub(r"\1_\2", s1).lower() un_camel.first_cap_re = re.compile("(.)([A-Z][a-z]+)")
import sqlalchemy as rdb

metadata = rdb.MetaData()

# for users and groups because of the zope users and groups
PrincipalSequence = rdb.Sequence('principal_sequence')

users = rdb.Table(
    "users", metadata,
    rdb.Column("user_id", rdb.Integer, PrincipalSequence, primary_key=True),
    rdb.Column("login", rdb.Unicode(16), unique=True, nullable=True),
    rdb.Column("first_name", rdb.Unicode(80), nullable=False),
    rdb.Column("last_name", rdb.Unicode(80), nullable=False),
    rdb.Column("middle_name", rdb.Unicode(80)),
    rdb.Column("email", rdb.String(32), nullable=False),
)


class Users(object):
    def checkPassword(self, *args, **kw):
        return True
def setRegistryNumber(item):
    session = Session()
    connection = session.connection(domain.ParliamentaryItem)
    sequence = rdb.Sequence('registry_number_sequence')
    item.registry_number = connection.execute(sequence)
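# The snippet above relies on SQLAlchemy's legacy behaviour where executing a
# Sequence directly returns its next value. A minimal sketch of the same
# pattern in the SQLAlchemy 1.4+/2.0 style, assuming a PostgreSQL engine
# (standalone sequences need a backend that supports them); the connection
# URL below is an illustrative assumption, not part of the original code:
import sqlalchemy as rdb
from sqlalchemy import select

engine = rdb.create_engine("postgresql+psycopg2://user:pass@localhost/db")
sequence = rdb.Sequence("registry_number_sequence")

with engine.connect() as connection:
    # Renders "SELECT nextval('registry_number_sequence')" on PostgreSQL.
    next_number = connection.scalar(select(sequence.next_value()))
    print(next_number)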
class WellLogValue(Base, AbstractDBObject):
    """
    This class connects the logging values to their related depth

    :param depth: depth of the log value
    :param value: log value at the specified depth
    :param args: parameters for AbstractDBObject initialisation
    :param kwargs: parameters for AbstractDBObject initialisation
    :return: Nothing
    :raises ValueError: if a parameter has a wrong type or cannot be converted to the required type
    """
    # define db table name and columns
    __tablename__ = "logging_association"

    id = sq.Column(sq.INTEGER, sq.Sequence("logging_association_id_seq"), primary_key=True)
    log_depth = sq.Column(sq.FLOAT)
    log_value = sq.Column(sq.FLOAT)
    log_id = sq.Column(sq.INTEGER, sq.ForeignKey("well_logs.id"), default=-1)

    sq.Index("welllogdepth_index", log_depth)

    def __init__(self, depth: float, value: float, *args, **kwargs) -> None:
        """
        Initialise the class
        """
        self.depth = depth
        self.value = value
        AbstractDBObject.__init__(self, *args, **kwargs)

    def __repr__(self) -> str:
        text = "<WellLogValue(id='{}', depth='{}', value='{}', log_id='{}'),\n" \
            .format(self.id, self.depth, self.value, self.log_id)
        text += "Additional DBObject: {}>".format(AbstractDBObject.__repr__(self))
        return text

    def __str__(self) -> str:
        text = "[{}] {}: {} (well_log.id: {})\n" \
            .format(self.id, self.depth, self.value, self.log_id)
        text += "DBObject: {}".format(AbstractDBObject.__str__(self))
        return text

    @property
    def depth(self) -> float:
        """
        depth of the log value

        :raises ValueError: if log_depth is not of type float or cannot be converted to it
        """
        return float(self.log_depth)

    @depth.setter
    def depth(self, log_depth: float) -> None:
        """
        see getter
        """
        self.log_depth = float(log_depth)

    @property
    def value(self) -> float:
        """
        log value

        :raises ValueError: if log_value is not of type float or cannot be converted to it
        """
        return float(self.log_value)

    @value.setter
    def value(self, log_value: float) -> None:
        """
        see getter
        """
        self.log_value = float(log_value)
# revision identifiers, used by Alembic.
revision = '8957d4adbc77'
down_revision = 'c223cce1a413'

from datetime import datetime

from alembic import op
import sqlalchemy as sa

group = sa.Table(
    'groups', sa.MetaData(),
    sa.Column('group_id', sa.Integer, sa.Sequence('seq__groups__group_id'),
              autoincrement=True, primary_key=True),
    sa.Column('group_name', sa.Unicode(16), unique=True, nullable=False),
    sa.Column('display_name', sa.Unicode(255)),
    sa.Column('created', sa.DateTime, default=datetime.utcnow),
)


def upgrade():
    connection = op.get_bind()
    connection.execute(
        group.update().where(group.c.group_name == 'managers').values(
            group_name='trusted-users',
            display_name='Trusted Users',
        ))
from sqlalchemy.schema import DropSequence


def downgrade():
    op.drop_table('menu_items_translation')
    op.drop_table('menu_items')
    op.execute(DropSequence(sa.Sequence(name='menu_items_id_seq')))
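# The matching upgrade() for such a downgrade would typically emit the
# CREATE SEQUENCE before creating the tables. A hedged sketch only: the
# sequence name comes from the snippet above, but the table layout here is
# an assumption for illustration, not the project's actual schema:
from alembic import op
import sqlalchemy as sa
from sqlalchemy.schema import CreateSequence


def upgrade():
    op.execute(CreateSequence(sa.Sequence('menu_items_id_seq')))
    op.create_table(
        'menu_items',
        sa.Column('id', sa.Integer,
                  server_default=sa.text("nextval('menu_items_id_seq')"),
                  primary_key=True),
    )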
def upgrade_resource(migrate_engine):
    meta = sqlalchemy.MetaData(bind=migrate_engine)
    res_table = sqlalchemy.Table('resource', meta, autoload=True)

    res_uuid_column_kwargs = {}
    if migrate_engine.name == 'ibm_db_sa':
        # NOTE(mriedem): DB2 10.5 doesn't support unique constraints over
        # nullable columns, it creates a unique index instead, so we have
        # to make the uuid column non-nullable in the DB2 case.
        res_uuid_column_kwargs['nullable'] = False
    res_uuid = sqlalchemy.Column('uuid', sqlalchemy.String(length=36),
                                 default=lambda: str(uuid.uuid4()),
                                 **res_uuid_column_kwargs)
    res_table.create_column(res_uuid)

    if migrate_engine.name == 'postgresql':
        sequence = sqlalchemy.Sequence('res')
        sqlalchemy.schema.CreateSequence(sequence,
                                         bind=migrate_engine).execute()
        res_id = sqlalchemy.Column(
            'tmp_id', sqlalchemy.Integer,
            server_default=sqlalchemy.text("nextval('res')"))
    else:
        res_id_column_kwargs = {}
        if migrate_engine.name == 'ibm_db_sa':
            # NOTE(mriedem): This is turned into a primary key constraint
            # later so it must be non-nullable.
            res_id_column_kwargs['nullable'] = False
        res_id = sqlalchemy.Column('tmp_id', sqlalchemy.Integer,
                                   **res_id_column_kwargs)
    res_table.create_column(res_id)

    fake_autoincrement = itertools.count(1)

    res_list = res_table.select().order_by(
        sqlalchemy.sql.expression.asc(
            res_table.c.created_at)).execute().fetchall()
    for res in res_list:
        values = {'tmp_id': next(fake_autoincrement), 'uuid': res.id}
        update = res_table.update().where(
            res_table.c.id == res.id).values(values)
        migrate_engine.execute(update)

    constraint_kwargs = {'table': res_table}
    if migrate_engine.name == 'ibm_db_sa':
        # NOTE(mriedem): DB2 gives a random name to the unique constraint
        # if one is not provided so let's set the standard name ourselves.
        constraint_kwargs['name'] = 'uniq_resource0uuid0'
    cons = constraint.UniqueConstraint('uuid', **constraint_kwargs)
    cons.create()

    if migrate_engine.name == 'postgresql':
        # resource_id_seq will be dropped in the case of removing `id` column
        # set owner to none for saving this sequence (it is needed in the
        # earlier migration)
        migrate_engine.execute('alter sequence resource_id_seq owned by none')

    res_table.c.id.drop()

    alter_kwargs = {}
    if migrate_engine.name == 'ibm_db_sa':
        alter_kwargs['nullable'] = False
    res_table.c.tmp_id.alter('id', sqlalchemy.Integer, **alter_kwargs)

    cons = constraint.PrimaryKeyConstraint('tmp_id', table=res_table)
    cons.create()

    if migrate_engine.name == 'ibm_db_sa':
        # NOTE(chenxiao): For DB2, setting "ID" column "autoincrement=True"
        # can't make sense after above "tmp_id=>id" transformation,
        # so should work around it.
        sql = ("ALTER TABLE RESOURCE ALTER COLUMN ID SET GENERATED BY "
               "DEFAULT AS IDENTITY (START WITH 1, INCREMENT BY 1)")
        migrate_engine.execute(sql)
    else:
        res_table.c.tmp_id.alter(sqlalchemy.Integer, autoincrement=True)
import sqlalchemy as db

from eightqueens import QueensSolver

if __name__ == "__main__":
    engine = db.create_engine('postgresql+psycopg2://brime:panda@db/queensdb')
    connection = engine.connect()
    metadata = db.MetaData()

    solutions_table = db.Table(
        'solutions', metadata,
        db.Column('solution_id', db.Integer(), db.Sequence('id'),
                  primary_key=True),
        db.Column('solution_string', db.String(25), nullable=False))
    metadata.create_all(engine)

    payload = []
    solver = QueensSolver(8)
    print("Solutions for Eight Queens:")
    print(len(solver.solutions))
    for solution in solver.solutions:
        print(str(solution))
        query = db.insert(solutions_table).values(
            solution_string=str(solution))
        ResultProxy = connection.execute(query)
class Property(Base, AbstractLogClass):
    """
    This class represents logging information for wells. For further details see
    :class:`~geological_toolbox.db_handler.AbstractLogClass`

    :param args: parameters for AbstractLogClass initialisation
    :param kwargs: parameters for AbstractLogClass initialisation
    """
    # define db table name and columns
    __tablename__ = "properties"

    id = sq.Column(sq.INTEGER, sq.Sequence("properties_id_seq"), primary_key=True)
    point_id = sq.Column(sq.INTEGER, sq.ForeignKey("geopoints.id"), default=-1)
    prop_type = sq.Column(sq.VARCHAR(20), default="STRING")
    prop_value = sq.Column(sq.TEXT, default="")

    def __init__(self, value: any, _type: PropertyTypes, *args, **kwargs) -> None:
        """
        Initialise the class
        """
        AbstractLogClass.__init__(self, *args, **kwargs)
        self.property_type = _type
        self.property_value = value

    def __repr__(self) -> str:
        text = "<Property(value={})>\n".format(self.prop_value)
        text += AbstractLogClass.__repr__(self)
        return text

    def __str__(self) -> str:
        text = "{} [{}]: {} - ".format(self.property_name, self.property_unit, self.prop_value)
        text += AbstractDBObject.__str__(self)
        return text

    def __convert_value(self, value: str) -> any or None:
        """
        converts the property value from type string to the specified type

        :return: converted property value
        """
        if self.property_type == PropertyTypes.STRING:
            return str(value)
        try:
            if self.property_type == PropertyTypes.INT:
                return int(value)
            if self.property_type == PropertyTypes.FLOAT:
                return float(value)
        except ValueError:
            return None

    def __check_value(self, value: any) -> bool:
        """
        Test, if the value can be converted to the specified format

        :param value: value to test
        :return: True, if it can be converted, else False
        """
        return self.__convert_value(value) is not None

    @property
    def property_type(self) -> PropertyTypes:
        """
        type of the property value

        :return: type of the property value
        :raises ValueError: if type is not available in PropertyTypes
        """
        return PropertyTypes[self.prop_type]

    @property_type.setter
    def property_type(self, value: PropertyTypes) -> None:
        """
        see getter
        """
        if not isinstance(value, PropertyTypes):
            raise ValueError("{} is not in PropertyTypes".format(value))
        self.prop_type = value.name

    @property
    def property_value(self) -> any or None:
        """
        converted value of the property

        :return: converted value of the property
        :raises ValueError: if prop_value cannot be converted to the specified property_type
        """
        return self.__convert_value(self.prop_value)

    @property_value.setter
    def property_value(self, value: any) -> None:
        """
        see getter
        """
        if self.__check_value(str(value)):
            self.prop_value = str(value)
        else:
            raise ValueError("Cannot convert property values [{}] to specified type {}".format(
                value, self.property_type.name))
blocks = sa.Table(
    "blocks",
    metadata,
    Column("height", sa.Integer, primary_key=True),
    Column("timestamp", CustomDateTime, index=True),
    Column("proposer", sa.String, sa.ForeignKey("validators.consensus_address")),
    Column("hash", CustomBase64),
    Column("inflation", sa.Float),
    Column("supply", sa.String),  # uband suffix
)

transactions = sa.Table(
    "transactions",
    metadata,
    Column("id", sa.Integer, sa.Sequence("seq_transaction_id"), unique=True),
    Column("hash", CustomBase64, primary_key=True),
    Column("block_height", sa.Integer, sa.ForeignKey("blocks.height"), index=True),
    Column("gas_used", sa.Integer),
    Column("gas_limit", sa.Integer),
    Column("gas_fee", sa.String),  # uband suffix
    Column("err_msg", sa.String, nullable=True),
    Column("success", sa.Boolean),
    Column("sender", sa.String),
    Column("memo", sa.String),
    Column("messages", sa.JSON),
)
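# In "transactions" above, the Sequence backs a non-primary-key "id" column,
# so SQLAlchemy fires it as that column's default on insert (on backends with
# sequence support). A minimal sketch of such an insert; the engine URL and
# all literal values below are assumptions for illustration only:
from sqlalchemy import create_engine

engine = create_engine("postgresql+psycopg2://user:pass@localhost/chain")

with engine.begin() as conn:
    # "id" is omitted: seq_transaction_id supplies the next value.
    conn.execute(transactions.insert().values(
        hash=b"\x00" * 32,
        block_height=1,
        gas_used=0,
        gas_limit=200000,
        gas_fee="0uband",
        success=True,
        sender="band1exampleaddress",
        memo="",
        messages=[],
    ))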
class FIDIAColumn(bases.PersistenceBase, bases.SQLAlchemyBase):
    """FIDIAColumns represent the atomic data unit in FIDIA.

    The Column is a collection of atomic data for a list/collection of
    objects in a Sample or Archive. The data of an element of a column is
    either an atomic python type (string, float, int) or an array of the
    same, usually as a numpy array.

    Columns behave like, and can be used as, astropy.table Columns.

    Implementation Details
    ----------------------

    Currently, this is a direct subclass of astropy.table.Column. However,
    that is in turn a direct subclass of np.ndarray. Therefore, it is not
    possible to create an "uninitialised" Column object that has no data
    (yet). This will cause problems with handling larger data-sets, as
    we'll have out of memory errors. Therefore, it will be necessary to
    re-implement most of the astropy.table.Column interface, while avoiding
    being a direct sub-class of np.ndarray.

    Previously known as TraitProperties

    """

    __tablename__ = "fidia_columns"  # Note this table is shared with FIDIAArrayColumn subclass
    _database_id = sa.Column(sa.Integer, sa.Sequence('column_seq'), primary_key=True)

    # Polymorphism (subclasses stored in same table)
    _db_type = sa.Column('type', sa.String(50))
    __mapper_args__ = {
        'polymorphic_on': "_db_type",
        'polymorphic_identity': 'FIDIAColumn'
    }

    # Relationships (foreign keys)
    _db_archive_id = sa.Column(sa.Integer, sa.ForeignKey("archives._db_id"))
    # trait_property_mappings = relationship("TraitPropertyMapping", back_populates="_trait_mappings",
    #     collection_class=attribute_mapped_collection('name'))  # type: Dict[str, TraitPropertyMapping]
    _archive = relationship("Archive", back_populates="columns")  # type: fidia.Archive

    # Storage Columns
    _column_id = sa.Column(sa.String)
    _object_getter = sa.Column(sa.PickleType)  # type: Callable
    _object_getter_args = sa.Column(sa.PickleType)
    _array_getter = sa.Column(sa.PickleType)  # type: Callable
    _array_getter_args = sa.Column(sa.PickleType)

    # Column Meta-data storage
    _ucd = sa.Column(sa.String)
    _unit = sa.Column(sa.String)
    _dtype = sa.Column(sa.String)
    n_dim = sa.Column(sa.Integer)
    pretty_name = sa.Column(sa.UnicodeText(length=30))
    short_description = sa.Column(sa.Unicode(length=150))
    long_description = sa.Column(sa.UnicodeText)

    allowed_types = RegexpGroup(
        'string', 'float', 'int',
        re.compile(r"string\.array\.\d+"),
        re.compile(r"float\.array\.\d+"),
        re.compile(r"int\.array\.\d+"),
        # # Same as above, but with optional dimensionality
        # re.compile(r"string\.array(?:\.\d+)?"),
        # re.compile(r"float\.array(?:\.\d+)?"),
        # re.compile(r"int\.array(?:\.\d+)?"),
    )

    catalog_types = ['string', 'float', 'int']

    non_catalog_types = RegexpGroup(
        re.compile(r"string\.array\.\d+"),
        re.compile(r"float\.array\.\d+"),
        re.compile(r"int\.array\.\d+")
        # # Same as above, but with optional dimensionality
        # re.compile(r"string\.array(?:\.\d+)?"),
        # re.compile(r"float\.array(?:\.\d+)?"),
        # re.compile(r"int\.array(?:\.\d+)?"),
    )

    # def __new__(cls, id, data):
    #     self = super(FIDIAColumn, cls).__new__(cls, data=data)
    #     return self

    def __init__(self, *args, **kwargs):
        """Create a new FIDIAColumn.

        This should only be called by `ColumnDefinition.associate`.

        """
        super(FIDIAColumn, self).__init__()

        # Internal storage for data of this column
        self._data = kwargs.pop('data', None)

        # Data Type information. Parsing and validation already done by `ColumnDefinition`.
        self._dtype = kwargs.pop('dtype', None)

        # Internal storage for IVOA Uniform Content Descriptor
        self._ucd = kwargs.get('ucd', None)

        # Unit information
        self._unit = kwargs.get('unit', None)

        # Archive Connection
        # self._archive = kwargs.pop('archive', None)  # type: fidia.Archive
        self._archive_id = kwargs.pop('archive_id', None)

        # Construct the ID
        self._timestamp = kwargs.pop('timestamp', None)
        self._coldef_id = kwargs.pop('coldef_id', None)
        if "column_id" in kwargs:
            self._column_id = kwargs["column_id"]
            log.debug("Column ID Provided: %s", self._column_id)
        elif (self._archive_id is not None and
                self._timestamp is not None and
                self._coldef_id is not None):
            self._column_id = "{archive_id}:{coldef_id}:{timestamp}".format(
                archive_id=self._archive_id, coldef_id=self._coldef_id,
                timestamp=self._timestamp)
            log.debug("Column ID constructed: %s", self._column_id)
        else:
            raise ValueError(
                "Either column_id or all of (archive_id, coldef_id and timestamp) must be provided.")

        # Descriptive meta-data
        self.pretty_name = kwargs.pop("pretty_name")
        self.short_description = kwargs.pop("short_description")
        self.long_description = kwargs.pop("long_description")

    @reconstructor
    def __db_init__(self):
        super(FIDIAColumn, self).__db_init__()
        self._data = None

    @property
    def column_definition_class(self):
        # type: () -> fidia.ColumnDefinition
        # c.f. fidia.utilities.fidia_classname() and fidia.ColumnDefinition.class_name()
        class_name = self.id.column_type
        if "." in class_name:
            # This is a ColumnDefinition defined outside of FIDIA
            raise ValueError(
                "Columns can not retrieve ColumnDefinitions defined outside of FIDIA")
        import fidia.column.column_definitions
        klass = getattr(fidia.column.column_definitions, class_name)
        return klass

    @property
    def id(self):
        # type: () -> ColumnID
        return ColumnID.as_column_id(self._column_id)

    def __repr__(self):
        return str(self.id)

    # def __get__(self, instance=None, owner=None):
    #     # type: (Trait, Trait) -> str
    #     if instance is None:
    #         return self
    #     else:
    #         if issubclass(owner, Trait):
    #             return self.data[instance.archive_index]

    # def associate(self, archive):
    #     # type: (fidia.archive.archive.Archive) -> None
    #     try:
    #         instance_column = super(FIDIAColumn, self).associate(archive)
    #     except:
    #         raise
    #     else:
    #         instance_column._archive = archive
    #         instance_column._archive_id = archive.archive_id
    #         return instance_column

    def get_value(self, object_id, provenance="any"):
        """Retrieve the value from this column for the given object ID.

        Implementation
        --------------

        This function tries each of the following steps until one returns a value:

        1. Search the Data Access Layer
        2. Use original `ColumnDefinition.object_getter` stored in local `._object_getter`
        3. Use original `ColumnDefinition.array_getter` stored in local
           `._array_getter`, selecting just this row.
""" if provenance not in ['any', 'dal', 'definition']: raise ValueError( "provenance must be one of 'any', 'dal' or 'definition'") if provenance in ['any', 'dal']: # STEP 1: Search the data access layer try: return fidia.dal_host.search_for_cell(self, object_id) except: log.info( "DAL did not provide data for column_id %s, object_id %s", self.id, object_id, exc_info=True) if provenance in ['any', 'definition']: # Confirm that at least one getter is available from the definition if self._object_getter is None and self._array_getter is None: raise FIDIAException( "No getter functions available for column") # STEP 2: Use original `ColumnDefinition.object_getter` log.debug( "Column '%s' retrieving value for object %s using original definitions", self, object_id) if self._object_getter is not None: log.vdebug( "Retrieving using cell getter from ColumnDefinition") log.vdebug("_object_getter(object_id=\"%s\", %s)", object_id, self._object_getter_args) result = self._object_getter(object_id, **self._object_getter_args) assert result is not None, "ColumnDefinition.object_getter must not return `None`." return result # STEP 3: Use original `ColumnDefinition.array_getter` log.vdebug( "Retrieving using array getter from ColumnDefinition via `._default_get_value`" ) return self._default_get_value(object_id) # This should not be reached unless something is wrong with the state of the data/ingestion. raise DataNotAvailable( "Neither the DAL nor the original ColumnDefinition could provide the requested data." ) def get_array(self): if self._array_getter is not None: result = self._array_getter(**self._array_getter_args) assert result is not None, "ColumnDefinition.array_getter must not return `None`." assert isinstance(result, pd.Series) # TODO: Why must this result be ordered? It's indexed... log.debug("Reordering array to ensure correct ordering.") ordered_result = result.reindex(self.contents, copy=False) # Since `copy=False`, this will only do work if required, otherwise the original Series is returned. # see: https://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.reindex.html # @NOTE: The order of self.contents is not preserved from how it is initialized. return ordered_result else: data = [] index = [] for object_id in self.contents: try: data.append(self.get_value(object_id)) except DataNotAvailable: # This row of the array has no data. To avoid causing # up-casting of the type, (from e.g. int to float to # accomodate np.nan), we simply don't add this row to the # pd.Series object. pass else: index.append(object_id) # @NOTE: It may be more efficient to copy the index and then # remove items, rather than building it up. series = pd.Series(data, index=index) if series.dtype.name != self._dtype: raise TypeError( "get_array constructed an array of the wrong type %s, should be %s for column %s" % (series.dtype.name, self._dtype, self)) return series def _default_get_value(self, object_id): """Individual value getter, takes object_id as argument. Notes ----- The implementation here is not necessarily used. When a ColumnDefinition is associated with an archive, the resulting Column object typically has get_value replaced with a version based on the `.object_getter` defined on the ColumnDefinition. See `ColumnDefinition.associate()`. 
""" if self._data is not None: assert isinstance(self._data, pd.Series) return self._data.at[object_id] elif self.get_array is not None: self._data = self.get_array() # assert self._data is not None, "Programming Error: get_array should populate _data" return self._data[object_id] raise FIDIAException("Column has no data") @property def ucd(self): return getattr(self, '_ucd', None) @ucd.setter def ucd(self, value): self._ucd = value @property def unit(self): return self._unit @property def timestamp(self): if getattr(self, '_timestamp', None): return self._timestamp else: if "." in self.id.timestamp: return float(self.id.timestamp) else: return int(self.id.timestamp) @property def dtype(self): """The FIDIA type of the data in this column. These are restricted to be those in `FIDIAColumn.allowed_types`. Implementation Details ---------------------- Note that it may be preferable to have this property map from the underlying numpy type rather than be explicitly set by the user. """ return self._dtype @dtype.setter def type(self, value): if value not in self.allowed_types: raise Exception("Trait property type '{}' not valid".format(value)) self._dtype = value @property def contents(self): return self._archive.contents
def downgrade(engine_name):
    print("Downgrading {}".format(engine_name))
    # your downgrade script goes here
    op.drop_table('ck_friendships')
    op.execute(sa.schema.DropSequence(sa.Sequence('ck_friendships_id_seq')))
def get_next_reg():
    session = Session()
    sequence = rdb.Sequence("registry_number_sequence")
    connection = session.connection(domain.ParliamentaryItem)
    return connection.execute(sequence)
class Message(Base):
    """
    Message Model

    Attributes
    ----------
    id: int [PK]
        unique identifier
    reciever_message: String(512)
        Encrypted message for the Receiver (encrypted with their public_key)
    sender_message: String(512)
        Encrypted message for the Sender (encrypted with their public_key)
    date: Time
        Date the message was sent to the server
    edited: Boolean
        Whether or not the message was edited
    read: Boolean
        Whether the message was read by the receiver or not
    sender: String(64) [FK]
        Sender of the message (their handle)
    reciever: String(64) [FK]
        Receiver of the message (their handle)
    """

    __tablename__ = "Message"

    id = db.Column(
        db.Integer,
        db.Sequence("message_id", start=0, increment=1),
        primary_key=True,
        nullable=False,
    )
    reciever_message = db.Column(db.String(512), nullable=False)
    sender_message = db.Column(db.String(512), nullable=False)
    date = db.Column(db.DateTime, nullable=False, default=datetime.utcnow)
    edited = db.Column(db.Boolean, default=False, nullable=False)
    read = db.Column(db.Boolean, default=False, nullable=False)
    sender = db.Column(db.String(64), db.ForeignKey("User.handle"))
    reciever = db.Column(db.String(64), db.ForeignKey("User.handle"))

    def to_reciever_json(self) -> Dict[str, Any]:
        """
        Receiver JSON

        Returns
        -------
        Dict[str, Any]
            {
                "id": self.id,
                "sender": self.sender,
                "message": self.reciever_message,
                "date": self.date,
                "edited": self.edited
            }
        """
        return {
            "id": self.id,
            "sender": self.sender,
            "message": self.reciever_message,
            "date": self.date.strftime("%m/%d/%y"),
            "edited": self.edited,
        }

    def to_sender_json(self) -> Dict[str, Any]:
        """
        Sender JSON

        Returns
        -------
        Dict[str, Any]
            {
                "id": self.id,
                "message": self.sender_message,
                "date": self.date,
                "edited": self.edited,
                "read": self.read
            }
        """
        return {
            "id": self.id,
            "message": self.sender_message,
            "date": self.date.strftime("%m/%d/%y"),
            "edited": self.edited,
            "read": self.read,
        }
def get_next_prog(context):
    session = Session()
    sequence = rdb.Sequence("%s_registry_sequence" % context.type)
    connection = session.connection(context.__class__)
    return connection.execute(sequence)
def upgrade():
    incosistent_2g_externals = op.create_table(
        'incosistent_2g_externals',
        sa.Column('pk', sa.Integer, primary_key=True),
        sa.Column('nodename', sa.String(100), nullable=False),
        sa.Column('ext_vendor', sa.String(100), nullable=False),
        sa.Column('int_vendor', sa.String(100), nullable=False),
        sa.Column('int_cellname', sa.String(200), nullable=False),
        sa.Column('ext_mnc', sa.Integer),
        sa.Column('ext_mcc', sa.Integer),
        sa.Column('ext_bcc', sa.Integer),
        sa.Column('ext_ncc', sa.Integer),
        sa.Column('ext_bcch', sa.Integer),
        sa.Column('ext_lac', sa.Integer),
        sa.Column('int_mnc', sa.Integer),
        sa.Column('int_mcc', sa.Integer),
        sa.Column('int_bcc', sa.Integer),
        sa.Column('int_ncc', sa.Integer),
        sa.Column('int_bcch', sa.Integer),
        sa.Column('int_lac', sa.Integer),
        sa.Column('age', sa.Integer, nullable=False, default=0),
        sa.Column('modified_by', sa.Integer),
        sa.Column('added_by', sa.Integer),
        sa.Column('date_added', sa.TIMESTAMP, default=sa.func.now(),
                  onupdate=sa.func.now()),
        sa.Column('date_modified', sa.TIMESTAMP, default=sa.func.now()),
        schema=u'network_audit')
    op.execute(
        'ALTER SEQUENCE network_audit.incosistent_2g_externals_pk_seq '
        'RENAME TO seq_incosistent_2g_externals_pk')
    op.create_unique_constraint(
        'unique_incosistent_2g_externals', 'incosistent_2g_externals',
        ['nodename', 'ext_vendor', 'int_vendor', 'int_cellname', 'ext_mnc',
         'ext_mcc', 'ext_bcc', 'ext_ncc', 'ext_bcch', 'ext_lac', 'int_mnc',
         'int_mcc', 'int_bcc', 'int_ncc', 'int_bcch', 'int_lac'],
        schema='network_audit')

    incosistent_3g_externals = op.create_table(
        'incosistent_3g_externals',
        sa.Column('pk', sa.Integer, primary_key=True),
        sa.Column('nodename', sa.String(100), nullable=False),
        sa.Column('ext_vendor', sa.String(100), nullable=False),
        sa.Column('int_vendor', sa.String(100), nullable=False),
        sa.Column('ext_cellname', sa.String(100), nullable=False, default=0),
        sa.Column('ext_mnc', sa.Integer),
        sa.Column('ext_mcc', sa.Integer),
        sa.Column('ext_dl_uarfcn', sa.Integer),
        sa.Column('ext_rac', sa.Integer),
        sa.Column('ext_lac', sa.Integer),
        sa.Column('ext_psc', sa.Integer),
        sa.Column('int_mnc', sa.Integer),
        sa.Column('int_mcc', sa.Integer),
        sa.Column('int_dl_uarfcn', sa.Integer),
        sa.Column('int_rac', sa.Integer),
        sa.Column('int_lac', sa.Integer),
        sa.Column('int_psc', sa.Integer),
        sa.Column('age', sa.Integer, nullable=False, default=0),
        sa.Column('modified_by', sa.Integer),
        sa.Column('added_by', sa.Integer),
        sa.Column('date_added', sa.TIMESTAMP, default=sa.func.now(),
                  onupdate=sa.func.now()),
        sa.Column('date_modified', sa.TIMESTAMP, default=sa.func.now()),
        schema=u'network_audit')
    op.execute(
        'ALTER SEQUENCE network_audit.incosistent_3g_externals_pk_seq '
        'RENAME TO seq_incosistent_3g_externals_pk')
    op.create_unique_constraint(
        'unique_incosistent_3g_externals', 'incosistent_3g_externals',
        ['nodename', 'ext_vendor', 'int_vendor', 'ext_cellname', 'ext_mnc',
         'ext_mcc', 'ext_dl_uarfcn', 'ext_rac', 'ext_lac', 'ext_psc',
         'int_mnc', 'int_mcc', 'int_dl_uarfcn', 'int_rac', 'int_lac',
         'int_psc'],
        schema='network_audit')

    incosistent_4g_externals = op.create_table(
        'incosistent_4g_externals',
        sa.Column('pk', sa.Integer, primary_key=True),
        sa.Column('nodename', sa.String(100), nullable=False),
        sa.Column('ext_vendor', sa.String(100), nullable=False),
        sa.Column('int_vendor', sa.String(100), nullable=False),
        sa.Column('ext_cellname', sa.String(100), nullable=False, default=0),
        sa.Column('ext_mnc', sa.Integer),
        sa.Column('ext_mcc', sa.Integer),
        sa.Column('ext_dl_earfcn', sa.Integer),
        sa.Column('ext_pci', sa.Integer),
        sa.Column('int_mnc', sa.Integer),
        sa.Column('int_mcc', sa.Integer),
        sa.Column('int_dl_earfcn', sa.Integer),
        sa.Column('int_pci', sa.Integer),
        sa.Column('age', sa.Integer, nullable=False, default=0),
        sa.Column('modified_by', sa.Integer),
        sa.Column('added_by', sa.Integer),
        sa.Column('date_added', sa.TIMESTAMP, default=sa.func.now(),
                  onupdate=sa.func.now()),
        sa.Column('date_modified', sa.TIMESTAMP, default=sa.func.now()),
        schema=u'network_audit')
    op.execute(
        'ALTER SEQUENCE network_audit.incosistent_4g_externals_pk_seq '
        'RENAME TO seq_incosistent_4g_externals_pk')
    op.create_unique_constraint(
        'unique_incosistent_4g_externals', 'incosistent_4g_externals',
        ['nodename', 'ext_vendor', 'int_vendor', 'ext_cellname', 'ext_mnc',
         'ext_mcc', 'ext_dl_earfcn', 'ext_pci', 'int_mnc', 'int_mcc',
         'int_dl_earfcn', 'int_pci'],
        schema='network_audit')

    redundant_externals = op.create_table(
        'redundant_externals',
        sa.Column('pk', sa.Integer, primary_key=True),
        sa.Column('nodename', sa.String(100), nullable=False),
        sa.Column('technology', sa.String(100), nullable=False),
        sa.Column('vendor', sa.String(100), nullable=False),
        sa.Column('cellname', sa.String(100), nullable=False, default=0),
        sa.Column('age', sa.Integer, nullable=False, default=0),
        sa.Column('modified_by', sa.Integer),
        sa.Column('added_by', sa.Integer),
        sa.Column('date_added', sa.TIMESTAMP, default=sa.func.now(),
                  onupdate=sa.func.now()),
        sa.Column('date_modified', sa.TIMESTAMP, default=sa.func.now()),
        schema=u'network_audit')
    op.execute(
        'ALTER SEQUENCE network_audit.redundant_externals_pk_seq '
        'RENAME TO seq_redundant_externals_pk')
    op.create_unique_constraint(
        'unique_redundant_externals', 'redundant_externals',
        ['nodename', 'technology', 'vendor', 'cellname'],
        schema='network_audit')

    audit_categories = sa.sql.table(
        'audit_categories',
        sa.Column('pk', sa.Integer, sa.Sequence('seq_audit_categories_pk'),
                  primary_key=True, nullable=False),
        sa.Column('name', sa.String(255), nullable=False),
        sa.Column('notes', sa.Text, nullable=False),
        sa.Column('parent_pk', sa.Integer, nullable=False, default=0),
        sa.Column('in_built', sa.Boolean, default=False),
        sa.Column('modified_by', sa.Integer, default=0),
        sa.Column('added_by', sa.Integer, default=0),
        sa.Column('date_added', sa.TIMESTAMP, default=sa.func.now(),
                  onupdate=sa.func.now()),
        sa.Column('date_modified', sa.TIMESTAMP, default=sa.func.now()),
    )

    audit_rules = sa.sql.table(
        'audit_rules',
        sa.Column('pk', sa.Integer, sa.Sequence('seq_audit_categories_pk'),
                  primary_key=True, nullable=False),
        sa.Column('name', sa.String(255), nullable=False),
        sa.Column('notes', sa.Text, nullable=False),
        sa.Column('category_pk', sa.Integer, nullable=False, default=0),
        sa.Column('in_built', sa.Boolean, default=False),
        sa.Column('table_name', sa.String(255), nullable=False),
        sa.Column('sql', sa.Text, nullable=False),
        sa.Column('modified_by', sa.Integer),
        sa.Column('added_by', sa.Integer),
        sa.Column('date_added', sa.TIMESTAMP, default=sa.func.now(),
                  onupdate=sa.func.now()),
        sa.Column('date_modified', sa.TIMESTAMP, default=sa.func.now()),
    )

    # Create baseline category
    op.bulk_insert(audit_categories, [
        {'name': 'Definitions', 'notes': 'Network Relations Audits'},
    ])

    connection = op.get_bind()
    r = connection.execute(audit_categories.select().where(
        audit_categories.c.name == 'Definitions'))
    category_pk = 0
    for row in r:
        category_pk = row['pk']

    op.bulk_insert(audit_rules, [
        {
            'name': 'Inconsistent 2G externals',
            'category_pk': category_pk,
            'in_built': True,
            'table_name': 'incosistent_2g_externals',
            'sql': 'SELECT * FROM network_audit.incosistent_2g_externals',
            'notes': 'Inconsistent external 2G parameters'
        },
    ])
    op.bulk_insert(audit_rules, [
        {
            'name': 'Inconsistent 3G externals',
            'category_pk': category_pk,
            'in_built': True,
            'table_name': 'incosistent_3g_externals',
            'sql': 'SELECT * FROM network_audit.incosistent_3g_externals',
            'notes': 'Inconsistent external 3G parameters'
        },
    ])
    op.bulk_insert(audit_rules, [
        {
            'name': 'Inconsistent 4G externals',
            'category_pk': category_pk,
            'in_built': True,
            'table_name': 'incosistent_4g_externals',
            'sql': 'SELECT * FROM network_audit.incosistent_4g_externals',
            'notes': 'Inconsistent external 4G parameters'
        },
    ])
    op.bulk_insert(audit_rules, [
        {
            'name': 'Redundant Externals',
            'category_pk': category_pk,
            'in_built': True,
            'table_name': 'redundant_externals',
            'sql': 'SELECT * FROM network_audit.redundant_externals',
            'notes': 'Redundant externals definitions'
        },
    ])
class WellLog(Base, AbstractLogClass):
    """
    represents logging information for wells
    """
    # define db table name and columns
    __tablename__ = "well_logs"

    id = sq.Column(sq.INTEGER, sq.Sequence("well_logs_id_seq"), primary_key=True)
    well_id = sq.Column(sq.INTEGER, sq.ForeignKey("wells.id"), default=-1)

    # define log values relationship
    log_values = relationship("WellLogValue", order_by=WellLogValue.log_depth,
                              backref="well_log",
                              primaryjoin="WellLog.id==WellLogValue.log_id",
                              cascade="all, delete, delete-orphan")

    def __init__(self, *args, **kwargs) -> None:
        """
        Initialise the class
        """
        AbstractLogClass.__init__(self, *args, **kwargs)

    def __repr__(self) -> str:
        text = "<WellLog(id='{}', well_id='{}', log values='{}')>\n" \
            .format(self.id, self.well.id, str(self.log_values))
        text += "Additional DBObject: {}".format(AbstractDBObject.__repr__(self))
        return text

    def __str__(self) -> str:
        text = "[{}] Well ({}: {})\n" \
            .format(self.id, self.well.id, self.well.name)
        text += "DBObject: {}".format(AbstractDBObject.__str__(self))
        for assoc in self.log_values:
            text += "{}".format(str(assoc))
        return text

    def insert_log_value(self, log_value: WellLogValue) -> None:
        """
        Insert a new log value in the log
        ATTENTION: If you insert a log value, the log will be automatically stored in the database!

        :param log_value: WellLogValue to be inserted
        :return: Nothing
        :raises TypeError: if log_value is not an instance of WellLogValue
        :raises ValueError: if the depth of the value is larger than the drilled depth of the well or < 0
        """
        if not isinstance(log_value, WellLogValue):
            raise TypeError("log_value {} is not of type WellLogValue!".format(str(log_value)))
        if log_value.depth < 0:
            raise ValueError("Value depth ({}) < 0!".format(log_value.depth))
        if (self.well is not None) and (log_value.depth > self.well.depth):
            raise ValueError("Value depth ({}) is larger than final well depth ({})!"
                             .format(log_value.depth, self.well.depth))
        self.log_values.append(log_value)

        # new sorting to ensure correct order without storage and reloading from the database
        self.log_values.sort(key=lambda x: x.depth)

    def insert_multiple_log_values(self, log_values: List[WellLogValue]) -> None:
        """
        Insert multiple log values in the log
        ATTENTION: If you insert values, the log will be automatically stored in the database!

        :param log_values: list of log values to be inserted
        :return: Nothing
        :raises TypeError: if one of the values is not an instance of WellLogValue
        :raises ValueError: if the depth of a value is larger than the drilled depth of the well or < 0
        """
        for value in log_values:
            if not isinstance(value, WellLogValue):
                raise TypeError(
                    "At least one value is not of type WellLogValue ({}: {})!"
                    .format(str(value), str(type(value))))
            if value.depth < 0:
                raise ValueError("Value depth ({}) < 0!".format(value.depth))
            if (self.well is not None) and (value.depth > self.well.depth):
                raise ValueError("Value depth ({}) is larger than final well depth ({})!"
                                 .format(value.depth, self.well.depth))

        self.log_values += log_values

        # new sorting to ensure correct order without storage and reloading from the database
        self.log_values.sort(key=lambda x: x.depth)

    def get_value_by_depth(self, depth: float) -> WellLogValue or None:
        """
        Returns the value at depth "depth"

        :param depth: depth of the requested value
        :return: the value at depth "depth"
        :raises ValueError: if no value was found for the committed depth or depth is not compatible to float
        """
        depth = float(depth)
        for value in self.log_values:
            if value.depth == depth:
                return value
        raise ValueError("No value found at depth {}".format(depth))

    def delete_value(self, value: WellLogValue) -> None:
        """
        Deletes the value from the well log

        :param value: WellLogValue object which should be deleted
        :return: Nothing
        :raises TypeError: if value is not of type WellLogValue
        :raises ValueError: if the value is not part of the well log
        """
        if not isinstance(value, WellLogValue):
            raise TypeError("value {} is not an instance of WellLogValue!".format(str(value)))
        try:
            self.log_values.remove(value)
        except ValueError as e:
            raise ValueError("{}\nWellLogValue with ID {} not found in value list!".format(str(e), value.id))
from sqlalchemy.schema import DropSequence


def downgrade():
    op.execute(DropSequence(sa.Sequence("certs_sn")))
    op.drop_table("devices")
class UserKey(Base):
    __tablename__ = "user_key"

    key = sa.Column(sa.Integer, sa.Sequence('user_id_seq'), primary_key=True,
                    doc="primary key")
    deactivated_at = sa.Column(sa.DateTime(), nullable=True)
    keytype = sa.Column(sa.String(36), nullable=False)
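# A quick, hedged way to see what DDL a Sequence-backed primary key like the
# one above produces: compile the mapped table's CREATE TABLE statement for a
# sequence-supporting dialect (PostgreSQL here, purely for illustration):
from sqlalchemy.schema import CreateTable
from sqlalchemy.dialects import postgresql

print(CreateTable(UserKey.__table__).compile(dialect=postgresql.dialect()))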
def upgrade():
    managedobjects = sa.sql.table(
        'managedobjects',
        sa.Column('pk', sa.Integer, sa.Sequence('seq_managedobjects_pk'),
                  primary_key=True, nullable=False),
        sa.Column('name', sa.String(50), nullable=False),
        sa.Column('notes', sa.Text),
        sa.Column('label', sa.String(200)),
        sa.Column('parent_pk', sa.Integer),
        sa.Column('tech_pk', sa.Integer),
        sa.Column('vendor_pk', sa.Integer),
        sa.Column('modified_by', sa.Integer),
        sa.Column('added_by', sa.Integer),
        sa.Column('date_added', sa.TIMESTAMP,
                  default=datetime.datetime.utcnow,
                  onupdate=datetime.datetime.utcnow),
        sa.Column('date_modified', sa.TIMESTAMP,
                  default=datetime.datetime.utcnow))

    # Every inserted row shares the same values apart from 'name', so build
    # the insert list from the list of managed object names.
    mo_names = [
        'ALGODEFAULTPARA', 'ANR', 'APPCERT', 'BASEBANDEQM',
        'BASEBANDEQMBOARDREF', 'BCCHCFG', 'BFMIMOADAPTIVEPARACFG', 'CAMGTCFG',
        'CELL', 'CELLACBAR', 'CELLACCESS', 'CELLALGOSWITCH',
        'CELLBF', 'CELLBFMIMOPARACFG', 'CELLCHPWRCFG', 'CELLCSPCPARA',
        'CELLDLCOMPALGO', 'CELLDLICIC', 'CELLDLICICMCPARA', 'CELLDLPCPDCCH',
        'CELLDLPCPDSCH', 'CELLDLPCPDSCHPA', 'CELLDLPCPHICH', 'CELLDLSCHALGO',
        'CELLDRXPARA', 'CELLDSS', 'CELLDYNACBARALGOPARA', 'CELLHOPARACFG',
        'CELLIDPRDUPT', 'CELLLOWPOWER', 'CELLMBMSCFG', 'CELLMCPARA',
        'CELLMIMOPARACFG', 'CELLMLB', 'CELLMLBHO', 'CELLMRO',
        'CELLNOACCESSALMPARA', 'CELLOP', 'CELLPCALGO', 'CELLPDCCHALGO',
        'CELLPUCCHALGO', 'CELLRACHALGO', 'CELLRACTHD', 'CELLRESEL',
        'CELLRESELGERAN', 'CELLRESELUTRAN', 'CELLRFSHUTDOWN', 'CELLRICALGO',
        'CELLSEL', 'CELLSERVICEDIFFCFG', 'CELLSHUTDOWN', 'CELLSIMAP',
        'CELLSTANDARDQCI', 'CELLULCOMPALGO', 'CELLULICIC', 'CELLULICICMCPARA',
        'CELLULPCCOMM', 'CELLULPCDEDIC', 'CELLULSCHALGO', 'CERTCHKTSK',
        'CERTDEPLOY', 'CERTMK', 'CERTREQ', 'CNOPERATOR',
        'CNOPERATORHOCFG', 'CNOPERATORIPPATH', 'CNOPERATORSTANDARDQCI',
        'CNOPERATORTA', 'COUNTERCHECKPARA', 'CPBEARER', 'CQIADAPTIVECFG',
        'CRLPOLICY', 'CSFALLBACKBLINDHOCFG', 'CSFALLBACKHO',
        'CSFALLBACKPOLICYCFG', 'CSPCALGOPARA', 'DEVIP', 'DHCPRELAYSWITCH',
        'DIFPRI', 'DISTBASEDHO', 'DRX', 'DRXPARAGROUP',
        'DSCPMAP', 'EMC', 'ENODEBALGOSWITCH', 'ENODEBAUTOPOWEROFF',
        'ENODEBCIPHERCAP', 'ENODEBCONNSTATETIMER', 'ENODEBFUNCTION',
        'ENODEBINTEGRITYCAP', 'ENODEBMLB', 'ENODEBPATH', 'ENODEBSHARINGMODE',
        'EPGROUP', 'ETHPORT', 'EUCELLSECTOREQM', 'EUCOSCHCFG',
        'EUTRANEXTERNALCELL', 'EUTRANINTRAFREQNCELL', 'FDDRESMODE',
        'filefooter', 'GERANEXTERNALCELL', 'GERANINTERFCFG', 'GERANNCELL',
        'GERANNFREQGROUP', 'GERANNFREQGROUPARFCN', 'GLOBALPROCSWITCH', 'GTPU',
        'GTRANSPARA', 'HOMEASCOMM', 'IKECFG', 'INTERFREQHOGROUP',
        'INTERRATCELLSHUTDOWN', 'INTERRATHOCDMA1XRTTGROUP',
        'INTERRATHOCDMAHRPDGROUP', 'INTERRATHOCOMM', 'INTERRATHOCOMMGROUP',
        'INTERRATHOGERANGROUP', 'INTERRATHOUTRANGROUP',
        'INTERRATPOLICYCFGGROUP', 'INTRAFREQHOGROUP', 'INTRARATHOCOMM',
        'IPGUARD', 'IPPATH', 'IPRT', 'LOCATION',
        'MIMOADAPTIVEPARACFG', 'MMEFEATURECFG', 'MRO', 'NE',
        'NODE', 'OMCH', 'PCCHCFG', 'PDCPROHCPARA',
        'PDSCHCFG', 'PHICHCFG', 'PUCCHCFG', 'PUSCHCFG',
        'PUSCHPARAM', 'RACHCFG', 'RET', 'RETDEVICEDATA',
        'RETSUBUNIT', 'RLCPDCPPARAGROUP', 'RRCCONNSTATETIMER',
        'RRUJOINTCALPARACFG', 'S1', 'S1INTERFACE', 'S1REESTTIMER',
        'SCTPHOST', 'SCTPHOSTREF', 'SCTPLNK', 'SCTPPEER',
        'SCTPPEERREF', 'SECTOR', 'SECTORANTENNAREF', 'SECTOREQM',
        'SECTOREQMANTENNAREF', 'SERVICEIFDLEARFCNGRP', 'SERVICEIFHOCFGGROUP',
        'SERVICEIRHOCFGGROUP', 'SIMULOAD', 'SRSADAPTIVECFG', 'SRSCFG',
        'STANDARDQCI', 'SUBSESSION_NE', 'TACALG', 'TCEIPMAPPING',
        'TCPACKCTRLALGO', 'TCPACKLIMITALG', 'TCPMSSCTRL', 'TDDFRAMEOFFSET',
        'TDDRESMODESWITCH', 'TIMEALIGNMENTTIMER', 'TOLCALG', 'TPEALGO',
        'TRUSTCERT', 'TYPDRBBSR', 'UDT', 'UDTPARAGRP',
        'UETIMERCONST', 'USERPLANEHOST', 'USERPLANEHOSTREF', 'USERPLANEPEER',
        'USERPLANEPEERREF', 'UTRANEXTERNALCELL', 'UTRANNCELL', 'UTRANNFREQ',
        'VLANMAP', 'VQMALGO', 'VRF', 'X2',
        'X2BLACKWHITELIST', 'X2INTERFACE',
    ]
    op.bulk_insert(managedobjects, [
        {'name': name, 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 3,
         'modified_by': 0, 'added_by': 0}
        for name in mo_names
    ])
def create_common_data_format_tables(session, dirpath="CDF_schema_def_info/"):
    """Create the Common Data Format (CDF) tables in the database bound to
    `session`, reading table definitions from `dirpath`. Enumeration tables
    (e.g., ReportingUnitType, CountItemType, ...) are created but *not* filled.
    """
    eng = session.bind
    metadata = MetaData(bind=eng)

    # create the single sequence for all db ids
    id_seq = sa.Sequence("id_seq", metadata=metadata)

    # create enumeration tables
    e_table_list = enum_table_list(dirpath)
    for t in e_table_list:
        create_table(metadata, id_seq, t, "enumerations", dirpath)

    # create element tables (cdf and metadata) and push to db
    element_path = os.path.join(dirpath, "elements")
    # dynamic list of elements whose tables haven't been created yet
    elements_to_process = [f for f in os.listdir(element_path) if f[0] != "."]
    while elements_to_process:
        element = elements_to_process[0]
        # check foreign keys; if any refers to an element yet to be processed,
        # switch to that element first. Foreign keys on elements only point to
        # other elements, so joins need not be considered here.
        foreign_keys = pd.read_csv(
            os.path.join(element_path, element, "foreign_keys.txt"), sep="\t")
        for i, r in foreign_keys.iterrows():
            # all targets of the foreign key r["fieldname"]
            fk_set = set(r["refers_to"].split(";"))
            try:
                element = [e for e in fk_set if e in elements_to_process].pop()
                break
            except IndexError:
                pass
        # create indices for efficiency
        if element == "VoteCount":
            create_indices = [
                "CountItemType_Id", "OtherCountItemType", "ReportingUnit_Id",
                "Contest_Id", "Selection_Id", "Election_Id", "_datafile_Id",
            ]
        elif element == "CandidateSelection":
            create_indices = ["Candidate_Id", "Party_Id"]
        elif element == "ReportingUnit":
            create_indices = ["ReportingUnitType_Id"]
        else:
            # create_indices = [[db.get_name_field(element)]]
            create_indices = None
            # TODO fix for efficiency -- note <contest_type>Contest,
            # <contest_type>Selection may need special treatment

        # create db table for element
        create_table(metadata, id_seq, element, "elements", dirpath,
                     create_indices=create_indices)
        # remove element from list of yet-to-be-processed
        elements_to_process.remove(element)

    # create join tables
    join_path = os.path.join(dirpath, "joins")
    joins_to_process = [f for f in os.listdir(join_path) if f[0] != "."]
    while joins_to_process:
        j = joins_to_process[0]
        if j == "ComposingReportingUnitJoin":
            create_indices = ["ParentReportingUnit_Id", "ChildReportingUnit_Id"]
        else:
            create_indices = None
        # check foreign keys; if any refers to a join yet to be processed,
        # switch to that join first
        foreign_keys = pd.read_csv(
            os.path.join(join_path, j, "foreign_keys.txt"), sep="\t")
        for i, r in foreign_keys.iterrows():
            # all targets of the foreign key r["fieldname"]
            fk_set = set(r["refers_to"].split(";"))
            try:
                j = [e for e in fk_set if e in joins_to_process].pop()
                break
            except IndexError:
                pass
        # create db table for join
        create_table(metadata, id_seq, j, "joins", dirpath, create_indices)
        # remove join from list of yet-to-be-processed
        joins_to_process.remove(j)

    # push all tables to db
    metadata.create_all()
    session.flush()
    return metadata
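The function above funnels every table's primary key through the single shared `id_seq`, so ids are unique across the whole schema. A minimal standalone sketch of that pattern (the table names and DSN here are hypothetical, not from the project above):

import sqlalchemy as sa

metadata = sa.MetaData()
id_seq = sa.Sequence("id_seq", metadata=metadata)

# Two tables drawing primary keys from the same sequence: no id is ever
# issued twice, even across tables.
alpha = sa.Table(
    "alpha", metadata,
    sa.Column("Id", sa.Integer, id_seq,
              server_default=id_seq.next_value(), primary_key=True),
    sa.Column("Name", sa.Text),
)
beta = sa.Table(
    "beta", metadata,
    sa.Column("Id", sa.Integer, id_seq,
              server_default=id_seq.next_value(), primary_key=True),
    sa.Column("Name", sa.Text),
)

engine = sa.create_engine("postgresql://localhost/example")  # assumed DSN
metadata.create_all(engine)  # emits CREATE SEQUENCE id_seq once, then the tables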
import datetime
import json

from databases import Database
import sqlalchemy
from sqlalchemy import sql

from .base import BaseRecorder
from ..op import OpDetails

metadata = sqlalchemy.MetaData()

events_id_seq = sqlalchemy.Sequence('events_id_seq', metadata=metadata)
events = sqlalchemy.Table(
    'events', metadata,
    sqlalchemy.Column('id', sqlalchemy.Integer, events_id_seq, primary_key=True),
    sqlalchemy.Column('time', sqlalchemy.Time),
    sqlalchemy.Column('name', sqlalchemy.Text),
    sqlalchemy.Column('payload', sqlalchemy.Text),
    sqlalchemy.Column('unknown', sqlalchemy.Boolean),
)

packets_id_seq = sqlalchemy.Sequence('packets_id_seq', metadata=metadata)
packets = sqlalchemy.Table(
    'packets', metadata,
    sqlalchemy.Column('id', sqlalchemy.Integer, packets_id_seq, primary_key=True),
    sqlalchemy.Column('time', sqlalchemy.Time),
    sqlalchemy.Column('op_code', sqlalchemy.Integer),
    sqlalchemy.Column('event_name', sqlalchemy.Text),
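Because `events_id_seq` and `packets_id_seq` are constructed with `metadata=metadata`, their DDL lifecycle is tied to the tables: a plain `create_all()`/`drop_all()` round-trips the sequences as well. A short sketch, assuming a sequence-capable backend and a hypothetical DSN:

engine = sqlalchemy.create_engine("postgresql://localhost/recorder")  # assumed DSN
metadata.create_all(engine)  # CREATE SEQUENCE events_id_seq, packets_id_seq, then the tables
metadata.drop_all(engine)    # drops the tables, then the sequences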
def setTabledDocumentSerialNumber(tabled_document):
    session = Session()
    connection = session.connection(domain.TabledDocument)
    sequence = rdb.Sequence('tabled_document_number_sequence')
    tabled_document.tabled_document_number = connection.execute(sequence)
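In SQLAlchemy 1.x, executing a `Sequence` object directly (as above) fires `nextval` and returns the new value. That calling form is gone in SQLAlchemy 2.0; a sketch of the modern equivalent, with an assumed engine/DSN:

import sqlalchemy as rdb

engine = rdb.create_engine("postgresql://localhost/bungeni")  # assumed DSN
seq = rdb.Sequence("tabled_document_number_sequence")
with engine.connect() as connection:
    # SELECT nextval('tabled_document_number_sequence')
    number = connection.scalar(rdb.select(seq.next_value()))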
# Copyright (C) 2010 - Africa i-Parliaments - http://www.parliaments.info/
# Licensed under GNU GPL v2 - http://www.gnu.org/licenses/gpl-2.0.txt
"""The Bungeni relational schema

$Id$
"""
log = __import__("logging").getLogger("bungeni.models.schema")

import sqlalchemy as sa
from fields import FSBlob
from datetime import datetime

metadata = sa.MetaData()

# users and groups because of the zope users and groups
PrincipalSequence = sa.Sequence("principal_sequence")

# vertical properties
vp_text = sa.Table("vp_text", metadata,
    sa.Column("object_id", sa.Integer, primary_key=True, nullable=False),
    sa.Column("object_type", sa.String(32), primary_key=True, nullable=False),
    sa.Column("name", sa.String(50), primary_key=True, nullable=False),
    sa.Column("value", sa.UnicodeText),
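`vp_text` is a vertical-property (entity/attribute/value) store: each row holds one named text property of one `(object_id, object_type)` pair. A hypothetical lookup helper, offered only as a sketch against the table defined above:

def get_text_property(connection, object_id, object_type, name):
    # Return the stored value for one property of one object, or None.
    row = connection.execute(
        sa.select([vp_text.c.value]).where(sa.and_(
            vp_text.c.object_id == object_id,
            vp_text.c.object_type == object_type,
            vp_text.c.name == name,
        ))
    ).first()
    return row[0] if row else None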
def update_db(date, year, genus, lab, source, amendment_flag, amended_id):
    con, meta = connect(user=POSTGRES_USERNAME, password=POSTGRES_PASSWORD,
                        db='autorogadev')  # NOTE db=autorogadev

    ROGA_ID_SEQ = sa.Sequence('roga_id_seq')

    try:
        # Create table if it doesn't already exist
        autoroga_project_table = sa.Table(
            'autoroga_project_table', meta,
            sa.Column('id', sa.INTEGER, ROGA_ID_SEQ, primary_key=True,
                      server_default=ROGA_ID_SEQ.next_value()),
            sa.Column('roga_id', sa.String(64)),
            sa.Column('genus', sa.String(64)),
            sa.Column('lab', sa.String(16)),
            sa.Column('source', sa.String(64)),
            sa.Column('amendment_flag', sa.String(16)),
            sa.Column('amended_id', sa.String(64)),
            sa.Column('date', sa.Date),
            sa.Column('time', sa.DateTime, default=datetime.datetime.utcnow),
            sa.Column('deletion_date', sa.Date),
            sa.Column('deletion_reason', sa.String(256)))
        meta.create_all()
        print('Successfully created autoroga_project_table')
    except Exception:
        # Retrieve table if it already exists, reflecting it over the live connection
        autoroga_project_table = sa.Table('autoroga_project_table', meta,
                                          autoload=True, autoload_with=con)
        print('Successfully retrieved autoroga_project_table')

    # Grab what the next key value will be
    select_next_value = sa.select([autoroga_project_table.c.id])
    keys = con.execute(select_next_value)
    try:
        next_val = max(keys)[0] + 1
    except ValueError:  # no rows yet
        next_val = 1

    # Create ROGA ID
    select_next_roga_id = sa.select([autoroga_project_table.c.roga_id])
    keys = con.execute(select_next_roga_id)
    roga_ids = keys.fetchall()

    # Parse the existing ROGA IDs (a list of one-element tuples) to figure out
    # what the next one for this year should be.
    ids_for_year = list()
    for item in roga_ids:
        # The ROGA ID is the first element of the tuple.
        roga = item[0]
        roga_year = int(roga.split('-')[0])
        roga_id = int(roga.split('-')[-1])
        if roga_year == int(year):
            ids_for_year.append(roga_id)

    # Use the lowest number not yet taken for this year.
    i = 1
    while i in ids_for_year:
        i += 1

    roga_id = year + '-ROGA-DEV-' + '{:04d}'.format(i)

    # Insert new row into autoroga_project_table
    ins = autoroga_project_table.insert().values(
        roga_id=roga_id, genus=genus, date=date, lab=lab, source=source,
        amendment_flag=amendment_flag, amended_id=amended_id,
        time=datetime.datetime.utcnow())
    con.execute(ins)
    return roga_id
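Since the `id` column is declared with `server_default=ROGA_ID_SEQ.next_value()`, the database can hand out ids atomically; the `max(id)+1` scan above is both unnecessary and racy under concurrent writers. A sketch of leaning on the sequence instead, reusing the names defined above:

ins = autoroga_project_table.insert().values(
    roga_id=roga_id, genus=genus, date=date, lab=lab, source=source,
    amendment_flag=amendment_flag, amended_id=amended_id,
    time=datetime.datetime.utcnow())
result = con.execute(ins)
new_id = result.inserted_primary_key[0]  # id drawn from roga_id_seq by the database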
def downgrade():
    op.drop_table(model.XPath.__tablename__)
    op.execute(DropSequence(sa.Sequence('msg_xpath_seq')))
    op.drop_table(model.JSONPointer.__tablename__)
    op.execute(DropSequence(sa.Sequence('msg_json_pointer_seq')))
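`DropSequence` and its counterpart `CreateSequence` live in `sqlalchemy.schema`. For reference, a sketch of the matching `upgrade()` under the same assumptions (the actual table definitions are elided):

from alembic import op
import sqlalchemy as sa
from sqlalchemy.schema import CreateSequence

def upgrade():
    # Create the sequences before the tables whose columns draw from them.
    op.execute(CreateSequence(sa.Sequence('msg_xpath_seq')))
    op.execute(CreateSequence(sa.Sequence('msg_json_pointer_seq')))
    # op.create_table(...) for the XPath and JSONPointer tables follows here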
def upgrade(): managedobjects = sa.sql.table( 'managedobjects', sa.Column('pk', sa.Integer, sa.Sequence('seq_managedobjects_pk', ), primary_key=True, nullable=False), sa.Column('name', sa.String(50), nullable=False), sa.Column('notes', sa.Text), sa.Column('label', sa.String(200)), sa.Column('parent_pk', sa.Integer), sa.Column('tech_pk', sa.Integer), sa.Column('vendor_pk', sa.Integer), sa.Column('modified_by', sa.Integer), sa.Column('added_by', sa.Integer), sa.Column('date_added', sa.TIMESTAMP, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow), sa.Column('date_modified', sa.TIMESTAMP, default=datetime.datetime.utcnow)) op.bulk_insert(managedobjects, [ { 'name': 'A2NE', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'A2ST', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'A2UT', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'ACCP', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'ADJG', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'ADJI', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'ADJL', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'ADJS', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'ADR4GW', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'AMGR', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'ANBA', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'ANTL', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'BFD', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'BFDGRP', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'BTSSCW', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'CABINET', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'CCFA', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'CERTH', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'CESIF', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'CMOB', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'COCO', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'CONNECTOR', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'ETHLK', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'FMCG', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'FMCI', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'FMCS', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'FTM', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'FUUNIT', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'HOPG', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 
'added_by': 0 }, { 'name': 'HOPI', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'HOPL', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'HOPS', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'HW', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'IAIF', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'IBFD', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'IBFP', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'IDNS', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'IDSP', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'IEIF', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'IFPG', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'IGIF', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'IHCP', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'IICP', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'IMAG', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'INTP', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'IPBR', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'IPHB', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'IPNB', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'IPNO', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'IPQM', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'IPRM', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'IPRO', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'IPRT', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'IPSECC', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'IQOS', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'ISBFP', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'ITRKGRP', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'ITRKOBJ', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'IUBSNT', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'IUCS', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'IUCSIP', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'IUO', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'IUPS', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'IUPSIP', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'IUR', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 
2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'IVIF', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'L2SWI', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'LCELGW', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'LCELW', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'MHA', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'MODULE', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'MRBTS', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'OSPFV2', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'PISCHDLITEM', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'PMSCHDLS', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'PPTT', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'PWNE', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'QOS', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'RMOD', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'RNAC', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'RNC', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'RNFC', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'RNHSPA', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'RNMOBI', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'RNPS', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'RNRLC', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'RNTRM', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'RSTP', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'SBR4', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'SMOD', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'SPTT', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'SRT4', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'SRTT', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'STPG', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'STPORT', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'SUBMODULE', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'SUBRACK', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'SVTT', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'SYNC', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'TCTT', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'TMPAR', 
'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'TOPB', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'TOPF', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'TRDE', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'TWAMP', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'TWAMPR', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'UNI', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'UNIT', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'VBTS', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'VCCT', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'VCEL', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'VCTT', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'VPCT', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'VPTT', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'WAC', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'WBTS', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'WCEL', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, { 'name': 'WRAB', 'parent_pk': 0, 'vendor_pk': 4, 'tech_pk': 2, 'modified_by': 0, 'added_by': 0 }, ])
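Both `managedobjects` migrations above build their insert target with `sa.sql.table()` rather than a full `MetaData`-bound `Table`: `op.bulk_insert()` only needs column names and types to render the INSERT statements. A minimal sketch of the pattern, with a hypothetical table and rows:

from alembic import op
import sqlalchemy as sa

# Lightweight table stub: no metadata, constraints, or reflection required.
widgets = sa.sql.table(
    'widgets',
    sa.Column('name', sa.String(50), nullable=False),
    sa.Column('added_by', sa.Integer),
)
op.bulk_insert(widgets, [
    {'name': 'alpha', 'added_by': 0},
    {'name': 'beta', 'added_by': 0},
])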
def upgrade():
    conn = alembic.context.get_context().bind
    meta = sqlalchemy.MetaData(bind=conn)
    meta.reflect()
    users = meta.tables["users"]
    all_users = dict(conn.execute(
        sqlalchemy.select([users.c.name, users.c.id])).fetchall())

    shows = alembic.op.create_table(
        "shows",
        sqlalchemy.Column("id", sqlalchemy.Integer, primary_key=True),
        sqlalchemy.Column("string_id", sqlalchemy.Text, nullable=False, unique=True),
        sqlalchemy.Column("name", sqlalchemy.Text, nullable=False),
    )

    alembic.op.execute(sqlalchemy.schema.CreateSequence(
        sqlalchemy.Sequence("games_id_seq", start=-1, increment=-1)))
    games = alembic.op.create_table(
        "games",
        sqlalchemy.Column("id", sqlalchemy.Integer,
                          sqlalchemy.Sequence("games_id_seq"), primary_key=True,
                          server_default=sqlalchemy.func.nextval('games_id_seq')),
        sqlalchemy.Column("name", sqlalchemy.Text, unique=True, nullable=False),
    )
    alembic.op.execute("ALTER SEQUENCE games_id_seq OWNED BY games.id")

    game_per_show_data = alembic.op.create_table(
        "game_per_show_data",
        sqlalchemy.Column("game_id", sqlalchemy.Integer,
                          sqlalchemy.ForeignKey("games.id", ondelete="CASCADE", onupdate="CASCADE"),
                          nullable=False),
        sqlalchemy.Column("show_id", sqlalchemy.Integer,
                          sqlalchemy.ForeignKey("shows.id", ondelete="CASCADE", onupdate="CASCADE"),
                          nullable=False),
        sqlalchemy.Column("display_name", sqlalchemy.Text),
        sqlalchemy.Column("verified", sqlalchemy.Boolean),
    )
    alembic.op.create_primary_key("game_per_show_data_pk", "game_per_show_data",
                                  ["game_id", "show_id"])

    stats = alembic.op.create_table(
        "stats",
        sqlalchemy.Column("id", sqlalchemy.Integer, primary_key=True),
        sqlalchemy.Column("string_id", sqlalchemy.Text, nullable=False, unique=True),
        sqlalchemy.Column("singular", sqlalchemy.Text),
        sqlalchemy.Column("plural", sqlalchemy.Text),
        sqlalchemy.Column("emote", sqlalchemy.Text),
    )

    game_stats = alembic.op.create_table(
        "game_stats",
        sqlalchemy.Column("game_id", sqlalchemy.Integer,
                          sqlalchemy.ForeignKey("games.id", ondelete="CASCADE", onupdate="CASCADE"),
                          nullable=False),
        sqlalchemy.Column("show_id", sqlalchemy.Integer,
                          sqlalchemy.ForeignKey("shows.id", ondelete="CASCADE", onupdate="CASCADE"),
                          nullable=False),
        # stat_id references the stats table
        sqlalchemy.Column("stat_id", sqlalchemy.Integer,
                          sqlalchemy.ForeignKey("stats.id", ondelete="CASCADE", onupdate="CASCADE"),
                          nullable=False),
        sqlalchemy.Column("count", sqlalchemy.Integer, nullable=False),
    )
    alembic.op.create_primary_key("game_stats_pk", "game_stats",
                                  ["game_id", "show_id", "stat_id"])

    game_votes = alembic.op.create_table(
        "game_votes",
        sqlalchemy.Column("game_id", sqlalchemy.Integer,
                          sqlalchemy.ForeignKey("games.id", ondelete="CASCADE", onupdate="CASCADE"),
                          nullable=False),
        sqlalchemy.Column("show_id", sqlalchemy.Integer,
                          sqlalchemy.ForeignKey("shows.id", ondelete="CASCADE", onupdate="CASCADE"),
                          nullable=False),
        sqlalchemy.Column("user_id", sqlalchemy.Integer,
                          sqlalchemy.ForeignKey("users.id", ondelete="CASCADE", onupdate="CASCADE"),
                          nullable=False),
        sqlalchemy.Column("vote", sqlalchemy.Boolean, nullable=False),
    )
    alembic.op.create_primary_key("game_votes_pk", "game_votes",
                                  ["game_id", "show_id", "user_id"])

    disabled_stats = alembic.op.create_table(
        "disabled_stats",
        sqlalchemy.Column("show_id", sqlalchemy.Integer,
                          sqlalchemy.ForeignKey("shows.id", ondelete="CASCADE", onupdate="CASCADE"),
                          nullable=False),
        sqlalchemy.Column("stat_id", sqlalchemy.Integer,
                          sqlalchemy.ForeignKey("stats.id", ondelete="CASCADE", onupdate="CASCADE"),
                          nullable=False),
    )
    alembic.op.create_primary_key("disabled_stats_pk", "disabled_stats",
                                  ["show_id", "stat_id"])

    # Move data
    datafile = alembic.context.config.get_section_option(
        "lrrbot", "datafile", "data.json")
    clientid = alembic.context.config.get_section_option(
        "lrrbot", "twitch_clientid")
    with open(datafile) as f:
        data = json.load(f)

    # stats
    alembic.op.bulk_insert(stats, [{
        "string_id": string_id,
        "emote": values.get("emote"),
        "plural": values.get("plural"),
        "singular": values.get("singular"),
    } for string_id, values in data.get("stats", {}).items()])
    all_stats = dict(conn.execute(
        sqlalchemy.select([stats.c.string_id, stats.c.id])).fetchall())

    # shows
    alembic.op.bulk_insert(shows, [{
        "string_id": show,
        "name": values["name"],
    } for show, values in data.get("shows", {}).items()])
    all_shows = dict(conn.execute(
        sqlalchemy.select([shows.c.string_id, shows.c.id])).fetchall())

    # games
    def parse_id(id):
        if id is None:
            return None
        try:
            return int(id)
        except ValueError:
            return None

    for show in data.get("shows", {}).values():
        for game_id, game in show.get("games", {}).items():
            game_id = parse_id(game_id) or parse_id(game.get("id"))
            if game_id is None:
                conn.execute(
                    "INSERT INTO games (name) VALUES (%(name)s) ON CONFLICT (name) DO NOTHING",
                    {"name": game["name"]})
            else:
                conn.execute("""
                    INSERT INTO games (id, name) VALUES (%(id)s, %(name)s)
                    ON CONFLICT (name) DO UPDATE SET id = EXCLUDED.id
                """, {"id": game_id, "name": game["name"]})
    all_games = dict(conn.execute(
        sqlalchemy.select([games.c.name, games.c.id])).fetchall())

    # game_per_show_data
    display_names = []
    for show_id, show in data.get("shows", {}).items():
        for game in show.get("games", {}).values():
            if "display" in game:
                display_names.append({
                    "show_id": all_shows[show_id],
                    "game_id": parse_id(game.get("id")) or all_games[game["name"]],
                    "display_name": game["display"],
                })
    alembic.op.bulk_insert(game_per_show_data, display_names)

    # game_stats
    all_game_stats = []
    for show_id, show in data.get("shows", {}).items():
        for game in show.get("games", {}).values():
            game_id = parse_id(game.get("id")) or all_games[game["name"]]
            for stat, count in game.get("stats", {}).items():
                all_game_stats.append({
                    "show_id": all_shows[show_id],
                    "game_id": game_id,
                    "stat_id": all_stats[stat],
                    "count": count,
                })
    alembic.op.bulk_insert(game_stats, all_game_stats)

    # game_votes
    all_votes = []
    with requests.Session() as session:
        for show_id, show in data.get("shows", {}).items():
            for game in show.get("games", {}).values():
                game_id = parse_id(game.get("id")) or all_games[game["name"]]
                for nick, vote in game.get("votes", {}).items():
                    if nick not in all_users:
                        try:
                            req = session.get(
                                "https://api.twitch.tv/kraken/users?login=%s"
                                % urllib.parse.quote(nick),
                                headers={
                                    'Client-ID': clientid,
                                    'Accept': 'application/vnd.twitchtv.v5+json',
                                })
                            req.raise_for_status()
                            user = req.json()['users'][0]
                            all_users[nick] = user["_id"]
                            alembic.op.bulk_insert(users, [{
                                "id": user["_id"],
                                "name": user["name"],
                                "display_name": user.get("display_name"),
                            }])
                        except Exception:
                            log.exception("Failed to fetch data for %r", nick)
                            all_users[nick] = None
                    if all_users[nick] is None:
                        continue
                    all_votes.append({
                        "show_id": all_shows[show_id],
                        "game_id": game_id,
                        "user_id": all_users[nick],
                        "vote": vote,
                    })
    alembic.op.bulk_insert(game_votes, all_votes)

    # disabled_stats
    if "swiftlycam" in all_shows:
        for_cameron = []
        for stat in ("death", "tilt", "pave"):
            if stat in all_stats:
                for_cameron.append({
                    "show_id": all_shows["swiftlycam"],
                    "stat_id": all_stats[stat],
                })
        alembic.op.bulk_insert(disabled_stats, for_cameron)

    alembic.op.add_column(
        "quotes",
        sqlalchemy.Column("game_id", sqlalchemy.Integer,
                          sqlalchemy.ForeignKey("games.id", ondelete="CASCADE", onupdate="CASCADE")))
    alembic.op.add_column(
        "quotes",
        sqlalchemy.Column("show_id", sqlalchemy.Integer,
                          sqlalchemy.ForeignKey("shows.id", ondelete="CASCADE", onupdate="CASCADE")))
    alembic.op.execute("""
        UPDATE quotes
        SET show_id = shows.id
        FROM shows
        WHERE quotes.show = shows.name
    """)
    alembic.op.execute("""
        UPDATE quotes
        SET game_id = game_per_show_data.game_id
        FROM game_per_show_data
        WHERE quotes.game = game_per_show_data.display_name
            AND game_per_show_data.show_id = quotes.show_id
    """)
    alembic.op.execute("""
        UPDATE quotes
        SET game_id = games.id
        FROM games
        WHERE quotes.game = games.name
    """)
    alembic.op.drop_column("quotes", "game")
    alembic.op.drop_column("quotes", "show")

    data.pop("shows", None)
    data.pop("stats", None)
    with open(datafile, "w") as f:
        json.dump(data, f, indent=2, sort_keys=True)