def test_semantic_version_field(self) -> None:
    id_colname = "id"
    version_colname = "version"
    id_col = Column(id_colname, Integer, primary_key=True)
    version_col = Column(version_colname, SemanticVersionColType)

    table = Table("testtable", self.meta, id_col, version_col)
    table.create()

    v1 = Version("1.1.0")
    v2 = Version("2.0.1")
    v3 = Version("14.0.0")
    table.insert().values([
        {
            id_colname: 1,
            version_colname: v1,
        },
        {
            id_colname: 2,
            version_colname: v2,
        },
        {
            id_colname: 3,
            version_colname: v3,
        },
    ]).execute()
    select_fields = [id_col, version_col]
    rows = list(
        select(select_fields)
        .select_from(table)
        .order_by(id_col)
        .execute()
    )
    self._assert_version_equal(rows[0][version_col], v1)
    self._assert_version_equal(rows[1][version_col], v2)
    self._assert_version_equal(rows[2][version_col], v3)
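
# Illustrative sketch (not part of the original code). The test above
# exercises SemanticVersionColType without showing how it works. As a hedged
# illustration only -- an assumption, not the library's actual implementation
# -- such a type can be built on SQLAlchemy's TypeDecorator: store the version
# as text, rebuild a Version object on read. The class name below is
# hypothetical, and it assumes the Version class used in the test is
# semantic_version.Version.

from semantic_version import Version as _SketchVersion
from sqlalchemy.types import String, TypeDecorator


class _SketchSemanticVersionColType(TypeDecorator):
    """Hypothetical example; the real SemanticVersionColType may differ."""
    impl = String(length=50)  # arbitrary length, chosen for the sketch

    def process_bind_param(self, value, dialect):
        # Python -> database: canonical string form, e.g. "1.1.0".
        return str(value) if value is not None else None

    def process_result_value(self, value, dialect):
        # Database -> Python: parse the stored text back into a Version.
        return _SketchVersion(value) if value is not None else None
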
def test_iso_duration_field(self) -> None:
    id_colname = "id"
    duration_colname = "duration_iso"
    id_col = Column(id_colname, Integer, primary_key=True)
    duration_col = Column(duration_colname, PendulumDurationAsIsoTextColType)

    table = Table("testtable", self.meta, id_col, duration_col)
    table.create()

    d1 = Duration(years=1, months=3, seconds=3, microseconds=4)
    d2 = Duration(seconds=987.654321)
    d3 = Duration(days=-5)
    table.insert().values([
        {
            id_colname: 1,
            duration_colname: d1,
        },
        {
            id_colname: 2,
            duration_colname: d2,
        },
        {
            id_colname: 3,
            duration_colname: d3,
        },
    ]).execute()
    select_fields = [id_col, duration_col]
    rows = list(
        select(select_fields)
        .select_from(table)
        .order_by(id_col)
        .execute()
    )
    self._assert_duration_equal(rows[0][duration_col], d1)
    self._assert_duration_equal(rows[1][duration_col], d2)
    self._assert_duration_equal(rows[2][duration_col], d3)
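
# Illustrative sketch (not part of the original code). These tests call
# table.create() and statement.execute() with no explicit connection, which
# relies on SQLAlchemy's "bound metadata" / implicit execution (deprecated in
# 1.4, removed in 2.0). A fixture along the following lines is therefore
# assumed; the project's real base class may differ.

import unittest

from sqlalchemy import create_engine
from sqlalchemy.schema import MetaData


class _SketchColTypeTestBase(unittest.TestCase):
    """Hypothetical fixture providing self.meta bound to an in-memory DB."""

    def setUp(self) -> None:
        self.engine = create_engine("sqlite://")  # in-memory SQLite
        self.meta = MetaData(bind=self.engine)  # bound metadata (SQLA < 2.0)

    def tearDown(self) -> None:
        self.engine.dispose()
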
def _get_or_insert_summary_table(self, est: "ExtraSummaryTable") -> Table:
    """
    Add an additional summary table to the dump, if it's not there already.
    Return the table (from the destination database).
    """
    tablename = est.tablename
    if tablename not in self.tablenames_seen:
        self.tablenames_seen.add(tablename)
        table = Table(tablename, self.dst_metadata, *est.columns)
        self.dst_tables[tablename] = table
        self.dst_session.commit()
        table.create(self.dst_engine)
    return self.dst_tables[tablename]
def create_table(self):
    email_table = Table(
        self.table_name, self.metadata,
        Column('id', Integer, primary_key=True),
        Column('date', Text),
        Column('mime_type', Text),
        Column('from_addr', Text),
        Column('to_addr', Text),
        Column('subject', Text),
        Column('raw_body', Text),
        Column('cleaned_body', Text),
        Column('one_line', Text),
        Column('path', Text),
        Column('prediction', Integer),
        Column('probability', Float)
    )
    email_table.create(self.engine)
def _create_dest_table(self, dst_table: Table) -> None:
    """
    Creates a table in the destination database.
    """
    tablename = dst_table.name
    if tablename in self.tablenames_created:
        return  # don't create it twice
    # Create the table
    # log.debug("Adding table {!r} to dump output", tablename)
    # You have to use an engine, not a session, to create tables (or you
    # get "AttributeError: 'Session' object has no attribute
    # '_run_visitor'").
    # However, you have to commit the session, or you get
    # "sqlalchemy.exc.OperationalError: (sqlite3.OperationalError)
    # database is locked", since a session is also being used.
    self.dst_session.commit()
    dst_table.create(self.dst_engine)
    self.tablenames_created.add(tablename)
def create_sample_table(self, sample_table_name):
    if sample_table_name == self.table_name:
        print('Cannot use the same table name')
        return
    email_table = Table(
        sample_table_name, self.metadata,
        Column('id', Integer, primary_key=True),
        Column('date', Text),
        Column('mime_type', Text),
        Column('from_addr', Text),
        Column('to_addr', Text),
        Column('subject', Text),
        Column('raw_body', Text),
        Column('cleaned_body', Text),
        Column('one_line', Text),
        Column('path', Text),
        Column('prediction', Integer),
        Column('probability', Float),
        Column('manual_label', Integer)
    )
    email_table.create(self.engine)
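
# Illustrative sketch (not part of the original code). Both create_table()
# and create_sample_table() issue an unconditional CREATE TABLE, and
# Table.create() raises if the table already exists. If these helpers might
# run more than once against the same database, a defensive variant could use
# SQLAlchemy's standard checkfirst flag; the helper name below is
# hypothetical.

def _create_table_if_absent(table: Table, engine) -> None:
    """Create the table only if it is not already present."""
    table.create(engine, checkfirst=True)  # skips CREATE TABLE if it exists
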
def test_phone_number_field(self) -> None:
    id_colname = "id"
    phone_number_colname = "phone_number"
    id_col = Column(id_colname, Integer, primary_key=True)
    phone_number_col = Column(phone_number_colname, PhoneNumberColType)

    table = Table("testtable", self.meta, id_col, phone_number_col)
    table.create()

    # https://en.wikipedia.org/wiki/Fictitious_telephone_number
    p1 = phonenumbers.parse("+44 (0)113 496 0123")
    p2 = phonenumbers.parse("+33 1 99 00 12 34 56")
    p3 = phonenumbers.parse("07700 900123", "GB")
    p4 = None
    table.insert().values([
        {
            id_colname: 1,
            phone_number_colname: p1,
        },
        {
            id_colname: 2,
            phone_number_colname: p2,
        },
        {
            id_colname: 3,
            phone_number_colname: p3,
        },
        {
            id_colname: 4,
            phone_number_colname: p4,
        },
    ]).execute()
    select_fields = [id_col, phone_number_col]
    rows = list(
        select(select_fields)
        .select_from(table)
        .order_by(id_col)
        .execute()
    )
    self.assertEqual(rows[0][phone_number_col], p1)
    self.assertEqual(rows[1][phone_number_col], p2)
    self.assertEqual(rows[2][phone_number_col], p3)
    self.assertIsNone(rows[3][phone_number_col])
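
# Illustrative sketch (not part of the original code). A small, self-contained
# illustration of the text round trip a phone-number column type has to
# preserve: format to E.164 with the phonenumbers library, then parse the text
# back to an equal object. This is an assumption about the storage strategy,
# not the actual PhoneNumberColType implementation.

import phonenumbers as _pn


def _demo_phone_number_roundtrip() -> None:
    """Round-trip a fictitious GB number through its E.164 text form."""
    original = _pn.parse("07700 900123", "GB")
    as_text = _pn.format_number(original, _pn.PhoneNumberFormat.E164)
    restored = _pn.parse(as_text)  # no region needed: the text carries +44
    assert restored == original  # PhoneNumber equality is value-based
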
def _add_dump_table_for_src_object(self, src_obj: object) -> None:
    """
    - Mark the object's table as seen.
    - If we want it, add it to the metadata and execute a CREATE TABLE
      command.
    - We may translate the table en route.
    """
    # noinspection PyUnresolvedReferences
    src_table = src_obj.__table__  # type: Table
    tablename = src_table.name
    self.tablenames_seen.add(tablename)

    # Skip the table?
    if self._dump_skip_table(tablename):
        return

    # Copy columns, dropping any we don't want, and dropping FK constraints
    dst_columns = []  # type: List[Column]
    for src_column in src_table.columns:
        # log.debug("trying {!r}", src_column.name)
        if self._dump_skip_column(tablename, src_column.name):
            # log.debug("... skipping {!r}", src_column.name)
            continue
        # You can't add the source column directly; you get
        # "sqlalchemy.exc.ArgumentError: Column object 'ccc' already
        # assigned to Table 'ttt'"
        copied_column = src_column.copy()
        copied_column.comment = src_column.comment
        # ... see SQLAlchemy trivial bug:
        # https://bitbucket.org/zzzeek/sqlalchemy/issues/4087/columncopy-doesnt-copy-comment-attribute  # noqa
        if FOREIGN_KEY_CONSTRAINTS_IN_DUMP:
            copied_column.foreign_keys = set(
                fk.copy() for fk in src_column.foreign_keys
            )
            log.warning("NOT WORKING: foreign key commands not being "
                        "emitted")
            # but http://docs.sqlalchemy.org/en/latest/core/constraints.html  # noqa
            # works fine under SQLite, even if the other table hasn't been
            # created yet. Does the table to which the FK refers have to be
            # in the metadata already? That's quite possible, but I've not
            # checked. Would need to iterate through tables in dependency
            # order, like merge_db() does.
        else:
            # Probably blank already, as the copy() command only copies
            # non-constraint-bound ForeignKey objects, but to be sure:
            copied_column.foreign_keys = set()  # type: Set[ForeignKey]
        # if src_column.foreign_keys:
        #     log.debug("Column {}, FKs {!r} -> {!r}", src_column.name,
        #               src_column.foreign_keys,
        #               copied_column.foreign_keys)
        dst_columns.append(copied_column)

    # Add extra columns?
    if isinstance(src_obj, GenericTabletRecordMixin):
        for summary_element in src_obj.get_summaries(self.req):
            dst_columns.append(Column(summary_element.name,
                                      summary_element.coltype,
                                      comment=summary_element.comment))

    # Create the table
    # log.debug("Adding table {!r} to dump output", tablename)
    dst_table = Table(tablename, self.dst_metadata, *dst_columns)
    self.dst_tables[tablename] = dst_table
    # You have to use an engine, not a session, to create tables (or you
    # get "AttributeError: 'Session' object has no attribute
    # '_run_visitor'").
    # However, you have to commit the session, or you get
    # "sqlalchemy.exc.OperationalError: (sqlite3.OperationalError)
    # database is locked", since a session is also being used.
    self.dst_session.commit()
    dst_table.create(self.dst_engine)
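
# Illustrative sketch (not part of the original code). The foreign-key comment
# above wonders whether a referenced table must already be known before the
# CREATE TABLE is emitted, and notes that one would then have to walk tables
# in dependency order, as merge_db() does. One standard way to do that -- a
# sketch, not what this class currently does -- is to defer creation and let
# MetaData.sorted_tables yield parents before children.

def _create_tables_in_dependency_order(metadata, session, engine) -> None:
    """Hypothetical helper: create every table in the metadata, parents
    first, so FK targets exist before the tables that reference them."""
    session.commit()  # as noted above, avoids SQLite "database is locked"
    for table in metadata.sorted_tables:  # sorted by FK dependencies
        table.create(engine, checkfirst=True)
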
def test_iso_datetime_field(self) -> None:
    id_colname = "id"
    dt_local_colname = "dt_local"
    dt_utc_colname = "dt_utc"
    iso_colname = "iso"
    id_col = Column(id_colname, Integer, primary_key=True)
    dt_local_col = Column(dt_local_colname, DateTime)
    dt_utc_col = Column(dt_utc_colname, DateTime)
    iso_col = Column(iso_colname, PendulumDateTimeAsIsoTextColType)

    table = Table("testtable", self.meta,
                  id_col, dt_local_col, dt_utc_col, iso_col)
    table.create()

    now = Pendulum.now()
    now_utc = now.in_tz(pendulum.UTC)
    yesterday = now.subtract(days=1)
    yesterday_utc = yesterday.in_tz(pendulum.UTC)

    table.insert().values([
        {
            id_colname: 1,
            dt_local_colname: now,
            dt_utc_colname: now_utc,
            iso_colname: now,
        },
        {
            id_colname: 2,
            dt_local_colname: yesterday,
            dt_utc_colname: yesterday_utc,
            iso_colname: yesterday,
        },
    ]).execute()
    select_fields = [
        id_col,
        dt_local_col,
        dt_utc_col,
        iso_col,
        func.length(dt_local_col).label("len_dt_local_col"),
        func.length(dt_utc_col).label("len_dt_utc_col"),
        func.length(iso_col).label("len_iso_col"),
        isotzdatetime_to_utcdatetime(iso_col).label("iso_to_utcdt"),
        unknown_field_to_utcdatetime(dt_utc_col).label("uk_utcdt_to_utcdt"),
        unknown_field_to_utcdatetime(iso_col).label("uk_iso_to_utc_dt"),
    ]
    rows = list(
        select(select_fields)
        .select_from(table)
        .order_by(id_col)
        .execute()
    )
    self._assert_dt_equal(rows[0][dt_local_col], now)
    self._assert_dt_equal(rows[0][dt_utc_col], now_utc)
    self._assert_dt_equal(rows[0][iso_colname], now)
    self._assert_dt_equal(rows[0]["iso_to_utcdt"], now_utc)
    self._assert_dt_equal(rows[0]["uk_utcdt_to_utcdt"], now_utc)
    self._assert_dt_equal(rows[0]["uk_iso_to_utc_dt"], now_utc)
    self._assert_dt_equal(rows[1][dt_local_col], yesterday)
    self._assert_dt_equal(rows[1][dt_utc_col], yesterday_utc)
    self._assert_dt_equal(rows[1][iso_colname], yesterday)
    self._assert_dt_equal(rows[1]["iso_to_utcdt"], yesterday_utc)
    self._assert_dt_equal(rows[1]["uk_utcdt_to_utcdt"], yesterday_utc)
    self._assert_dt_equal(rows[1]["uk_iso_to_utc_dt"], yesterday_utc)
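
# Illustrative sketch (not part of the original test). A small, self-contained
# check of the ISO 8601 round trip that PendulumDateTimeAsIsoTextColType is
# expected to preserve, including the timezone offset that a plain DATETIME
# column would lose:

import pendulum as _pendulum


def _demo_iso_datetime_roundtrip() -> None:
    """Serialize a timezone-aware datetime to ISO 8601 text and back."""
    now = _pendulum.now()
    as_text = now.isoformat()  # e.g. "2024-05-01T12:34:56.789012+01:00"
    restored = _pendulum.parse(as_text)  # the offset survives the text form
    assert restored == now
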