Example #1
    def reflect_model(self, table_name, bind_key=None):
        """ 反向生成 ORM 的 Model
        :param table_name:
        :param bind_key:
        :return: ORMClass
        """
        with self._reflect_lock:
            if table_name in self._models:
                return self._models[table_name]

            engine = self.get_engine(bind_key)
            meta = MetaData(bind=engine)
            meta.reflect(only=[table_name])

            table = meta.tables[table_name]
            self._tables[table_name] = table

            Base = automap_base(metadata=meta)
            Base.prepare()

            model = getattr(Base.classes, table_name)
            model.__table__.metadata = None
            self._models[table_name] = model

            return model
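A minimal usage sketch for the reflection cache above; `db` and the table name 'users' are assumptions, standing in for an instance of the surrounding class with a default engine registered:

from sqlalchemy.orm import Session

# `db` is assumed to be an instance of the class defining reflect_model above
User = db.reflect_model('users')             # reflects the table, builds the automap model
assert db.reflect_model('users') is User     # second call hits the cache

session = Session(bind=db.get_engine())
print(session.query(User).count())
session.close()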
Example #2
 def tester():
     class FooObj(DataObject):
         partition_attribute = 'id'
     
     @schema([IntegerColumn(name='id')])
     class Foo(FooObj):
         identity_key_ = (('id', 'id'),)
         sort_key_ = ('id',)
     
     # Create a test database and table
     engine = create_engine('sqlite://')
     metadata = MetaData(bind=engine)
     foos_table = Foo.to_sqa_table(metadata, 'foos')
     metadata.create_all()
     
     # Define the mapping between tables and objects for writing
     writer_config = {Foo: SqaWriterConfig(foos_table)}
     
     # Define the mapping between tables and objects for reading
     reader_config = {Foo: SqaReaderConfig(foos_table, engine_wrapper(engine), **reader_args)}
     
     # Create some objects
     foos = [Foo(id=i) for i in range(1000)]
     
     # Write the objects to the database
     writer = Foo.writer(writer_config)
     for foo in foos:
         writer.write(foo)
     
     def do_the_reading():
         return [foo for foo in Foo.reader(reader_config)]
     
     assertion(do_the_reading)
Example #3
    def reflect_table(self, table_name, bind_key=None):
        with self._reflect_lock:
            if table_name in self._tables:
                return self._tables[table_name]

            engine = self.get_engine(bind_key)
            meta = MetaData(bind=engine)
            meta.reflect(only=[table_name])

            table = meta.tables[table_name]
            table.metadata = None
            self._tables[table_name] = table

            return table
Example #4
def test_vector_greater_than():
    metadata = MetaData()
    table = Table('people', metadata,
                  Column('id', Integer, primary_key=True), 
                  Column('first_name', String),
                  Column('middle_name', String),
                  Column('last_name', String),
                  Column('blood_type', String))
    def random_person(idx):
        first = names.get_first_name()
        last = names.get_last_name()
        middle = random.choice([names.get_first_name, names.get_last_name, lambda: None])()
        blood_type = random.choice(['A', 'A', 'B', 'B', 'O', 'O', 'O', 'O', 'AB'])
        return {'id': idx,
                'first_name': first,
                'middle_name': middle,
                'last_name': last,
                'blood_type': blood_type
                }
    engine = create_engine('sqlite:///:memory:', echo=False)
    metadata.create_all(engine)
    def compare_results(compa, cols, vals):
        vals = tuple(map(none_to_minus_inf, vals))
        res = set([row['id'] for row in engine.execute(select(table.columns).where(compa))])
        all_ = [valmap(none_to_minus_inf, row) for row in engine.execute(select(table.columns))]
        cor = set()
        for row in all_:
            if tuple(row[col.name] for col in cols) > vals:
                cor.add(row['id'])
        assert_equal(res, cor)
    
    for i in range(1000):
        engine.execute(table.insert(random_person(i)))
    
    
    col_tuples = [(table.columns['id'],),
                  (table.columns['blood_type'], table.columns['id']),
                  (table.columns['blood_type'], table.columns['middle_name'], table.columns['id']),
                  (table.columns['blood_type'], table.columns['id'], table.columns['middle_name']),
                  (table.columns['middle_name'], table.columns['blood_type'], table.columns['id']),]
    val_tuples = [(5,),
                  ('AB', 500),
                  ('B', None, 500),
                  ('B', 500, None),
                  (None, 'B', 500)]
    for cols, vals in zip(col_tuples, val_tuples):
        compare_results(vector_greater_than(cols, vals), cols, vals)
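The test above only exercises `vector_greater_than`; its implementation is not shown. A sketch of the lexicographic expansion such a helper would typically perform (the NULL handling the test simulates by mapping None to minus infinity is omitted, so the real implementation may differ):

from sqlalchemy import and_, or_

def vector_greater_than_sketch(cols, vals):
    # (a, b, c) > (x, y, z)  expands to
    # a > x OR (a == x AND (b > y OR (b == y AND c > z)))
    clause = cols[-1] > vals[-1]
    for col, val in zip(reversed(cols[:-1]), reversed(vals[:-1])):
        clause = or_(col > val, and_(col == val, clause))
    return clause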
Example #5
    def run(self):
        # Source reflection
        source_meta = MetaData()
        source_meta.reflect(bind=self.source_engine)
        source_tables = source_meta.tables

        source_table_names = list(source_tables)

        # Destination Binding
        destination_meta = MetaData(bind=self.destination_engine)
        for name, table in source_tables.items():
            table.metadata = destination_meta
            if name in self.settings.exclude_data.keys():
                table.__mapper_args__ = {"exclude_properties": self.settings.exclude_data[name]}

        # Drop table for testing purposes
        # destination_meta.drop_all(self.destination_engine)
        for table in source_table_names:
            self.sessions.destination.execute("DROP TABLE {table};".format(table=table))
            self.sessions.destination.commit()
            print("DROPPED TABLE {table}".format(table=table))

        # Begin migration
        source_meta.create_all(self.destination_engine)

        source_data = {table: self.sessions.source.query(source_tables[table]).all() for table in source_table_names}

        for table in source_table_names:
            print("Migrating:", table)
            # if table in self.settings.exclude_data.keys():
            #     pprint(source_tables[table].__mapper_args__)
            #     exit(1)
            for row in source_data[table]:
                try:
                    self.sessions.destination.execute(source_tables[table].insert(row))
                except StatementError:
                    print("Bad data in table: ", table, "row data:\n", row[0], "Error:", sys.exc_info()[0])
            print("Data for:", table, "added to the queue..")

        self.sessions.destination.commit()
        print("Migration Complete!")
Example #6
def connect_test_database(url: Union[str, URL], **engine_kwargs) -> Connection:
    """
    Connect to the given database and drop any existing tables in it.

    For SQLite URLs pointing to a file, the target database file will be deleted and a new one
    created in its place.

    :param url: connection URL for the database
    :param engine_kwargs: additional keyword arguments passed to
        :meth:`asphalt.sqlalchemy.component.SQLAlchemyComponent.create_engine`
    :return: a connection object

    """
    assert check_argument_types()
    _context_attr, engine = SQLAlchemyComponent.configure_engine(url=url, **engine_kwargs)

    if engine.dialect.name == 'sqlite':
        # SQLite does not support dropping constraints and it's faster to just delete the file
        if engine.url.database not in (None, ':memory:') and os.path.isfile(engine.url.database):
            os.remove(engine.url.database)

        connection = engine.connect()
    else:
        # Reflect the schema to get the list of the tables and constraints left over from the
        # previous run
        connection = engine.connect()
        metadata = MetaData(connection, reflect=True)

        # Drop all the foreign key constraints so we can drop the tables in any order
        for table in metadata.tables.values():
            for fk in table.foreign_keys:
                connection.execute(DropConstraint(fk.constraint))

        # Drop the tables
        metadata.drop_all()

    return connection
Example #7
 def test_reflect_select(self, engine, connection):
     one_row_complex = Table('one_row_complex', MetaData(bind=engine), autoload=True)
     self.assertEqual(len(one_row_complex.c), 15)
     self.assertIsInstance(one_row_complex.c.col_string, Column)
     rows = one_row_complex.select().execute().fetchall()
     self.assertEqual(len(rows), 1)
     self.assertEqual(list(rows[0]), [
         True,
         127,
         32767,
         2147483647,
         9223372036854775807,
         0.5,
         0.25,
         'a string',
         datetime(2017, 1, 1, 0, 0, 0),
         date(2017, 1, 2),
         b'123',
         '[1, 2]',
         '{1=2, 3=4}',
         '{a=1, b=2}',
         Decimal('0.1'),
     ])
     self.assertIsInstance(one_row_complex.c.col_boolean.type, BOOLEAN)
     self.assertIsInstance(one_row_complex.c.col_tinyint.type, INTEGER)
     self.assertIsInstance(one_row_complex.c.col_smallint.type, INTEGER)
     self.assertIsInstance(one_row_complex.c.col_int.type, INTEGER)
     self.assertIsInstance(one_row_complex.c.col_bigint.type, BIGINT)
     self.assertIsInstance(one_row_complex.c.col_float.type, FLOAT)
     self.assertIsInstance(one_row_complex.c.col_double.type, FLOAT)
     self.assertIsInstance(one_row_complex.c.col_string.type, type(STRINGTYPE))
     self.assertIsInstance(one_row_complex.c.col_timestamp.type, TIMESTAMP)
     self.assertIsInstance(one_row_complex.c.col_date.type, DATE)
     self.assertIsInstance(one_row_complex.c.col_binary.type, BINARY)
     self.assertIsInstance(one_row_complex.c.col_array.type, type(STRINGTYPE))
     self.assertIsInstance(one_row_complex.c.col_map.type, type(STRINGTYPE))
     self.assertIsInstance(one_row_complex.c.col_struct.type, type(STRINGTYPE))
     self.assertIsInstance(one_row_complex.c.col_decimal.type, DECIMAL)
Example #8
    def connection(self):
        test = ""

        try:
            if self.conn_str.startswith("sqlite"):
                self.engine = create_engine(self.conn_str,
                                            echo=eval(self.boolean))
            else:
                self.engine = create_engine(self.conn_str,
                                            max_overflow=-1,
                                            echo=eval(self.boolean))
            self.metadata = MetaData(self.engine)
            self.engine.connect()
        except Exception as e:
            test = str(e)

        try:
            db_upd = DB_update()
            db_upd.update_table()
        except Exception as e:
            test = str(e)
        return test
Example #9
 def test_reflect_table_include_columns(self, engine, conn):
     one_row_complex = Table("one_row_complex", MetaData(bind=engine))
     version = float(
         re.search(r"^([\d]+\.[\d]+)\..+", sqlalchemy.__version__).group(1))
     if version <= 1.2:
         engine.dialect.reflecttable(
             conn,
             one_row_complex,
             include_columns=["col_int"],
             exclude_columns=[],
         )
     else:
         engine.dialect.reflecttable(
             conn,
             one_row_complex,
             include_columns=["col_int"],
             exclude_columns=[],
             resolve_fks=True,
         )
     self.assertEqual(len(one_row_complex.c), 1)
     self.assertIsNotNone(one_row_complex.c.col_int)
     self.assertRaises(AttributeError,
                       lambda: one_row_complex.c.col_tinyint)
Example #10
 def test_reflect_table_include_columns(self, engine, conn):
     one_row_complex = Table("one_row_complex", MetaData(bind=engine))
     version = float(
         re.search(r"^([\d]+\.[\d]+)\..+", sqlalchemy.__version__).group(1))
     if version <= 1.2:
         engine.dialect.reflecttable(conn,
                                     one_row_complex,
                                     include_columns=["col_int"],
                                     exclude_columns=[])
     else:
         # https://docs.sqlalchemy.org/en/13/changelog/changelog_13.html#
         # change-64ac776996da1a5c3e3460b4c0f0b257
         engine.dialect.reflecttable(
             conn,
             one_row_complex,
             include_columns=["col_int"],
             exclude_columns=[],
             resolve_fks=True,
         )
     self.assertEqual(len(one_row_complex.c), 1)
     self.assertIsNotNone(one_row_complex.c.col_int)
     self.assertRaises(AttributeError,
                       lambda: one_row_complex.c.col_tinyint)
Example #11
def test_connect_test_database(database_url):
    metadata1 = MetaData()
    Table('table', metadata1, Column('column1', Integer, primary_key=True))
    Table('table2', metadata1, Column('fk_column', ForeignKey('table.column1', name='fk_1')))
    with connect_test_database(database_url) as connection:
        metadata1.create_all(connection)

    metadata2 = MetaData()
    Table('table', metadata2, Column('column2', Unicode(20)))
    with connect_test_database(database_url) as connection:
        metadata2.create_all(connection)
        metadata3 = MetaData(bind=connection, reflect=True)

    assert len(metadata3.tables) == 1

    table = metadata3.tables['table']
    assert len(table.columns) == 1
    assert 'column2' in table.columns.keys()
Example #12
    def __init__(self, db_url):
        self.db_url = db_url
        self.engine = create_engine(self.db_url, echo=False)
        self.meta = MetaData()

        # connect
        self.conn = self.engine.connect()
        self.inspector = inspect(self.engine)
        self.db_all_table_names = self.inspector.get_table_names()

        # as a simple self-test that SQLAlchemy + Postgres are working,
        # read and parse the event_types table
        self.event_types = {}
        self.db_table_event_types = self.mount_table_from_db('event_types')
        res = self.conn.execute(select([self.db_table_event_types]))
        for row in res:
            self.event_types[row.id] = row.type

        # https://stackoverflow.com/questions/5225780/turn-off-a-warning-in-sqlalchemy
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", category=sa_exc.SAWarning)

            # we are always interested in the events table, so mount it immediately
            self.table_events = self.mount_table_from_db('events')

        self.stat = None

        df_ctunits = pd.read_sql("SELECT * FROM ct_units", self.engine)

        if 'machine_key' in df_ctunits:
            self.is2016format = False
            self.is2017format = True
            self.hostname_in_events = 'ct_units_id'
        else:
            self.is2016format = True
            self.is2017format = False
            self.hostname_in_events = 'hostnames_id'
Example #13
    def verify_if_not_exists_dupe_values(self):

        meta = MetaData()
        table = Table(self.table_name,
                      meta,
                      autoload=True,
                      autoload_with=engine)
        primary_key = table.primary_key.columns.values()[0].name
        engine_table = pd.read_sql_query(
            f'SELECT {primary_key} from {self.table_name};', engine)

        try:
            mask = engine_table[primary_key].isin(self.df[primary_key])
            if engine_table[primary_key].loc[mask].shape[0] != 0:
                for i in engine_table[primary_key].loc[mask]:
                    statement = 'DELETE FROM {} WHERE {} = {};'.format(
                        self.table_name, primary_key,
                        i)  # query syntax to exclude the duplicated values
                    engine.connect().execute(statement)
                print('DUPLICATED PRIMARY KEY VALUES WERE EXCLUDED')
            return 1
        except Exception as ex:
            print("OCCOUR EXCEPTIOM :( => {}".format(ex))
            return 0
Example #14
 def save_the_file_numbers(series: pd.DataFrame) -> pd.DataFrame:
     """
     Saves the file_number for each of the decision ids
     :param series:
     :return:
     """
     query = f"SELECT decision_id FROM decision WHERE file_id = '{series['file_id']}'"
     series['decision_id'] = pd.read_sql(query,
                                         engine.connect())["decision_id"][0]
     with engine.connect() as conn:
         t = Table('file_number', MetaData(), autoload_with=engine)
         # Delete and reinsert as no upsert command is available
         stmt = t.delete().where(delete_stmt_decisions_with_df(series))
         conn.execute(stmt)
     series['text'] = series['file_number'].strip()  # .map(lambda x: x.strip())
     save_to_db(series[['decision_id', 'text']], 'file_number')
     if ('file_number_additional' in series
             and series['file_number_additional'] is not None
             and len(series['file_number_additional']) > 0):
         series['text'] = series['file_number_additional'].strip()  # .map(lambda x: x.strip())
         save_to_db(series[['decision_id', 'text']], 'file_number')
     return series
Example #15
def clear_database(
    engine: Engine | Connection, schemas: Iterable[str] = ()) -> None:
    """
    Clear any tables from an existing database using a synchronous connection/engine.

    :param engine: the engine or connection to use
    :param schemas: full list of schema names to expect (ignored for SQLite)

    """
    metadatas = []
    all_schemas: tuple[str | None, ...] = (None, )
    all_schemas += tuple(schemas)
    for schema in all_schemas:
        # Reflect the schema to get the list of the tables, views and constraints
        metadata = MetaData()
        metadata.reflect(engine, schema=schema, views=True)
        metadatas.append(metadata)

    for metadata in metadatas:
        metadata.drop_all(engine, checkfirst=False)
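Hypothetical usage of `clear_database`, assuming a reachable test database (the URL is a placeholder):

from sqlalchemy import create_engine

engine = create_engine('postgresql://localhost/testdb')  # placeholder URL
clear_database(engine, schemas=['altschema'])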
Example #16
def engine(request, tmpdir_factory):
    engine = None
    if request.param == 'sqlite-file':
        tmpdir = tmpdir_factory.mktemp('asphalt-sqlalchemy')
        db_path = str(tmpdir.join('test.db'))
        engine = create_engine('sqlite:///' + db_path)
    elif request.param == 'sqlite-memory':
        engine = create_engine('sqlite:///:memory:')
    elif request.param == 'mysql':
        url = request.getfixturevalue('mysql_url')
        engine = create_engine(url)
    elif request.param == 'postgresql':
        url = request.getfixturevalue('postgresql_url')
        engine = create_engine(url)

    if engine.dialect.name != 'sqlite':
        engine.execute(CreateSchema('altschema'))

    if request.param != 'sqlite-memory':
        metadata = MetaData()
        Table('table', metadata, Column('column1', Integer, primary_key=True))
        Table('table2', metadata,
              Column('fk_column', ForeignKey('table.column1')))
        if engine.dialect.name != 'sqlite':
            Table('table3',
                  metadata,
                  Column('fk_column', Integer),
                  schema='altschema')

        metadata.create_all(engine)

    yield engine

    if engine.dialect.name != 'sqlite':
        metadata.drop_all(engine)
        engine.execute(DropSchema('altschema'))
Example #17
 def test_char_length(self, engine, connection):
     one_row_complex = Table('one_row_complex', MetaData(bind=engine), autoload=True)
     result = sqlalchemy.select([
         sqlalchemy.func.char_length(one_row_complex.c.col_string)
     ]).execute().scalar()
     self.assertEqual(result, len('a string'))
Example #18
 def test_has_table(self, engine, connection):
     self.assertTrue(Table('one_row', MetaData(bind=engine)).exists())
     self.assertFalse(Table('this_table_does_not_exist', MetaData(bind=engine)).exists())
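Note that `Table.exists()` was deprecated in SQLAlchemy 1.4 and removed in 2.0, so the test above only runs on older versions; a sketch of the equivalent check on modern versions goes through the inspector:

 from sqlalchemy import inspect

 def test_has_table(self, engine, connection):
     self.assertTrue(inspect(engine).has_table('one_row'))
     self.assertFalse(inspect(engine).has_table('this_table_does_not_exist'))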
Example #19
class EnronDB:
    def __init__(self):
        self.engine = None
        self.metadata = MetaData()
        
    def init(self, host, username, password, db_name):
        engine_desc = 'mysql://%s:%s@%s/%s' % (username, password, host, db_name)
        try:
            self.engine = create_engine(engine_desc)
            self.metadata.reflect(self.engine)
        except:
            print "Unexpected error:", sys.exc_info()[0]
            return False
        return True    
    
    # RAW_EMAIL table
    def insert_email(self, email):
        self.insert_to_table(email, "raw_email")
    
    # RAW_EMAIL table
    def insert_cleaned_email(self, email):
        self.insert_to_table(email, "cleaned_email")

    def insert_to_table(self, email, table_name):
        if not isinstance(email, Email):
            print 'ERROR: input must be of type Email'
            return
        
        email_table = Table(table_name, self.metadata)
        ins_stmt = email_table.insert()
        conn = self.engine.connect()
        result = conn.execute(ins_stmt, date=email.date,
                              mime_type=email.mime_type,
                              from_addr=email.from_addr,
                              to_addr=email.to_addr,
                              subject=email.subject,
                              body=email.body,
                              path=email.path,
                              label=email.label)
    
    def get_all_content(self):
        email_table = Table('raw_email', self.metadata)
        sel_stmt = select([email_table.c.subject, email_table.c.body])
        rp = self.engine.execute(sel_stmt)
        all_content = ""
        for record in rp:
            all_content += record.subject + " "
            all_content += record.body + " "
        return all_content
    
    def add_username(self):
        email_table = Table('brushed_email', self.metadata)
        sel_stmt = select([email_table.c.id, email_table.c.path])
        rp = self.engine.execute(sel_stmt)
        conn = self.engine.connect()
        for record in rp:
#             print(record)
            p = "\/[^\/]*\/([^\/]+)"  # match the content between the second / and the third /
            match = re.match(p, record.path)
            if match:
                username = match.group(1)
                stmt = email_table.update().where(email_table.c.id == record.id).values(username=username)
                conn.execute(stmt)
            else:
                print("Error! " + record.path)
                exit(0)
    
    def update_brushed_email_is_scheduling(self, email_id, is_scheduling):
        email_table = Table('brushed_email', self.metadata)
        conn = self.engine.connect()
        stmt = email_table.update().where(email_table.c.id == email_id).values(is_scheduling=is_scheduling)
        conn.execute(stmt)

    def get_all_dates(self):
        email_table = Table('raw_email', self.metadata)
        sel_stmt = select([email_table.c.date])
        rp = self.engine.execute(sel_stmt)
        dates = []
        for record in rp:
            dates.append(record.date.strftime("%y%m%d"))
        return dates
    
    def get_all_subjects(self):
        email_table = Table('raw_email', self.metadata)
        sel_stmt = select([email_table.c.subject])
        rp = self.engine.execute(sel_stmt)
        subjects = []
        for record in rp:
            subjects.append(record.subject)
        return subjects    
    
    def get_all_bodies_with_id(self):
        email_table = Table('raw_email', self.metadata)
        sel_stmt = select([email_table.c.id, email_table.c.body])
        rp = self.engine.execute(sel_stmt)
        bodies = []
        for record in rp:
            bodies.append((record.id, record.body))
        return bodies    
    
    def get_body(self, email_id):
        email_table = Table('raw_email', self.metadata)
        sel_stmt = select([email_table.c.body]).where(email_table.c.id == email_id)
        rp = self.engine.execute(sel_stmt)
        record = rp.first()
        return record.body
    
    def get_all_bodies(self):
        email_table = Table('raw_email', self.metadata)
        sel_stmt = select([email_table.c.body])
        rp = self.engine.execute(sel_stmt)
        bodies = []
        for record in rp:
            bodies.append(record.body)
        return bodies

    def get_all_brushed_emails(self):
        email_table = Table('brushed_email', self.metadata)
        sel_stmt = select([email_table.c.id, email_table.c.date, email_table.c.mime_type, \
                           email_table.c.from_addr, email_table.c.to_adddr, \
                           email_table.c.subject, email_table.c.body, email_table.c.one_line, \
                           email_table.c.path, email_table.c.label, email_table.c.is_scheduling])
        rp = self.engine.execute(sel_stmt)
        emails = []
        for record in rp:
            email = Email()
            if record is not None:
                email.id = record.id
                email.date = record.date
                email.mime_type = record.mime_type
                email.from_addr = record.from_addr
                email.to_addr = record.to_adddr
                email.subject = record.subject
                email.body = record.body
                email.one_line = record.one_line
                email.path = record.path
                email.label = record.label
                email.is_scheduling = record.is_scheduling or 0
            emails.append(email)
        return emails
    
    def get_brushed_email(self, email_id):
        email_table = Table('brushed_email', self.metadata)
        sel_stmt = select([email_table.c.date, email_table.c.mime_type, \
                           email_table.c.from_addr, email_table.c.to_adddr, \
                           email_table.c.subject, email_table.c.body, \
                           email_table.c.path, email_table.c.label, email_table.c.is_scheduling]).where(email_table.c.id == email_id)
        rp = self.engine.execute(sel_stmt)
        record = rp.first()
        email = Email()
        if record is not None:
            email.date = record.date
            email.mime_type = record.mime_type
            email.from_addr = record.from_addr
            email.to_addr = record.to_adddr
            email.subject = record.subject
            email.body = record.body
            email.path = record.path
            email.label = record.label
            email.is_scheduling = record.is_scheduling
        
        return email
    
    def get_email(self, email_id):
        email_table = Table('raw_email', self.metadata)
        sel_stmt = select([email_table.c.date, email_table.c.mime_type, \
                           email_table.c.from_addr, email_table.c.to_addr, \
                           email_table.c.subject, email_table.c.body, \
                           email_table.c.path, email_table.c.label]).where(email_table.c.id == email_id)
        rp = self.engine.execute(sel_stmt)
        record = rp.first()
        email = Email()
        if record is not None:
            email.date = record.date
            email.mime_type = record.mime_type
            email.from_addr = record.from_addr
            email.to_addr = record.to_addr
            email.subject = record.subject
            email.body = record.body
            email.path = record.path
            email.label = record.label
        
        return email
        
    def get_emails_from(self, from_addr):
        email_table = Table('raw_email', self.metadata)
        sel_stmt = select([email_table.c.date, email_table.c.mime_type, \
                           email_table.c.from_addr, email_table.c.to_addr, \
                           email_table.c.subject, email_table.c.body, \
                           email_table.c.path, email_table.c.label]).where(email_table.c.from_addr == from_addr)
        rp = self.engine.execute(sel_stmt)
        email_list = []
        for record in rp:
            email = Email()
            email.date = record.date
            email.mime_type = record.mime_type
            email.from_addr = record.from_addr
            email.to_addr = record.to_addr
            email.subject = record.subject
            email.body = record.body
            email.path = record.path
            email.label = record.label 
            email_list.append(email)
            
        return email_list
    
    def get_emails_before(self, query_date):
        email_table = Table('raw_email', self.metadata)
        sel_stmt = select([email_table.c.date, email_table.c.mime_type, \
                           email_table.c.from_addr, email_table.c.to_addr, \
                           email_table.c.subject, email_table.c.body, \
                           email_table.c.path, email_table.c.label]).where(email_table.c.date <= query_date)
        rp = self.engine.execute(sel_stmt)
        email_list = []
        for record in rp:
            email = Email()
            email.date = record.date
            email.mime_type = record.mime_type
            email.from_addr = record.from_addr
            email.to_addr = record.to_addr
            email.subject = record.subject
            email.body = record.body
            email.path = record.path
            email.label = record.label 
            email_list.append(email)
            
        return email_list        
        
    # EMAIL_ADDRESS table
    def insert_address(self, email_address):
        if not isinstance(email_address, EmailAddress):
            print 'ERROR: input must be of type EmailAddress'
            return
        
        email_address_table = Table('email_address', self.metadata)
        ins_stmt = email_address_table.insert()
        conn = self.engine.connect()
        result = conn.execute(ins_stmt, address=email_address.address,
                              name=email_address.name)
    
    def get_address(self, address_id):
        email_address_table = Table('email_address', self.metadata)
        sel_stmt = select([email_address_table.c.name, email_address_table.c.address]).where(email_address_table.c.id == address_id)
        rp = self.engine.execute(sel_stmt)
        record = rp.first()
        email_address = EmailAddress()
        if record is not None:
            email_address.name = record.name
            email_address.address = record.address
        
        return email_address
    
    def get_address_name(self, address_id):
        email_address_table = Table('email_address', self.metadata)
        sel_stmt = select([email_address_table.c.name]).where(email_address_table.c.id == address_id)
        rp = self.engine.execute(sel_stmt)
        record = rp.first()
        email_address = EmailAddress()
        if record is not None:
            email_address.name = record.name
            email_address.address = record.address
        
        return email_address    
    
    # BRUSHED_EMAIL table
    def count_per_label(self, label):
        rp = self.engine.execute('select count(label) from brushed_email where label=%d'%(label))
        res = rp.first()
        return long(res[0])
    
    def get_all_brushed_labels_with_id(self):
        email_table = Table('brushed_email', self.metadata)
        sel_stmt = select([email_table.c.id, email_table.c.label])
        rp = self.engine.execute(sel_stmt)
        labels = []
        for record in rp:
            labels.append((record.id, record.label))
        return labels  
    
    def get_all_brushed_bodies_with_id(self):
        email_table = Table('brushed_email', self.metadata)
        sel_stmt = select([email_table.c.id, email_table.c.body])
        rp = self.engine.execute(sel_stmt)
        bodies = []
        for record in rp:
            bodies.append((record.id, record.body))
        return bodies

    def get_all_brushed_body_summary_with_id(self):
        email_table = Table('brushed_email', self.metadata)
        sel_stmt = select([email_table.c.id, email_table.c.subject, email_table.c.body, email_table.c.summary])
        rp = self.engine.execute(sel_stmt)
        bodies = []
        for record in rp:
            bodies.append((record.id, record.subject, record.body, record.summary))
        return bodies

    def get_all_brushed_lines_with_id(self):
        email_table = Table('brushed_email', self.metadata)
        sel_stmt = select([email_table.c.id, email_table.c.lines])
        rp = self.engine.execute(sel_stmt)
        lines = []
        for record in rp:
            lines.append((record.id, record.lines))
        return lines
    
    def get_all_brushed_verbs_with_id(self):
        email_table = Table('brushed_email', self.metadata)
        sel_stmt = select([email_table.c.id, email_table.c.verbs])
        rp = self.engine.execute(sel_stmt)
        brushed_verbs = []
        for record in rp:
            brushed_verbs.append((record.id, record.verbs))
        return brushed_verbs
    
    def get_all_brushed_verbs_per_label(self, label):
        email_table = Table('brushed_email', self.metadata)
        sel_stmt = select([email_table.c.id, email_table.c.verbs]).where(email_table.c.label==label)
        rp = self.engine.execute(sel_stmt)
        brushed_verbs = []
        for record in rp:
            brushed_verbs.append((record.id, record.verbs))
        return brushed_verbs
    
    def get_all_one_liners_per_label(self, label):
        email_table = Table('brushed_email', self.metadata)
        sel_stmt = select([email_table.c.id, email_table.c.one_line]).where(email_table.c.label==label)
        rp = self.engine.execute(sel_stmt)
        one_line = []
        for record in rp:
            one_line.append((record.id, record.one_line))
        return one_line    

    def update_brushed_body(self,email_id, body):
        brushed_table = Table('brushed_email', self.metadata)
        u = update(brushed_table)
        u = u.values(body=body)
        u = u.where(brushed_table.c.id==email_id)
        conn = self.engine.connect()
        result = conn.execute(u)
        
    def update_brushed_lines(self,email_id, msg_lines):
        brushed_table = Table('brushed_email', self.metadata)
        u = update(brushed_table)
        u = u.values(lines=msg_lines)
        u = u.where(brushed_table.c.id==email_id)
        conn = self.engine.connect()
        result = conn.execute(u)
    
    def update_brushed_one_line(self,email_id, one_line):
        brushed_table = Table('brushed_email', self.metadata)
        u = update(brushed_table)
        u = u.values(one_line=one_line)
        u = u.where(brushed_table.c.id==email_id)
        conn = self.engine.connect()
        result = conn.execute(u)
        
    def update_brushed_verbs(self, email_id, verbs):
        brushed_table = Table('brushed_email', self.metadata)
        u = update(brushed_table)
        u = u.values(verbs=verbs)
        u = u.where(brushed_table.c.id==email_id)
        conn = self.engine.connect()
        result = conn.execute(u)
        
    def update_brushed_summary(self, email_id, summary):
        brushed_table = Table('brushed_email', self.metadata)
        u = update(brushed_table)
        u = u.values(summary=summary)
        u = u.where(brushed_table.c.id==email_id)
        conn = self.engine.connect()
        result = conn.execute(u)


    # additional dataset, out of the labelled data
    def insert_brushed_email_more(self, email):
        if not isinstance(email, Email):
            print 'ERROR: input must be of type Email'
            return
        
        email_table = Table("brushed_email_more", self.metadata)
        ins_stmt = email_table.insert()
        conn = self.engine.connect()
        conn.execute(ins_stmt, date=email.date,
                              mime_type=email.mime_type,
                              from_addr=email.from_addr,
                              to_addr=email.to_addr,
                              subject=email.subject,
                              raw_body=email.body,
                              body=email.body,
                              all_lines=email.all_lines,
                              one_line=email.one_line,
                              path=email.path,
                              label=email.label,
                              prediction=email.prediction) 

    def insert_cleaned_email_full(self, email):
        if not isinstance(email, Email):
            print 'ERROR: input must be of type Email'
            return
        
        email_table = Table("email_full", self.metadata)
        ins_stmt = email_table.insert()
        conn = self.engine.connect()
        conn.execute(ins_stmt, date=email.date,
                              mime_type=email.mime_type,
                              from_addr=email.from_addr,
                              to_addr=email.to_addr,
                              subject=email.subject,
                              raw_body=email.raw_body,
                              body=email.body,
                              all_lines=email.all_lines,
                              one_line=email.one_line,
                              path=email.path) 
    
    def get_raw_bodies_with_id(self):
        email_table = Table('email_full', self.metadata)
        sel_stmt = select([email_table.c.id, email_table.c.raw_body])
        rp = self.engine.execute(sel_stmt)
        bodies = []
        for record in rp:
            bodies.append((record.id, record.raw_body))
        return bodies 
    
    def update_brushed_body_full(self,email_id, body):
        brushed_table = Table('email_full', self.metadata)
        u = update(brushed_table)
        u = u.values(body=body)
        u = u.where(brushed_table.c.id==email_id)
        conn = self.engine.connect()
        result = conn.execute(u)
    
    def update_brushed_lines_full(self,email_id, msg_lines):
        brushed_table = Table('email_full', self.metadata)
        u = update(brushed_table)
        u = u.values(all_lines=msg_lines)
        u = u.where(brushed_table.c.id==email_id)
        conn = self.engine.connect()
        result = conn.execute(u)
    
    def get_all_brushed_lines_with_id_full(self):
        email_table = Table('email_full', self.metadata)
        sel_stmt = select([email_table.c.id, email_table.c.all_lines])
        rp = self.engine.execute(sel_stmt)
        lines = []
        for record in rp:
            lines.append((record.id, record.all_lines))
        return lines
    
    def update_brushed_one_line_full(self,email_id, one_line):
        brushed_table = Table('email_full', self.metadata)
        u = update(brushed_table)
        u = u.values(one_line=one_line)
        u = u.where(brushed_table.c.id==email_id)
        conn = self.engine.connect()
        result = conn.execute(u)
    
    def get_email_full(self, email_id):
        email_table = Table('email_full', self.metadata)
        sel_stmt = select([email_table.c.id, email_table.c.raw_body]).where(email_table.c.id==email_id)
        rp = self.engine.execute(sel_stmt)
        bodies = []
        for record in rp:
            bodies.append((record.id, record.raw_body))
        return bodies[0]
    
    def get_all_brushed_emails_full(self):
        email_table = Table('email_full', self.metadata)
        sel_stmt = select([email_table.c.id, email_table.c.date, email_table.c.mime_type, \
                           email_table.c.from_addr, email_table.c.to_addr, \
                           email_table.c.subject, email_table.c.body, email_table.c.one_line, \
                           email_table.c.path, email_table.c.label, email_table.c.is_scheduling])
        rp = self.engine.execute(sel_stmt)
        emails = []
        for record in rp:
            email = Email()
            if record is not None:
                email.id = record.id
                email.date = record.date
                email.mime_type = record.mime_type
                email.from_addr = record.from_addr
                email.to_addr = record.to_addr
                email.subject = record.subject
                email.body = record.body
                email.one_line = record.one_line
                email.path = record.path
                email.label = record.label
                email.is_scheduling = record.is_scheduling or 0
            emails.append(email)
        return emails
    
    def get_all_brushed_email_more(self):
        email_table = Table('brushed_email_more', self.metadata)
        sel_stmt = select([email_table.c.id, email_table.c.date, email_table.c.mime_type, \
                           email_table.c.from_addr, email_table.c.to_addr, \
                           email_table.c.subject, email_table.c.body, email_table.c.one_line, \
                           email_table.c.path, email_table.c.label, email_table.c.is_scheduling])
        rp = self.engine.execute(sel_stmt)
        emails = []
        for record in rp:
            email = Email()
            if record is not None:
                email.id = record.id
                email.date = record.date
                email.mime_type = record.mime_type
                email.from_addr = record.from_addr
                email.to_addr = record.to_addr
                email.subject = record.subject
                email.body = record.body
                email.one_line = record.one_line
                email.path = record.path
                email.label = record.label
                email.is_scheduling = record.is_scheduling or 0
            emails.append(email)
        return emails
        
Example #20
    def save_data_to_database(self, df: pd.DataFrame, engine: Engine):

        if not AbstractPreprocessor._check_write_privilege(engine):
            AbstractPreprocessor.create_dir(self.output_dir, os.getlogin())
            path = Path.joinpath(self.output_dir, os.getlogin(),
                                 datetime.now().isoformat() + '.json')

            with path.open("a") as f:
                df.to_json(f)
            return

        df = df.loc[df['language'] != '--']
        if df.empty:
            return

        df = self.run_tokenizer(df)

        with self.get_engine(self.db_scrc).connect() as conn:
            # Load the different tables
            t = Table('section', MetaData(), autoload_with=engine)
            t_paragraph = Table('paragraph', MetaData(), autoload_with=engine)
            t_num_tokens = Table('num_tokens',
                                 MetaData(),
                                 autoload_with=engine)

            # Delete and reinsert as no upsert command is available. This pattern is used multiple times in this method
            if not isinstance(df, pd.DataFrame) or df.empty:
                # empty dfs are given as dicts, so no need to save
                return
            stmt = t.delete().where(delete_stmt_decisions_with_df(df))
            conn.execute(stmt)
            stmt = t_paragraph.delete().where(
                delete_stmt_decisions_with_df(df))
            conn.execute(stmt)

            for idx, row in df.iterrows():
                if idx % 50 == 0:
                    self.logger.info(f'Saving decision {idx + 1} from chunk')
                if row['sections'] is None or not row['sections'].keys():
                    continue

                for k in row['sections'].keys():
                    decision_id_str = str(row['decision_id'])
                    if decision_id_str == '':
                        continue
                    section_type_id = k.value
                    # insert section
                    section_dict = {
                        "decision_id": decision_id_str,
                        "section_type_id": section_type_id,
                        "section_text": row['sections'][k]
                    }
                    stmt = t.insert().returning(text("section_id")).values(
                        [section_dict])
                    section_id = conn.execute(stmt).fetchone()['section_id']

                    # Add num tokens
                    tokens_per_section = {
                        'section_id': str(section_id),
                        'num_tokens_spacy': row[k.name + '_spacy'],
                        'num_tokens_bert': row[k.name + '_bert']
                    }

                    stmt = t_num_tokens.insert().values([tokens_per_section])
                    conn.execute(stmt)

                    # Add all paragraphs
                    paragraph_dicts = []
                    for paragraph in row['sections'][k]:
                        paragraph = paragraph.strip()
                        if len(paragraph) == 0: continue
                        paragraph_dict = {
                            'section_id': str(section_id),
                            "decision_id": decision_id_str,
                            'paragraph_text': paragraph,
                            'first_level': None,
                            'second_level': None,
                            'third_level': None
                        }
                        paragraph_dicts.append(paragraph_dict)
                    if len(paragraph_dicts) > 0:
                        stmt = t_paragraph.insert().values(paragraph_dicts)
                        conn.execute(stmt)
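The delete-then-reinsert workaround above keeps the code dialect-agnostic; if the target database is PostgreSQL (an assumption, suggested by the RETURNING clause), a native upsert could replace it, provided a unique constraint exists on (decision_id, section_type_id):

from sqlalchemy.dialects.postgresql import insert as pg_insert

stmt = pg_insert(t).values(section_dict)
stmt = stmt.on_conflict_do_update(
    index_elements=['decision_id', 'section_type_id'],  # assumed unique constraint
    set_={'section_text': stmt.excluded.section_text})
conn.execute(stmt)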
Example #21
 def test_reflect_table(self, engine, conn):
     one_row = Table("one_row", MetaData(bind=engine), autoload=True)
     self.assertEqual(len(one_row.c), 1)
     self.assertIsNotNone(one_row.c.number_of_rows)
Example #22
class Entity(declarative_base()):
    '''
    Abstract entity class.
    '''
    __abstract__ = True

    naming_convention = {
        'pk': 'pk_%(table_name)s',
        'fk': 'fk_%(table_name)s_%(column_0_name)s',
        'ix': 'ix_%(column_0_label)s',
        'uq': 'uq_%(table_name)s_%(column_0_name)s',
    }

    metadata = MetaData(naming_convention=naming_convention)

    @hybrid_property
    def table(self):
        '''
        Shortcut property for table name.

        Returns:
            str: The table name
        '''
        return str(self.__table__.name)

    @hybrid_property
    def columns(self):
        '''
        Shortcut property for table column names.

        Returns:
            tuple: The table column names
        '''
        return tuple(column.name for column in self.__table__.columns)

    @hybrid_property
    def values(self):
        '''
        Shortcut property for table column values.

        Returns:
            tuple: The table column values
        '''
        return tuple(
            getattr(self, column.name) for column in self.__table__.columns)

    @hybrid_property
    def data(self):
        '''
        Shortcut property for table data.

        Returns:
            dict: The table columns/values pairs
        '''
        return {
            column.name: getattr(self, column.name)
            for column in self.__table__.columns
        }

    @classmethod
    def make(cls, row):
        '''
        Creates a new entity instance from the given database row.

        Arguments:
            row (places.databases.entities.DatabaseRow): The database row

        Returns:
            Entity: A new entity instance with given row data
        '''
        return cls(
            **{
                column: getattr(row, key)
                for column, key in iteritems(cls.__columns__)
            })

    def hydrate(self, row):
        '''
        Fills an existing entity instance from the given database row.

        Arguments:
            row (places.databases.entities.DatabaseRow): The database row
        '''
        for column, key in iteritems(self.__columns__):
            setattr(self, column, getattr(row, key))
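A hypothetical concrete entity built on the abstract base above; the `__columns__` mapping from entity columns to database-row keys, which `make()` and `hydrate()` rely on, is an assumption since it is defined elsewhere in the source:

from sqlalchemy import Column, Integer, Unicode

class State(Entity):
    '''
    Illustrative entity, not part of the original source.
    '''
    __tablename__ = 'states'
    __columns__ = {'id': 'state_id', 'name': 'state_name'}  # hypothetical row keys

    id = Column(Integer, primary_key=True)
    name = Column(Unicode(32))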
Example #23
 def __init__(self):
     self.engine = None
     self.metadata = MetaData()
Example #24
def downgrade():

    # convert activity enum
    activity_conversions = [
        ('activity_rate_w1', 'activity_rate_50'),
        ('activity_rate_m2', 'activity_rate_20'),
        ('activity_rate_y5', 'activity_rate_10'),
        ('activity_rate_y5', 'activity_rate_5'),
    ]
    old_activity_type = sa.Enum('activity_rate_y5',
                                'activity_rate_m2',
                                'activity_rate_w1',
                                name='activity_rate',
                                schema='guidebook')
    new_activity_type = sa.Enum('activity_rate_150',
                                'activity_rate_50',
                                'activity_rate_30',
                                'activity_rate_20',
                                'activity_rate_10',
                                'activity_rate_5',
                                'activity_rate_1',
                                name='activity_rate_',
                                schema='guidebook')
    new_activity_type.create(op.get_bind())

    op.add_column('xreports',
                  sa.Column('activity_rate_', new_activity_type,
                            nullable=True),
                  schema='guidebook')
    op.add_column('xreports_archives',
                  sa.Column('activity_rate_', new_activity_type,
                            nullable=True),
                  schema='guidebook')

    xr = Table('xreports',
               MetaData(),
               sa.Column('activity_rate', old_activity_type),
               sa.Column('activity_rate_', new_activity_type, nullable=True),
               schema='guidebook')
    xra = Table('xreports_archives',
                MetaData(),
                sa.Column('activity_rate', old_activity_type),
                sa.Column('activity_rate_', new_activity_type, nullable=True),
                schema='guidebook')
    for (old_value, new_value) in activity_conversions:
        op.execute(xr.update().where(
            xr.c.activity_rate == op.inline_literal(old_value)).values(
                activity_rate_=op.inline_literal(new_value)))
        op.execute(xra.update().where(
            xra.c.activity_rate == op.inline_literal(old_value)).values(
                activity_rate_=op.inline_literal(new_value)))

    op.drop_column('xreports', 'activity_rate', schema='guidebook')
    op.drop_column('xreports_archives', 'activity_rate', schema='guidebook')
    # op.drop_column('xreports_archives', 'avalanche_slope', schema='guidebook')
    drop_enum('activity_rate', schema='guidebook')

    # Rename enum
    op.execute('ALTER TYPE guidebook.activity_rate_ RENAME TO activity_rate')

    # Rename column
    op.alter_column('xreports',
                    'activity_rate_',
                    new_column_name='activity_rate',
                    schema='guidebook')
    op.alter_column('xreports_archives',
                    'activity_rate_',
                    new_column_name='activity_rate',
                    schema='guidebook')

    # end of activity conversion

    # convert autonomy enum
    autonomy_conversions = [('non_autonomous', 'non_autonomous'),
                            ('autonomous', 'autonomous'), ('expert', 'expert')]
    old_autonomy_type = sa.Enum('non_autonomous',
                                'autonomous',
                                'expert',
                                name='autonomy',
                                schema='guidebook')
    new_autonomy_type = sa.Enum('non_autonomous',
                                'autonomous',
                                'initiator',
                                'expert',
                                name='autonomy_',
                                schema='guidebook')
    new_autonomy_type.create(op.get_bind())
    # op.alter_column('xreports', 'autonomy',
    #                 type_=new_autonomy_type,
    #                 existing_type=old_autonomy_type,
    #                 schema='guidebook')
    # does not allow automatic casting if table not empty

    op.add_column('xreports',
                  sa.Column('autonomy_', new_autonomy_type, nullable=True),
                  schema='guidebook')
    op.add_column('xreports_archives',
                  sa.Column('autonomy_', new_autonomy_type, nullable=True),
                  schema='guidebook')

    xr = Table('xreports',
               MetaData(),
               sa.Column('autonomy', old_autonomy_type),
               sa.Column('autonomy_', new_autonomy_type, nullable=True),
               schema='guidebook')
    xra = Table('xreports_archives',
                MetaData(),
                sa.Column('autonomy', old_autonomy_type),
                sa.Column('autonomy_', new_autonomy_type, nullable=True),
                schema='guidebook')
    for (old_value, new_value) in autonomy_conversions:
        op.execute(xr.update().where(
            xr.c.autonomy == op.inline_literal(old_value)).values(
                autonomy_=op.inline_literal(new_value)))
        op.execute(xra.update().where(
            xra.c.autonomy == op.inline_literal(old_value)).values(
                autonomy_=op.inline_literal(new_value)))

    op.drop_column('xreports', 'autonomy', schema='guidebook')
    op.drop_column('xreports_archives', 'autonomy', schema='guidebook')
    # op.drop_column('xreports_archives', 'avalanche_slope', schema='guidebook')
    drop_enum('autonomy', schema='guidebook')

    # Rename enum
    op.execute('ALTER TYPE guidebook.autonomy_ RENAME TO autonomy')

    # Rename column
    op.alter_column('xreports',
                    'autonomy_',
                    new_column_name='autonomy',
                    schema='guidebook')
    op.alter_column('xreports_archives',
                    'autonomy_',
                    new_column_name='autonomy',
                    schema='guidebook')

    # end of autonomy conversion

    op.drop_column('xreports', 'supervision', schema='guidebook')
    op.drop_column('xreports_archives', 'supervision', schema='guidebook')
    drop_enum('supervision_type', schema='guidebook')
    op.drop_column('xreports', 'qualification', schema='guidebook')
    op.drop_column('xreports_archives', 'qualification', schema='guidebook')
    drop_enum('qualification_type', schema='guidebook')

    nb_outing_type = sa.Enum('nb_outings_4',
                             'nb_outings_9',
                             'nb_outings_14',
                             'nb_outings_15',
                             name='nb_outings',
                             schema='guidebook')
    nb_outing_type.create(op.get_bind())
    op.add_column('xreports',
                  sa.Column('nb_outings', nb_outing_type, nullable=True),
                  schema='guidebook')
    op.add_column('xreports_archives',
                  sa.Column('nb_outings', nb_outing_type, nullable=True),
                  schema='guidebook')

    activity_conversions = [('other', 'hiking'), ('skitouring', 'skitouring'),
                            ('snow_ice_mixed', 'snow_ice_mixed'),
                            ('alpine_climbing', 'mountain_climbing'),
                            ('sport_climbing', 'rock_climbing'),
                            ('ice_climbing', 'ice_climbing')]
    old_activity_type = sa.Enum('sport_climbing',
                                'multipitch_climbing',
                                'alpine_climbing',
                                'snow_ice_mixed',
                                'ice_climbing',
                                'skitouring',
                                'other',
                                name='event_activity_type',
                                schema='guidebook')

    activities_type = ArrayOfEnum(
        sa.Enum('skitouring',
                'snow_ice_mixed',
                'mountain_climbing',
                'rock_climbing',
                'ice_climbing',
                'hiking',
                'snowshoeing',
                'paragliding',
                'mountain_biking',
                'via_ferrata',
                'slacklining',
                name='activity_type',
                schema='guidebook'))
    op.add_column('xreports',
                  sa.Column('activities', activities_type, nullable=True),
                  schema='guidebook')
    op.add_column('xreports_archives',
                  sa.Column('activities', activities_type, nullable=True),
                  schema='guidebook')

    xr = Table('xreports',
               MetaData(),
               sa.Column('activities', activities_type, nullable=True),
               sa.Column('event_activity', old_activity_type),
               schema='guidebook')
    xra = Table('xreports_archives',
                MetaData(),
                sa.Column('activities', activities_type, nullable=True),
                sa.Column('event_activity', old_activity_type),
                schema='guidebook')
    for (old_value, new_value) in activity_conversions:
        op.execute(xr.update().where(
            xr.c.event_activity == op.inline_literal(old_value)).values(
                activities=sa.literal([new_value])))
        op.execute(xra.update().where(
            xra.c.event_activity == op.inline_literal(old_value)).values(
                activities=sa.literal([new_value])))

    op.alter_column('xreports',
                    'activities',
                    nullable=False,
                    schema='guidebook')
    op.alter_column('xreports_archives',
                    'activities',
                    nullable=False,
                    schema='guidebook')

    op.drop_column('xreports', 'event_activity', schema='guidebook')
    op.drop_column('xreports_archives', 'event_activity', schema='guidebook')
    drop_enum('event_activity_type', schema='guidebook')

    # convert types
    type_conversions = [('avalanche', 'avalanche'),
                        ('stone_ice_fall', 'stone_fall'),
                        ('ice_cornice_collapse', 'falling_ice'),
                        ('person_fall', 'person_fall'),
                        ('crevasse_fall', 'crevasse_fall'),
                        ('physical_failure', 'physical_failure'),
                        ('injury_without_fall', 'other'),
                        ('blocked_person', 'other'),
                        ('safety_operation', 'other'),
                        ('critical_situation', 'other'),
                        ('weather_event', 'lightning'), ('other', 'other')]
    old_event_type = sa.Enum('avalanche',
                             'stone_ice_fall',
                             'ice_cornice_collapse',
                             'person_fall',
                             'crevasse_fall',
                             'physical_failure',
                             'injury_without_fall',
                             'blocked_person',
                             'weather_event',
                             'safety_operation',
                             'critical_situation',
                             'other',
                             name='event_type',
                             schema='guidebook')
    new_event_type = sa.Enum('avalanche',
                             'stone_fall',
                             'falling_ice',
                             'person_fall',
                             'crevasse_fall',
                             'roped_fall',
                             'physical_failure',
                             'lightning',
                             'other',
                             name='event_type_',
                             schema='guidebook')
    new_event_type.create(op.get_bind())
    op.add_column('xreports',
                  sa.Column('event_type_', ArrayOfEnum(new_event_type)),
                  schema='guidebook')
    op.add_column('xreports_archives',
                  sa.Column('event_type_', ArrayOfEnum(new_event_type)),
                  schema='guidebook')

    xr = Table('xreports',
               MetaData(),
               sa.Column('event_type', old_event_type),
               sa.Column('event_type_', ArrayOfEnum(new_event_type)),
               schema='guidebook')
    xra = Table('xreports_archives',
                MetaData(),
                sa.Column('event_type', old_event_type),
                sa.Column('event_type_', ArrayOfEnum(new_event_type)),
                schema='guidebook')
    for (old_value, new_value) in type_conversions:
        op.execute(xr.update().where(
            xr.c.event_type == op.inline_literal(old_value)).values(
                event_type_=sa.literal([new_value])))
        op.execute(xra.update().where(
            xra.c.event_type == op.inline_literal(old_value)).values(
                event_type_=sa.literal([new_value])))

    op.drop_column('xreports', 'event_type', schema='guidebook')
    op.drop_column('xreports_archives', 'event_type', schema='guidebook')
    drop_enum('event_type', schema='guidebook')
    op.execute('ALTER TYPE guidebook.event_type_ RENAME TO event_type')
    op.alter_column('xreports',
                    'event_type_',
                    new_column_name='event_type',
                    schema='guidebook')
    op.alter_column('xreports_archives',
                    'event_type_',
                    new_column_name='event_type',
                    schema='guidebook')
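# Note: `drop_enum` and `ArrayOfEnum` are helpers assumed to be defined
# elsewhere in this migration package; a minimal sketch of plausible
# implementations, based on the standard SQLAlchemy ARRAY-of-ENUM recipe:
import re

import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from alembic import op


def drop_enum(name, schema=None):
    # emit DROP TYPE for a PostgreSQL enum no longer referenced by any column
    sa.Enum(name=name, schema=schema).drop(op.get_bind())


class ArrayOfEnum(postgresql.ARRAY):
    # PostgreSQL arrays of ENUM need an explicit cast on bind and custom
    # parsing of the raw '{a,b}' string on result rows
    def bind_expression(self, bindvalue):
        return sa.cast(bindvalue, self)

    def result_processor(self, dialect, coltype):
        super_rp = super(ArrayOfEnum, self).result_processor(dialect, coltype)

        def handle_raw_string(value):
            inner = re.match(r"^{(.*)}$", value).group(1)
            return inner.split(",") if inner else []

        def process(value):
            if value is None:
                return None
            return super_rp(handle_raw_string(value))

        return process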
示例#25
    Defunct, but maintained for database imports.
    """
    # values for the "name" column
    ID_POLICY_UPLOAD = "idPolicyUpload"  # text
    ID_POLICY_FINALIZE = "idPolicyFinalize"  # text
    SERVER_CAMCOPS_VERSION = "serverCamcopsVersion"  # text
    DATABASE_TITLE = "databaseTitle"  # text
    LAST_ANALYTICS_SENT_AT = "lastAnalyticsSentAt"  # text
    ID_DESCRIPTION_PREFIX = "idDescription"  # text; apply suffixes 1-8
    ID_SHORT_DESCRIPTION_PREFIX = "idShortDescription"  # text; apply suffixes 1-8  # noqa


StoredVarNameColTypeDefunct = String(length=255)
StoredVarTypeColTypeDefunct = String(length=255)
_ssv_metadata = MetaData()

server_stored_var_table_defunct = Table(
    "_server_storedvars",  # table name
    _ssv_metadata,  # metadata separate from everything else
    Column("name",
           StoredVarNameColTypeDefunct,
           primary_key=True,
           index=True,
           comment="Variable name"),
    Column("type",
           StoredVarTypeColTypeDefunct,
           nullable=False,
           comment="Variable type ('integer', 'real', 'text')"),
    Column("valueInteger", Integer, comment="Value of an integer variable"),
    Column("valueText", UnicodeText, comment="Value of a text variable"),
示例#26
 def update_metadata(self) -> None:
     """
     Updates the metadata, for example if a table has been dropped.
     """
     self._metadata = MetaData(bind=self.engine)
     self._reflected = False  # force re-reflection on next metadata access
示例#27
class EnronDB:
    def __init__(self, table_name):
        self.engine = None
        self.metadata = MetaData()
        self.table_name = table_name
        
    @classmethod
    def holbox_db(cls):
        db = EnronDB("email_prediction")
        db.init('holbox.lti.cs.cmu.edu', 'inmind', 'yahoo', 'enron_experiment')
        return db
        
    def init(self, host, username, password, db_name):
        engine_desc = 'mysql://%s:%s@%s/%s' % (username, password, host, db_name)
        try:
            self.engine = create_engine(engine_desc)
            self.metadata.reflect(self.engine)
        except Exception:
            print("Unexpected error:", sys.exc_info()[0])
            return False
        return True
    
    # sql:
    # create table TABLE_NAME (id INT NOT NULL AUTO_INCREMENT, date DATETIME, mime_type TEXT, from_addr TEXT, 
    # to_addr TEXT, subject TEXT, raw_body TEXT, cleaned_body TEXT, one_line TEXT, path TEXT, prediction INT, PRIMARY KEY(id));
    def create_table(self):
        email_table = Table(self.table_name, self.metadata,
            Column('id', Integer, primary_key=True),
            Column('date', Text),
            Column('mime_type', Text),
            Column('from_addr', Text),
            Column('to_addr', Text),
            Column('subject', Text),
            Column('raw_body', Text),
            Column('cleaned_body', Text),
            Column('one_line', Text),
            Column('path', Text),
            Column('prediction', Integer),
            Column('probability', Float)
        )
        email_table.create(self.engine)

    def create_sample_table(self, sample_table_name):
        if sample_table_name == self.table_name:
            print('Cannot use the same table name')
            return
        email_table = Table(sample_table_name, self.metadata,
            Column('id', Integer, primary_key=True),
            Column('date', Text),
            Column('mime_type', Text),
            Column('from_addr', Text),
            Column('to_addr', Text),
            Column('subject', Text),
            Column('raw_body', Text),
            Column('cleaned_body', Text),
            Column('one_line', Text),
            Column('path', Text),
            Column('prediction', Integer),
            Column('probability', Float),
            Column('manual_label', Integer)
        )
        email_table.create(self.engine)

    def get_all_brushed_emails(self):
        email_table = Table('brushed_email', self.metadata)
        sel_stmt = select([email_table.c.id, email_table.c.date, email_table.c.mime_type, \
                           email_table.c.from_addr, email_table.c.to_adddr, \
                           email_table.c.subject, email_table.c.body, email_table.c.one_line, \
                           email_table.c.path, email_table.c.label, email_table.c.is_scheduling])
        rp = self.engine.execute(sel_stmt)
        emails = []
        for record in rp:
            email = Email()
            if record is not None:
                email.id = record.id
                email.date = record.date
                email.mime_type = record.mime_type
                email.from_addr = record.from_addr
                email.to_addr = record.to_adddr
                email.subject = record.subject
                email.body = record.body
                email.one_line = record.one_line
                email.path = record.path
                email.label = record.label
                email.is_scheduling = record.is_scheduling or 0
            emails.append(email)
        return emails
    
    def insert_email(self, email):
        if not isinstance(email, Email):
            print('ERROR: input must be of type Email')
            return
        
        email_table = Table(self.table_name, self.metadata)
        ins_stmt = email_table.insert()
        conn = self.engine.connect()
        conn.execute(ins_stmt, date=email.date,
                              mime_type=email.mime_type,
                              from_addr=email.from_addr,
                              to_addr=email.to_addr,
                              subject=email.subject,
                              raw_body=email.raw_body,
                              cleaned_body=email.cleaned_body,
                              one_line=email.one_line,
                              path=email.path,
                              label=email.label,
                              prediction=email.prediction,
                              probability=email.probability
                              ) 
    
    def get_all_email_predictions(self):
        email_table = Table(self.table_name, self.metadata)
        sel_stmt = select([email_table.c.id, email_table.c.date, email_table.c.mime_type, \
                           email_table.c.from_addr, email_table.c.to_addr, \
                           email_table.c.subject, email_table.c.raw_body, email_table.c.cleaned_body, email_table.c.one_line, \
                           email_table.c.path, email_table.c.prediction, email_table.c.probability])
        rp = self.engine.execute(sel_stmt)
        emails = []
        for record in rp:
            email = Email()
            if record is not None:
                email.id = record.id
                email.date = record.date
                email.mime_type = record.mime_type
                email.from_addr = record.from_addr
                email.to_addr = record.to_addr
                email.subject = record.subject
                email.raw_body = record.raw_body
                email.cleaned_body = record.cleaned_body
                email.one_line = record.one_line
                email.path = record.path
                email.prediction = record.prediction
                email.probability = record.probability
            emails.append(email)
        return emails

    def get_sample_emails(self, sample_table_name):
        email_table = Table(sample_table_name, self.metadata)
        sel_stmt = select([email_table.c.id, email_table.c.date, email_table.c.mime_type, \
                           email_table.c.from_addr, email_table.c.to_addr, \
                           email_table.c.subject, email_table.c.raw_body, email_table.c.cleaned_body, email_table.c.one_line, \
                           email_table.c.path, email_table.c.prediction, email_table.c.probability, email_table.c.manual_label])
        rp = self.engine.execute(sel_stmt)
        emails = []
        for record in rp:
            email = Email()
            if record is not None:
                email.id = record.id
                email.date = record.date
                email.mime_type = record.mime_type
                email.from_addr = record.from_addr
                email.to_addr = record.to_addr
                email.subject = record.subject
                email.raw_body = record.raw_body
                email.cleaned_body = record.cleaned_body
                email.one_line = record.one_line
                email.path = record.path
                email.prediction = record.prediction
                email.probability = record.probability
                email.manual_label = record.manual_label
            emails.append(email)
        return emails

    def get_all_email_predictions_greater_than(self, threshold=0.7):
        s = text("select * from " + self.table_name +
                 " where probability >= :threshold")
        rp = self.engine.execute(s, threshold=threshold).fetchall()
#         email_table = Table(self.table_name, self.metadata)
#         sel_stmt = select([email_table.c.id, email_table.c.date, email_table.c.mime_type, \
#                            email_table.c.from_addr, email_table.c.to_addr, \
#                            email_table.c.subject, email_table.c.raw_body, email_table.c.cleaned_body, email_table.c.one_line, \
#                            email_table.c.path, email_table.c.prediction, email_table.c.probability]).where(email_table.c.probability >= 0.7)
#         rp = self.engine.execute(sel_stmt)
        emails = []
        for record in rp:
            email = Email()
            if record is not None:
                email.id = record.id
                email.date = record.date
                email.mime_type = record.mime_type
                email.from_addr = record.from_addr
                email.to_addr = record.to_addr
                email.subject = record.subject
                email.raw_body = record.raw_body
                email.cleaned_body = record.cleaned_body
                email.one_line = record.one_line
                email.path = record.path
                email.prediction = record.prediction
                email.probability = record.probability
            emails.append(email)
        return emails
        
    def insert_sample_email(self, sample_table_name, email):
        if not isinstance(email, Email):
            print('ERROR: input must be of type Email')
            return
        
        if sample_table_name == self.table_name:
            print('Cannot use the same table name')
            return
        
        email_table = Table(sample_table_name, self.metadata)
        ins_stmt = email_table.insert()
        conn = self.engine.connect()
        conn.execute(ins_stmt, date=email.date,
                              mime_type=email.mime_type,
                              from_addr=email.from_addr,
                              to_addr=email.to_addr,
                              subject=email.subject,
                              raw_body=email.raw_body,
                              cleaned_body=email.cleaned_body,
                              one_line=email.one_line,
                              path=email.path,
                              label=email.label,
                              prediction=email.prediction,
                              probability=email.probability,
                              ) 
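# The four reader methods above repeat the same record-to-Email copying;
# a hedged refactoring sketch (Email is the class the original code assumes):
def _row_to_email(record, fields):
    email = Email()
    for field in fields:
        setattr(email, field, getattr(record, field))
    return email

# e.g. inside get_all_email_predictions:
#     fields = ('id', 'date', 'mime_type', 'from_addr', 'to_addr', 'subject',
#               'raw_body', 'cleaned_body', 'one_line', 'path', 'prediction',
#               'probability')
#     emails = [_row_to_email(record, fields) for record in rp]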
示例#28
mysql_host = host = os.getenv("MYSQL_HOST", "localhost")
port = 3306
user = "******"
password = "******"
database = "wordpress"
mysql_url = f"mysql+pymysql://{user}:{password}@{mysql_host}:{port}/{database}"

# echo=True enables debug mode: controls whether executed SQL statements are logged
engine = sqlalchemy.create_engine(mysql_url, echo=True)
Session = sessionmaker(bind=engine, autoflush=False)
# database session instance, used to interact with the data
session = Session()
# connect to the database
connect = engine.connect()

meta_data = MetaData(engine)

sms = Table(
    'sms',
    meta_data,
    Column('id', Integer, primary_key=True),
    Column('access_key', String(20)),
    Column('secret_key', String(20)),
    Column('create_at', DateTime),
    Column('update_at', DateTime),
)
coin_price = Table(
    'coin_price',
    meta_data,
    Column('id', Integer, primary_key=True),
    Column('coin', String(10)),
示例#29
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.orm import relationship
from sqlalchemy.sql.schema import MetaData

db = SQLAlchemy(metadata=MetaData(
    naming_convention={
        "ix": 'ix_%(column_0_label)s',
        "uq": "uq_%(table_name)s_%(column_0_name)s",
        "ck": "ck_%(table_name)s_%(column_0_name)s",
        "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
        "pk": "pk_%(table_name)s"
    }))
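# A standalone sketch (not part of the app above) showing what the naming
# convention buys: an anonymous constraint gets a deterministic name, which
# keeps Alembic autogenerate diffs stable; table and column are illustrative.
from sqlalchemy import Column, Integer, MetaData, String, Table, UniqueConstraint

md = MetaData(naming_convention={"uq": "uq_%(table_name)s_%(column_0_name)s"})
t = Table('station', md,
          Column('id', Integer, primary_key=True),
          Column('slug', String(64)),
          UniqueConstraint('slug'))
uq = next(c for c in t.constraints if isinstance(c, UniqueConstraint))
print(uq.name)  # -> "uq_station_slug"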


class Base(db.Model):
    __abstract__ = True
    id = db.Column(db.Integer, primary_key=True, nullable=False)

    @declared_attr
    def __tablename__(cls):
        return cls.__name__.lower()


class User(Base):
    __abstract__ = False
    email = db.Column(db.String(128), nullable=False, index=True)
    password = db.Column(db.String(128))


class Station(Base):
示例#30
 def test_reflect_table_with_schema(self, engine, connection):
     one_row = Table('one_row', MetaData(bind=engine),
                     schema=SCHEMA, autoload=True)
     self.assertEqual(len(one_row.c), 1)
     self.assertIsNotNone(one_row.c.number_of_rows)
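# Note: bind=/autoload=True is the SQLAlchemy 1.x spelling; a sketch of the
# 1.4+ equivalent using autoload_with:
#     one_row = Table('one_row', MetaData(), schema=SCHEMA, autoload_with=engine)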
示例#31
    #   # ... gives 'hello abcdefghij world'
    # "ck": "ck_%(table_name).30s_%(column_0_name).30s",
    # 3 for "ck_" leaves 61; 30 for table, 1 for "_", 30 for column
    # ... no...
    # "obs_contamination_bodily_waste_*"
    "ck": "ck_%(table_name)s_%(column_0_name)s",  # unique but maybe too long

    # FOREIGN KEY:
    # "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",  # too long for MySQL sometimes!  # noqa
    "fk": "fk_%(table_name)s_%(column_0_name)s",
    # "fk": "fk_%(column_0_name)s",

    # PRIMARY KEY:
    "pk": "pk_%(table_name)s"
}
MASTER_META = MetaData(naming_convention=NAMING_CONVENTION)

# The base of all our model classes:
Base = declarative_base(metadata=MASTER_META)

# Special options:
Base.__table_args__ = {
    # -------------------------------------------------------------------------
    # MySQL special options
    # -------------------------------------------------------------------------
    # SQLAlchemy __table_args__:
    #   http://docs.sqlalchemy.org/en/latest/orm/extensions/declarative/table_config.html  # noqa
    # SQLAlchemy sends keyword arguments like 'mysql_keyword_name' to be
    # rendered as KEYWORD_NAME in the CREATE TABLE statement:
    #   http://docs.sqlalchemy.org/en/latest/dialects/mysql.html
示例#32
 def test_has_table(self, engine, conn):
     self.assertTrue(Table("one_row", MetaData(bind=engine)).exists())
     self.assertFalse(
         Table("this_table_does_not_exist", MetaData(bind=engine)).exists())
示例#33
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('corp_types',
    sa.Column('corp_type_cd', sa.String(length=5), nullable=False),
    sa.Column('colin_ind', sa.String(length=1), nullable=False),
    sa.Column('corp_class', sa.String(length=10), nullable=False),
    sa.Column('short_desc', sa.String(length=25), nullable=False),
    sa.Column('full_desc', sa.String(length=100), nullable=False),
    sa.Column('legislation', sa.String(length=100), nullable=True),
    sa.PrimaryKeyConstraint('corp_type_cd')
    )

    meta = MetaData(bind=op.get_bind())
    meta.reflect(only=('corp_types',))

    corp_types_table = Table('corp_types', meta)

    op.bulk_insert(
        corp_types_table,
        [
            {'corp_type_cd': 'A', 'colin_ind': 'Y', 'corp_class': 'XPRO', 'short_desc': 'EXTRA PRO', 'full_desc': 'Extraprovincial Company', 'legislation': 'BC Business Corporations Act'},
            {'corp_type_cd': 'B', 'colin_ind': 'Y', 'corp_class': 'XPRO', 'short_desc': 'EXTRA PRO', 'full_desc': 'Extraprovincial Company', 'legislation': 'BC Business Corporations Act'},
            {'corp_type_cd': 'BC', 'colin_ind': 'Y', 'corp_class': 'BC', 'short_desc': 'BC COMPANY', 'full_desc': 'BC Limited Company', 'legislation': 'BC Business Corporations Act'},
            {'corp_type_cd': 'C', 'colin_ind': 'Y', 'corp_class': 'BC', 'short_desc': 'CONTINUE IN', 'full_desc': 'BC Limited Company', 'legislation': 'BC Business Corporations Act'},
            {'corp_type_cd': 'CEM', 'colin_ind': 'N', 'corp_class': 'OT', 'short_desc': 'CEMETARY', 'full_desc': 'Cemetary', 'legislation': ''},
            {'corp_type_cd': 'CP', 'colin_ind': 'N', 'corp_class': 'OT', 'short_desc': 'COOP', 'full_desc': 'BC Cooperative Association', 'legislation': 'BC Cooperative Association Act'},
            {'corp_type_cd': 'EPR', 'colin_ind': 'Y', 'corp_class': 'XPRO', 'short_desc': 'EXTRA PRO REG', 'full_desc': 'Extraprovincial Registration', 'legislation': ''},
            {'corp_type_cd': 'FOR', 'colin_ind': 'Y', 'corp_class': 'XPRO', 'short_desc': 'FOREIGN', 'full_desc': 'Foreign Registration', 'legislation': ''},
            {'corp_type_cd': 'LIC', 'colin_ind': 'Y', 'corp_class': 'XPRO', 'short_desc': 'LICENSED', 'full_desc': 'Licensed (Extra-Pro)', 'legislation': ''},
            {'corp_type_cd': 'LIB', 'colin_ind': 'N', 'corp_class': 'OT', 'short_desc': 'LIBRARY', 'full_desc': 'Public Library Association', 'legislation': ''},
            {'corp_type_cd': 'LLC', 'colin_ind': 'Y', 'corp_class': 'XPRO', 'short_desc': 'LIMITED CO', 'full_desc': 'Limited Liability Company', 'legislation': 'BC Business Corporations Act'},
            {'corp_type_cd': 'PA', 'colin_ind': 'N', 'corp_class': 'OT', 'short_desc': 'PRIVATE ACT', 'full_desc': 'Private Act', 'legislation': 'Private Act'},
            {'corp_type_cd': 'PAR', 'colin_ind': 'N', 'corp_class': 'OT', 'short_desc': 'PARISHES', 'full_desc': 'Parishes', 'legislation': 'BC Partnership Act'},
            {'corp_type_cd': 'PFS', 'colin_ind': 'N', 'corp_class': 'OT', 'short_desc': 'PENS FUND SOC', 'full_desc': 'Pension Funded Society', 'legislation': ''},
            {'corp_type_cd': 'QA', 'colin_ind': 'Y', 'corp_class': 'BC', 'short_desc': 'CO 1860', 'full_desc': 'CO 1860', 'legislation': ''},
            {'corp_type_cd': 'QB', 'colin_ind': 'Y', 'corp_class': 'BC', 'short_desc': 'CO 1862', 'full_desc': 'CO 1862', 'legislation': ''},
            {'corp_type_cd': 'QC', 'colin_ind': 'Y', 'corp_class': 'BC', 'short_desc': 'CO 1878', 'full_desc': 'CO 1878', 'legislation': ''},
            {'corp_type_cd': 'QD', 'colin_ind': 'Y', 'corp_class': 'BC', 'short_desc': 'CO 1890', 'full_desc': 'CO 1890', 'legislation': ''},
            {'corp_type_cd': 'QE', 'colin_ind': 'Y', 'corp_class': 'BC', 'short_desc': 'CO 1897', 'full_desc': 'CO 1897', 'legislation': ''},
            {'corp_type_cd': 'REG', 'colin_ind': 'Y', 'corp_class': 'XPRO', 'short_desc': 'REGISTRATION', 'full_desc': 'Registraton (Extra-pro)', 'legislation': 'BC Business Corporations Act'},
            {'corp_type_cd': 'RLY', 'colin_ind': 'N', 'corp_class': 'OT', 'short_desc': 'RAILWAYS', 'full_desc': 'Railways', 'legislation': ''},
            {'corp_type_cd': 'SB', 'colin_ind': 'N', 'corp_class': 'OT', 'short_desc': 'SOCIETY BRANCH', 'full_desc': 'Society Branch', 'legislation': ''},
            {'corp_type_cd': 'T', 'colin_ind': 'N', 'corp_class': 'OT', 'short_desc': 'TRUST', 'full_desc': 'Trust', 'legislation': 'BC Partnership Act'},
            {'corp_type_cd': 'TMY', 'colin_ind': 'N', 'corp_class': 'OT', 'short_desc': 'TRAMWAYS', 'full_desc': 'Tramways', 'legislation': ''},
            {'corp_type_cd': 'XCP', 'colin_ind': 'N', 'corp_class': 'OT', 'short_desc': 'XPRO COOP', 'full_desc': 'Extraprovincial Cooperative Association', 'legislation': 'BC Cooperative Association Act'},
            {'corp_type_cd': 'ULC', 'colin_ind': 'Y', 'corp_class': 'BC', 'short_desc': 'BC ULC COMPANY', 'full_desc': 'BC Unlimited Liability Company', 'legislation': 'BC Business Corporations Act'},
            {'corp_type_cd': 'CUL', 'colin_ind': 'Y', 'corp_class': 'BC', 'short_desc': 'ULC CONTINUE IN', 'full_desc': 'Continuation In as a BC ULC', 'legislation': 'BC Business Corporations Act'},
            {'corp_type_cd': 'UQA', 'colin_ind': 'Y', 'corp_class': 'BC', 'short_desc': 'ULC CO 1860', 'full_desc': 'ULC CO 1860', 'legislation': ''},
            {'corp_type_cd': 'UQB', 'colin_ind': 'Y', 'corp_class': 'BC', 'short_desc': 'ULC CO 1862', 'full_desc': 'ULC CO 1862', 'legislation': ''},
            {'corp_type_cd': 'UQC', 'colin_ind': 'Y', 'corp_class': 'BC', 'short_desc': 'ULC CO 1878', 'full_desc': 'ULC CO 1878', 'legislation': ''},
            {'corp_type_cd': 'UQD', 'colin_ind': 'Y', 'corp_class': 'BC', 'short_desc': 'ULC CO 1890', 'full_desc': 'ULC CO 1890', 'legislation': ''},
            {'corp_type_cd': 'UQE', 'colin_ind': 'Y', 'corp_class': 'BC', 'short_desc': 'ULC CO 1897', 'full_desc': 'ULC CO 1897', 'legislation': ''},
            {'corp_type_cd': 'CC', 'colin_ind': 'Y', 'corp_class': 'BC', 'short_desc': 'BC CCC', 'full_desc': 'BC Community Contribution Company', 'legislation': 'BC Business Corporations Act'},
            {'corp_type_cd': 'CCC', 'colin_ind': 'Y', 'corp_class': 'BC', 'short_desc': 'CCC CONTINUE IN', 'full_desc': 'BC Community Contribution Company', 'legislation': 'BC Business Corporations Act'},
            {'corp_type_cd': 'S', 'colin_ind': 'Y', 'corp_class': 'SOC', 'short_desc': 'SOCIETY', 'full_desc': 'Society', 'legislation': 'BC Societies Act'},
            {'corp_type_cd': 'XS', 'colin_ind': 'Y', 'corp_class': 'SOC', 'short_desc': 'XPRO SOCIETY', 'full_desc': 'Extraprovincial Society', 'legislation': 'BC Societies Act'},
            {'corp_type_cd': 'SP', 'colin_ind': 'Y', 'corp_class': 'FIRM', 'short_desc': 'SOLE PROP', 'full_desc': 'Sole Proprietorship', 'legislation': 'BC Partnership Act'},
            {'corp_type_cd': 'GP', 'colin_ind': 'Y', 'corp_class': 'FIRM', 'short_desc': 'PARTNERSHIP', 'full_desc': 'General Partnership', 'legislation': 'BC Partnership Act'},
            {'corp_type_cd': 'LP', 'colin_ind': 'Y', 'corp_class': 'FIRM', 'short_desc': 'LIM PARTNERSHIP', 'full_desc': 'Limited Partnership', 'legislation': 'BC Partnership Act'},
            {'corp_type_cd': 'XP', 'colin_ind': 'Y', 'corp_class': 'FIRM', 'short_desc': 'XPRO LIM PARTNR', 'full_desc': 'Extraprovincial Limited Partnership', 'legislation': 'BC Partnership Act'},
            {'corp_type_cd': 'LL', 'colin_ind': 'Y', 'corp_class': 'FIRM', 'short_desc': 'LL PARTNERSHIP', 'full_desc': 'Limited Liability Partnership', 'legislation': 'BC Partnership Act'},
            {'corp_type_cd': 'XL', 'colin_ind': 'Y', 'corp_class': 'FIRM', 'short_desc': 'XPRO LL PARTNR', 'full_desc': 'Extrapro Limited Liability Partnership', 'legislation': 'BC Partnership Act'},
            {'corp_type_cd': 'MF', 'colin_ind': 'Y', 'corp_class': 'FIRM', 'short_desc': 'MISC FIRM', 'full_desc': 'Miscellaneous Firm', 'legislation': 'BC Partnership Act'},
            {'corp_type_cd': 'FI', 'colin_ind': 'N', 'corp_class': 'OT', 'short_desc': 'FINANCIAL', 'full_desc': 'Financial Institutions', 'legislation': 'Credit Union Incorporation Act'},
            {'corp_type_cd': 'CS', 'colin_ind': 'Y', 'corp_class': 'SOC', 'short_desc': 'CONT IN SOCIETY', 'full_desc': 'Society', 'legislation': 'BC Societies Act'},
            {'corp_type_cd': 'BEN', 'colin_ind': 'Y', 'corp_class': 'BC', 'short_desc': 'BENEFIT COMPANY', 'full_desc': 'BC Benefit Company', 'legislation': 'BC Business Corporations Act'}
        ]
    )
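# The snippet stops before the matching downgrade; a minimal sketch,
# assuming nothing else depends on corp_types:
def downgrade():
    op.drop_table('corp_types')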
示例#34
if len(argv) > 5:
    login_pontual = argv[5]
else:
    login_pontual = 'admin%'

if (len(argv) > 6 and argv[6].upper() == 'TRUE'):
    limpar_tabela = True
else:
    limpar_tabela = False




# Maps the pontual database
__m = MetaData(schema='pontual')
__m2 = MetaData(schema='seguranca')
# __m = MetaData()
url = "postgresql://%s:%s@%s/pontual" % (usuario, senha, ip_nome)
__engine = create_engine(url)
# __m.reflect(__engine, only=['linha', 'area_de_fiscalizacao', 'ponto_de_parada'])
__m2.reflect(__engine, only=['user'])
__Base = automap_base(bind=__engine, metadata=__m)
__Base.prepare(__engine, reflect=True)
__Base2 = automap_base(bind=__engine, metadata=__m2)
__Base2.prepare(__engine, reflect=True)


# SQLAlchemy ORM objects for the pontual database
User = __Base2.classes.user
Linha = __Base.classes.linha
AreaDeFiscalizacao = __Base.classes.area_de_fiscalizacao
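# A hedged usage sketch: querying the automapped classes with an ORM session.
# The 'login' column on `user` is an assumption, inferred from the
# login_pontual filter parsed from argv above.
from sqlalchemy.orm import sessionmaker

__Session = sessionmaker(bind=__engine)
__session = __Session()
admin_users = __session.query(User).filter(User.login.like(login_pontual)).all()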
示例#35
 def __init__(self, table_name):
     self.engine = None
     self.metadata = MetaData()
     self.table_name = table_name
示例#36
class DatabaseHolder(object):
    """
    Object to represent a connection to a database.
    """
    def __init__(self,
                 name: str,
                 url: str,
                 srccfg: DB_SAFE_CONFIG_FWD_REF = None,
                 with_session: bool = False,
                 with_conn: bool = True,
                 reflect: bool = True,
                 encoding: str = 'utf-8',
                 echo: bool = False) -> None:
        """
        Args:
            name: internal database name
            url: SQLAlchemy URL
            srccfg: :class:`crate_anon.anonymise.config.DatabaseSafeConfig`
            with_session: create an SQLAlchemy Session?
            with_conn: create an SQLAlchemy connection (via an Engine)?
            reflect: read the database structure (when required)?
            encoding: passed to SQLAlchemy's :func:`create_engine`
            echo: passed to SQLAlchemy's :func:`create_engine`
        """
        self.name = name
        self.srccfg = srccfg
        self.engine = create_engine(url, encoding=encoding, echo=echo)
        self.conn = None  # type: Optional[Connection]
        self.session = None  # type: Optional[Session]
        self._reflect_on_request = reflect
        self._reflected = False
        self._table_names = []  # type: List[str]
        self._metadata = MetaData(bind=self.engine)
        log.debug(self.engine)  # obscures password

        if with_conn:  # for raw connections
            self.conn = self.engine.connect()
        if with_session:  # for ORM
            self.session = sessionmaker(bind=self.engine)()  # for ORM

    def _reflect(self) -> None:
        """
        Perform the database reflection.

        Reflection is expensive, so we defer it until required.
        """
        if not self._reflect_on_request:
            return
        log.info(f"Reflecting database: {self.name}")
        # self.table_names = get_table_names(self.engine)
        self._metadata.reflect(views=True)  # include views
        self._table_names = [t.name for t in self._metadata.sorted_tables]
        self._reflected = True

    def update_metadata(self) -> None:
        """
        Updates the metadata, for example if a table has been dropped.
        """
        self._metadata = MetaData(bind=self.engine)
        self._reflected = False  # force re-reflection on next metadata access

    @property
    def metadata(self) -> MetaData:
        """
        Returns the SQLAlchemy :class:`MetaData`. If reflection is enabled,
        ensure the database has been reflected first.
        """
        if not self._reflected:
            self._reflect()
        return self._metadata

    @property
    def table_names(self) -> List[str]:
        """
        Returns the table names from the database, if reflection is enabled.
        (Otherwise returns an empty list.)
        """
        if not self._reflected:
            self._reflect()
        return self._table_names
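# A hedged usage sketch of DatabaseHolder; name and URL are illustrative:
#     db = DatabaseHolder(name="src", url="sqlite:///example.db",
#                         with_session=True)
#     print(db.table_names)   # first access triggers the deferred reflection
#     db.update_metadata()    # re-reflects lazily on next metadata access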
示例#37
    def __init__(self, engine, schema=None):
        self._metadata = MetaData(schema=schema)
        self.mappers = []

        _feedinfo_id_column = Column('feed_id', String, primary_key=True)
        _agency_feed_id_column = Column('feed_id', String, ForeignKey('feed_info.feed_id'), primary_key=True)
        _route_feed_id_column = Column('feed_id', String, ForeignKey('feed_info.feed_id'), primary_key=True)
        _feedinfo_mapper = Table('feed_info', self._metadata,
                    _feedinfo_id_column,
                    Column('feed_publisher_name', String),
                    Column('feed_publisher_url', String),
                    Column('feed_contact_email', String), # Non-standard (yet) field
                    Column('feed_contact_url', String), # Non-standard (yet) field
                    Column('feed_lang', String),
                    Column('feed_start_date', Date),
                    Column('feed_end_date', Date),
                    Column('feed_version', String))
        self.mappers.append(mapper(FeedInfo, _feedinfo_mapper, properties={
        }))

        _agency_id_column = Column('agency_id', String, primary_key=True)
        _route_agency_id_column = Column('agency_id', String, nullable=False)
        _agency_mapper = Table('agency', self._metadata,
                    _agency_feed_id_column,
                    _agency_id_column,
                    Column('agency_name', String, nullable=False),
                    Column('agency_url', String, nullable=False),
                    Column('agency_timezone', String, nullable=False),
                    Column('agency_lang', String),
                    Column('agency_phone', String),
                    Column('agency_fare_url', String),
                    Column('agency_email', String))
        self.mappers.append(mapper(Agency, _agency_mapper, properties={
            'feed' : relationship(FeedInfo, backref=backref('agencies', cascade="all,delete-orphan"),
                                  primaryjoin=_feedinfo_id_column == foreign(_agency_feed_id_column))
        }))

        _zone_feed_id_column = Column('feed_id', String, ForeignKey('feed_info.feed_id'), primary_key=True)
        _zone_id_column = Column('zone_id', String, primary_key=True)
        _zone_mapper = Table('zones', self._metadata,
                    _zone_feed_id_column,
                    _zone_id_column)
        self.mappers.append(mapper(Zone, _zone_mapper, properties={
            'feed' : relationship(FeedInfo, backref=backref('zones', cascade="all,delete-orphan"),
                                  primaryjoin=_feedinfo_id_column == foreign(_zone_feed_id_column))
        }))

        _stop_feed_id_column = Column('feed_id', String, ForeignKey('feed_info.feed_id'), primary_key=True)
        _stop_id_column = Column('stop_id', String, primary_key=True)
        _stop_parent_id_column = Column('parent_station_id', String, nullable=True)
        _stop_zone_id_column = Column('zone_id', String, nullable=True)
        _stop_mapper = Table('stops', self._metadata,
                    _stop_feed_id_column,
                    _stop_id_column,
                    _stop_parent_id_column,
                    Column('location_type', Integer, nullable=False),
                    Column('stop_name', String, nullable=False),
                    Column('stop_lat', Float, nullable=False),
                    Column('stop_lon', Float, nullable=False),
                    Column('wheelchair_boarding', Integer, nullable=False),
                    Column('stop_code', String),
                    Column('stop_desc', String),
                    _stop_zone_id_column,
                    Column('stop_url', String),
                    Column('stop_timezone', String),
                    ForeignKeyConstraint(['feed_id', 'parent_station_id'], ['stops.feed_id', 'stops.stop_id']),
                    ForeignKeyConstraint(['feed_id', 'zone_id'], ['zones.feed_id', 'zones.zone_id']),
                    # TODO Make these indexes parameterizable
                    Index('idx_stops_lat', 'stop_lat'),
                    Index('idx_stops_lon', 'stop_lon'),
                    Index('idx_stops_code', 'feed_id', 'stop_code'),
                    Index('idx_stops_zone', 'feed_id', 'zone_id'),
                    Index('idx_stops_parent', 'feed_id', 'parent_station_id'))
        self.mappers.append(mapper(Stop, _stop_mapper, properties={
            'feed' : relationship(FeedInfo, backref=backref('stops', cascade="all,delete-orphan"),
                                  primaryjoin=_feedinfo_id_column == foreign(_stop_feed_id_column)),
            'sub_stops' : relationship(Stop, remote_side=[_stop_feed_id_column, _stop_parent_id_column], uselist=True,
                                       primaryjoin=(_stop_parent_id_column == foreign(_stop_id_column)) & (_stop_feed_id_column == _stop_feed_id_column)),
            'parent_station' : relationship(Stop, remote_side=[_stop_feed_id_column, _stop_id_column],
                                       primaryjoin=(_stop_id_column == foreign(_stop_parent_id_column)) & (_stop_feed_id_column == _stop_feed_id_column)),
            'zone' : relationship(Zone, backref=backref('stops', cascade="all,delete-orphan"),
                                   primaryjoin=(_zone_id_column == foreign(_stop_zone_id_column)) & (_zone_feed_id_column == _stop_feed_id_column))
        }))

        _transfer_feed_id_column = Column('feed_id', String, ForeignKey('feed_info.feed_id'), primary_key=True)
        _transfer_from_stop_id_column = Column('from_stop_id', String, primary_key=True)
        _transfer_to_stop_id_column = Column('to_stop_id', String, primary_key=True)
        _transfer_mapper = Table('transfers', self._metadata,
                    _transfer_feed_id_column,
                    _transfer_from_stop_id_column,
                    _transfer_to_stop_id_column,
                    Column('transfer_type', Integer, nullable=False),
                    Column('min_transfer_time', Integer),
                    ForeignKeyConstraint(['feed_id', 'from_stop_id'], ['stops.feed_id', 'stops.stop_id']),
                    ForeignKeyConstraint(['feed_id', 'to_stop_id'], ['stops.feed_id', 'stops.stop_id']),
                    Index('idx_transfer_from', 'feed_id', 'from_stop_id'),
                    Index('idx_transfer_to', 'feed_id', 'to_stop_id'))
        self.mappers.append(mapper(Transfer, _transfer_mapper, properties={
            'feed' : relationship(FeedInfo, backref=backref('transfers', cascade="all,delete-orphan"),
                                  primaryjoin=_feedinfo_id_column == foreign(_transfer_feed_id_column)),
            'from_stop' : relationship(Stop, backref=backref('from_transfers', cascade='all', uselist=True), uselist=False,
                                       primaryjoin=(_transfer_from_stop_id_column == foreign(_stop_id_column)) & (_transfer_feed_id_column == _stop_feed_id_column)),
            'to_stop' : relationship(Stop, backref=backref('to_transfers', cascade='all', uselist=True), uselist=False,
                                     primaryjoin=(_transfer_to_stop_id_column == foreign(_stop_id_column)) & (_transfer_feed_id_column == _stop_feed_id_column))
        }))

        _route_id_column = Column('route_id', String, primary_key=True)
        _route_mapper = Table('routes', self._metadata,
                    _route_feed_id_column,
                    _route_id_column,
                    _route_agency_id_column,
                    Column('route_short_name', String),
                    Column('route_long_name', String),
                    Column('route_desc', String),
                    Column('route_type', Integer, nullable=False),
                    Column('route_url', String),
                    Column('route_color', String),
                    Column('route_text_color', String),
                    ForeignKeyConstraint(['feed_id', 'agency_id'], ['agency.feed_id', 'agency.agency_id']),
                    Index('idx_routes_agency', 'feed_id', 'agency_id'),
                    Index('idx_routes_short_name', 'feed_id', 'route_short_name'),
                    Index('idx_routes_type', 'feed_id', 'route_type'))
        self.mappers.append(mapper(Route, _route_mapper, properties={
            'feed' : relationship(FeedInfo, backref=backref('routes', cascade="all,delete-orphan"),
                                    primaryjoin=_feedinfo_id_column == foreign(_route_feed_id_column)),
            'agency' : relationship(Agency, backref=backref('routes', cascade="all,delete-orphan"),
                                    primaryjoin=(_agency_id_column == foreign(_route_agency_id_column)) & (_agency_feed_id_column == _route_feed_id_column))
        }))

        _calendar_feed_id_column = Column('feed_id', String, ForeignKey('feed_info.feed_id'), primary_key=True)
        _calendar_id_column = Column('service_id', String, primary_key=True)
        _calendar_mapper = Table('calendar', self._metadata,
                    _calendar_feed_id_column,
                    _calendar_id_column
                    )
        self.mappers.append(mapper(Calendar, _calendar_mapper, properties={
            'feed' : relationship(FeedInfo, backref=backref('calendars', cascade="all,delete-orphan"),
                                  primaryjoin=_feedinfo_id_column == foreign(_calendar_feed_id_column))
        }))

        _calendar_date_mapper = Table('calendar_dates', self._metadata,
                    Column('feed_id', String, ForeignKey('feed_info.feed_id'), primary_key=True),
                    Column('service_id', String, primary_key=True),
                    Column('date', Date, primary_key=True),
                    ForeignKeyConstraint(['feed_id', 'service_id'], ['calendar.feed_id', 'calendar.service_id']),
                    # TOCHECK It seems a composite primary key on (a,b,c) does not need
                    # extra indexes on prefix subsets such as (a) or (a,b), but does on
                    # non-prefix subsets such as (a,c).
                    Index('idx_calendar_dates_date', 'feed_id', 'date'))
        self.mappers.append(mapper(CalendarDate, _calendar_date_mapper, properties={
            'calendar' : relationship(Calendar, backref=backref('dates', cascade="all,delete-orphan"))
        }))

        _shape_feed_id_column = Column('feed_id', String, ForeignKey('feed_info.feed_id'), primary_key=True)
        _shape_id_column = Column('shape_id', String, primary_key=True)
        _shape_mapper = Table('shapes', self._metadata,
                    _shape_feed_id_column,
                    _shape_id_column
                    )
        self.mappers.append(mapper(Shape, _shape_mapper, properties={
            'feed' : relationship(FeedInfo, backref=backref('shapes', cascade="all,delete-orphan"),
                                  primaryjoin=_feedinfo_id_column == foreign(_shape_feed_id_column))
        }))

        _shape_pt_feed_id_column = Column('feed_id', String, ForeignKey('feed_info.feed_id'), primary_key=True)
        _shape_pt_shape_id_column = Column('shape_id', String, primary_key=True)
        _shape_pt_seq_column = Column('shape_pt_sequence', Integer, primary_key=True)
        _shape_pt_mapper = Table('shape_pts', self._metadata,
                    _shape_pt_feed_id_column,
                    _shape_pt_shape_id_column,
                    _shape_pt_seq_column,
                    Column('shape_dist_traveled', Float, nullable=False),
                    Column('shape_pt_lat', Float, nullable=False),
                    Column('shape_pt_lon', Float, nullable=False),
                    ForeignKeyConstraint(['feed_id', 'shape_id'], ['shapes.feed_id', 'shapes.shape_id']),
                    Index('idx_shape_pt_shape', 'feed_id', 'shape_id'))
        self.mappers.append(mapper(ShapePoint, _shape_pt_mapper, properties={
            # Note: here we specify foreign() on shape_pt feed_id column as there is no ownership relation of feed to shape_pts
            'shape' : relationship(Shape, backref=backref('points', order_by=_shape_pt_seq_column, cascade="all,delete-orphan"),
                                  primaryjoin=(_shape_id_column == foreign(_shape_pt_shape_id_column)) & (_shape_feed_id_column == foreign(_shape_pt_feed_id_column)))
        }))

        _trip_feed_id_column = Column('feed_id', String, ForeignKey('feed_info.feed_id'), primary_key=True)
        _trip_id_column = Column('trip_id', String, primary_key=True)
        _trip_route_id_column = Column('route_id', String, nullable=False)
        _trip_calendar_id_column = Column('service_id', String, nullable=False)
        _trip_shape_id_column = Column('shape_id', String, nullable=True)
        _trip_mapper = Table('trips', self._metadata,
                    _trip_feed_id_column,
                    _trip_id_column,
                    _trip_route_id_column,
                    _trip_calendar_id_column,
                    _trip_shape_id_column,
                    Column('wheelchair_accessible', Integer, nullable=False),
                    Column('bikes_allowed', Integer, nullable=False),
                    Column('exact_times', Integer, nullable=False),
                    Column('frequency_generated', Boolean, nullable=False),
                    Column('trip_headsign', String),
                    Column('trip_short_name', String),
                    Column('direction_id', Integer),
                    Column('block_id', String),
                    ForeignKeyConstraint(['feed_id', 'route_id'], ['routes.feed_id', 'routes.route_id']),
                    ForeignKeyConstraint(['feed_id', 'service_id'], ['calendar.feed_id', 'calendar.service_id']),
                    ForeignKeyConstraint(['feed_id', 'shape_id'], ['shapes.feed_id', 'shapes.shape_id']),
                    Index('idx_trips_route', 'feed_id', 'route_id'),
                    Index('idx_trips_service', 'feed_id', 'service_id'))
        self.mappers.append(mapper(Trip, _trip_mapper, properties={
            'feed' : relationship(FeedInfo, backref=backref('trips', cascade="all,delete-orphan"),
                                  primaryjoin=_feedinfo_id_column == foreign(_trip_feed_id_column)),
            'route' : relationship(Route, backref=backref('trips', cascade="all,delete-orphan"),
                                   primaryjoin=(_route_id_column == foreign(_trip_route_id_column)) & (_route_feed_id_column == _trip_feed_id_column)),
            'calendar' : relationship(Calendar, backref=backref('trips', cascade="all,delete-orphan"),
                                      primaryjoin=(_calendar_id_column == foreign(_trip_calendar_id_column)) & (_calendar_feed_id_column == _trip_feed_id_column)),
            'shape' : relationship(Shape, backref=backref('trips', cascade="all,delete-orphan"),
                                      primaryjoin=(_shape_id_column == foreign(_trip_shape_id_column)) & (_shape_feed_id_column == _trip_feed_id_column))
        }))

        _stop_times_feed_id_column = Column('feed_id', String, ForeignKey('feed_info.feed_id'), primary_key=True)
        _stop_times_trip_id_column = Column('trip_id', String, primary_key=True)
        _stop_seq_column = Column('stop_sequence', Integer, primary_key=True)
        _stop_times_stop_id_column = Column('stop_id', String, nullable=False)
        _stop_times_mapper = Table('stop_times', self._metadata,
                    _stop_times_feed_id_column,
                    _stop_times_trip_id_column,
                    _stop_seq_column,
                    _stop_times_stop_id_column,
                    Column('arrival_time', Integer, nullable=True),
                    Column('departure_time', Integer, nullable=True),
                    Column('interpolated', Boolean, nullable=False),
                    Column('shape_dist_traveled', Float, nullable=False),
                    Column('timepoint', Integer, nullable=False),
                    Column('pickup_type', Integer, nullable=False),
                    Column('drop_off_type', Integer, nullable=False),
                    Column('stop_headsign', String),
                    ForeignKeyConstraint(['feed_id', 'trip_id'], ['trips.feed_id', 'trips.trip_id']),
                    ForeignKeyConstraint(['feed_id', 'stop_id'], ['stops.feed_id', 'stops.stop_id']),
                    Index('idx_stop_times_stop', 'feed_id', 'stop_id'),
                    Index('idx_stop_times_sequence', 'feed_id', 'stop_sequence'))
        self.mappers.append(mapper(StopTime, _stop_times_mapper, properties={
            # Note: here we specify foreign() on stop_times feed_id column as there is no ownership relation of feed to stop_times
            'trip' : relationship(Trip, backref=backref('stop_times', order_by=_stop_seq_column, cascade="all,delete-orphan"),
                                  primaryjoin=(_trip_id_column == foreign(_stop_times_trip_id_column)) & (_trip_feed_id_column == foreign(_stop_times_feed_id_column))),
            'stop' : relationship(Stop, backref=backref('stop_times', cascade="all,delete-orphan"),
                                  primaryjoin=(_stop_id_column == foreign(_stop_times_stop_id_column)) & (_stop_feed_id_column == _stop_times_feed_id_column)),
        }))

        _fareattr_feed_id_column = Column('feed_id', String, ForeignKey('feed_info.feed_id'), primary_key=True)
        _fareattr_id_column = Column('fare_id', String, primary_key=True)
        _fareattr_mapper = Table('fare_attributes', self._metadata,
                    _fareattr_feed_id_column,
                    _fareattr_id_column,
                    Column('price', Float, nullable=False),
                    Column('currency_type', String, nullable=False),
                    Column('payment_method', Integer, nullable=False),
                    Column('transfers', Integer),
                    Column('transfer_duration', Integer))
        self.mappers.append(mapper(FareAttribute, _fareattr_mapper, properties={
            'feed' : relationship(FeedInfo, backref=backref('fare_attributes', cascade="all,delete-orphan"),
                                  primaryjoin=_feedinfo_id_column == foreign(_fareattr_feed_id_column))
        }))

        _farerule_feed_id_column = Column('feed_id', String, ForeignKey('feed_info.feed_id'))
        _farerule_id_column = Column('fare_id', String)
        # Use a dummy autoincrement numerical field for the primary key,
        # as a primary key is mandatory, and the natural key for the model
        # (feed_id, fare_id, route+zone ids) has fields that can be null.
        _farerule_rule_id_column = Column('fare_rule_id', Integer, primary_key=True, autoincrement=True)
        _farerule_route_id_column = Column('route_id', String, nullable=True)
        _farerule_origin_id_column = Column('origin_id', String, nullable=True)
        _farerule_destination_id_column = Column('destination_id', String, nullable=True)
        _farerule_contains_id_column = Column('contains_id', String, nullable=True)
        _farerule_mapper = Table('fare_rules', self._metadata,
                    _farerule_feed_id_column,
                    _farerule_id_column,
                    _farerule_rule_id_column,
                    _farerule_route_id_column,
                    _farerule_origin_id_column,
                    _farerule_destination_id_column,
                    _farerule_contains_id_column,
                    ForeignKeyConstraint(['feed_id', 'fare_id'], ['fare_attributes.feed_id', 'fare_attributes.fare_id']))
        self.mappers.append(mapper(FareRule, _farerule_mapper, properties={
            'fare_attribute' : relationship(FareAttribute, backref=backref('fare_rules', cascade="all,delete-orphan")),
            'route' : relationship(Route, backref=backref('fare_rules', cascade="all,delete-orphan"),
                            primaryjoin=(_route_id_column == foreign(_farerule_route_id_column)) & (_route_feed_id_column == _farerule_feed_id_column)),
            'origin' : relationship(Zone, backref=backref('origin_fare_rules', cascade="all,delete-orphan"),
                            primaryjoin=(_zone_id_column == foreign(_farerule_origin_id_column)) & (_zone_feed_id_column == _farerule_feed_id_column)),
            'destination' : relationship(Zone, backref=backref('destination_fare_rules', cascade="all,delete-orphan"),
                            primaryjoin=(_zone_id_column == foreign(_farerule_destination_id_column)) & (_zone_feed_id_column == _farerule_feed_id_column)),
            'contains' : relationship(Zone, backref=backref('contains_fare_rules', cascade="all,delete-orphan"),
                            primaryjoin=(_zone_id_column == foreign(_farerule_contains_id_column)) & (_zone_feed_id_column == _farerule_feed_id_column))
        }))

        self._metadata.create_all(engine)
        self._class_for_table = {}
        self._table_for_class = {}
        for _mapper in self.mappers:
            self._class_for_table[_mapper.mapped_table.name] = _mapper.class_
            self._table_for_class[_mapper.class_] = _mapper.mapped_table.name
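# A hedged usage sketch; `GtfsMapping` stands in for the enclosing class,
# whose real name is not shown in this snippet:
#     mapping = GtfsMapping(create_engine('sqlite://'))
#     mapping._class_for_table['stops']   # -> the Stop class
#     mapping._table_for_class[Trip]      # -> 'trips'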
示例#38
 def connect_db(cls):
     strConnect = 'sqlite:///join.db'
     engine = create_engine(strConnect, echo=False)
     cls.meta = MetaData()
     cls.meta.reflect(bind=engine)
     return engine.connect()
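# A hedged usage sketch; `JoinDb` stands in for the enclosing class, which
# is not named in this snippet:
#     conn = JoinDb.connect_db()
#     print(sorted(JoinDb.meta.tables))  # table names reflected from join.db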
示例#39
    def __init__(self, engine, schema=None):
        self._metadata = MetaData(schema=schema)
        # TODO This is hackish. How to check if we already have defined the mapping?
        clear_mappers()
        self.mappers = []

        _feedinfo_id_column = Column('feed_id', String, primary_key=True)
        _agency_feed_id_column = Column('feed_id', String, ForeignKey('feed_info.feed_id'), primary_key=True)
        _route_feed_id_column = Column('feed_id', String, ForeignKey('feed_info.feed_id'), primary_key=True)
        _feedinfo_mapper = Table('feed_info', self._metadata,
                    _feedinfo_id_column,
                    Column('feed_publisher_name', String),
                    Column('feed_publisher_url', String),
                    Column('feed_contact_email', String), # Non-standard (yet) field
                    Column('feed_contact_url', String), # Non-standard (yet) field
                    Column('feed_lang', String),
                    Column('feed_start_date', Date),
                    Column('feed_end_date', Date),
                    Column('feed_version', String))
        self.mappers.append(mapper(FeedInfo, _feedinfo_mapper, properties={
        }))

        _agency_id_column = Column('agency_id', String, primary_key=True)
        _route_agency_id_column = Column('agency_id', String, nullable=False)
        _agency_mapper = Table('agency', self._metadata,
                    _agency_feed_id_column,
                    _agency_id_column,
                    Column('agency_name', String, nullable=False),
                    Column('agency_url', String, nullable=False),
                    Column('agency_timezone', String, nullable=False),
                    Column('agency_lang', String),
                    Column('agency_phone', String),
                    Column('agency_fare_url', String),
                    Column('agency_email', String))
        self.mappers.append(mapper(Agency, _agency_mapper, properties={
            'feed' : relationship(FeedInfo, backref=backref('agencies', cascade="all,delete-orphan"),
                                  primaryjoin=_feedinfo_id_column == foreign(_agency_feed_id_column))
        }))

        _zone_feed_id_column = Column('feed_id', String, ForeignKey('feed_info.feed_id'), primary_key=True)
        _zone_id_column = Column('zone_id', String, primary_key=True)
        _zone_mapper = Table('zones', self._metadata,
                    _zone_feed_id_column,
                    _zone_id_column)
        self.mappers.append(mapper(Zone, _zone_mapper, properties={
            'feed' : relationship(FeedInfo, backref=backref('zones', cascade="all,delete-orphan"),
                                  primaryjoin=_feedinfo_id_column == foreign(_zone_feed_id_column))
        }))

        _stop_feed_id_column = Column('feed_id', String, ForeignKey('feed_info.feed_id'), primary_key=True)
        _stop_id_column = Column('stop_id', String, primary_key=True)
        _stop_parent_id_column = Column('parent_station_id', String, nullable=True)
        _stop_zone_id_column = Column('zone_id', String, nullable=True)
        _stop_mapper = Table('stops', self._metadata,
                    _stop_feed_id_column,
                    _stop_id_column,
                    _stop_parent_id_column,
                    Column('location_type', Integer, nullable=False),
                    Column('stop_name', String, nullable=False),
                    Column('stop_lat', Float, nullable=False),
                    Column('stop_lon', Float, nullable=False),
                    Column('wheelchair_boarding', Integer, nullable=False),
                    Column('stop_code', String),
                    Column('stop_desc', String),
                    _stop_zone_id_column,
                    Column('stop_url', String),
                    Column('stop_timezone', String),
                    ForeignKeyConstraint(['feed_id', 'parent_station_id'], ['stops.feed_id', 'stops.stop_id']),
                    ForeignKeyConstraint(['feed_id', 'zone_id'], ['zones.feed_id', 'zones.zone_id']),
                    # TODO Make these indexes parameterizable
                    Index('idx_stops_lat', 'stop_lat'),
                    Index('idx_stops_lon', 'stop_lon'),
                    Index('idx_stops_code', 'feed_id', 'stop_code'),
                    Index('idx_stops_zone', 'feed_id', 'zone_id'),
                    Index('idx_stops_parent', 'feed_id', 'parent_station_id'))
        self.mappers.append(mapper(Stop, _stop_mapper, properties={
            'feed' : relationship(FeedInfo, backref=backref('stops', cascade="all,delete-orphan"),
                                  primaryjoin=_feedinfo_id_column == foreign(_stop_feed_id_column)),
            'sub_stops' : relationship(Stop, remote_side=[_stop_feed_id_column, _stop_parent_id_column], uselist=True,
                                       primaryjoin=(_stop_parent_id_column == foreign(_stop_id_column)) & (_stop_feed_id_column == _stop_feed_id_column)),
            'parent_station' : relationship(Stop, remote_side=[_stop_feed_id_column, _stop_id_column],
                                       primaryjoin=(_stop_id_column == foreign(_stop_parent_id_column)) & (_stop_feed_id_column == _stop_feed_id_column)),
            'zone' : relationship(Zone, backref=backref('stops', cascade="all,delete-orphan"),
                                   primaryjoin=(_zone_id_column == foreign(_stop_zone_id_column)) & (_zone_feed_id_column == _stop_feed_id_column))
        }))

        _transfer_feed_id_column = Column('feed_id', String, ForeignKey('feed_info.feed_id'), primary_key=True)
        _transfer_from_stop_id_column = Column('from_stop_id', String, primary_key=True)
        _transfer_to_stop_id_column = Column('to_stop_id', String, primary_key=True)
        _transfer_mapper = Table('transfers', self._metadata,
                    _transfer_feed_id_column,
                    _transfer_from_stop_id_column,
                    _transfer_to_stop_id_column,
                    Column('transfer_type', Integer, nullable=False),
                    Column('min_transfer_time', Integer),
                    ForeignKeyConstraint(['feed_id', 'from_stop_id'], ['stops.feed_id', 'stops.stop_id']),
                    ForeignKeyConstraint(['feed_id', 'to_stop_id'], ['stops.feed_id', 'stops.stop_id']),
                    Index('idx_transfer_from', 'feed_id', 'from_stop_id'),
                    Index('idx_transfer_to', 'feed_id', 'to_stop_id'))
        self.mappers.append(mapper(Transfer, _transfer_mapper, properties={
            'feed' : relationship(FeedInfo, backref=backref('transfers', cascade="all,delete-orphan"),
                                  primaryjoin=_feedinfo_id_column == foreign(_transfer_feed_id_column)),
            'from_stop' : relationship(Stop, backref=backref('from_transfers', cascade='all', uselist=True), uselist=False,
                                       primaryjoin=(_transfer_from_stop_id_column == foreign(_stop_id_column)) & (_transfer_feed_id_column == _stop_feed_id_column)),
            'to_stop' : relationship(Stop, backref=backref('to_transfers', cascade='all', uselist=True), uselist=False,
                                     primaryjoin=(_transfer_to_stop_id_column == foreign(_stop_id_column)) & (_transfer_feed_id_column == _stop_feed_id_column))
        }))

        _route_id_column = Column('route_id', String, primary_key=True)
        _route_mapper = Table('routes', self._metadata,
                    _route_feed_id_column,
                    _route_id_column,
                    _route_agency_id_column,
                    Column('route_short_name', String),
                    Column('route_long_name', String),
                    Column('route_desc', String),
                    Column('route_type', Integer, nullable=False),
                    Column('route_url', String),
                    Column('route_color', String),
                    Column('route_text_color', String),
                    ForeignKeyConstraint(['feed_id', 'agency_id'], ['agency.feed_id', 'agency.agency_id']),
                    Index('idx_routes_agency', 'feed_id', 'agency_id'),
                    Index('idx_routes_short_name', 'feed_id', 'route_short_name'),
                    Index('idx_routes_type', 'feed_id', 'route_type'))
        self.mappers.append(mapper(Route, _route_mapper, properties={
            'feed' : relationship(FeedInfo, backref=backref('routes', cascade="all,delete-orphan"),
                                    primaryjoin=_feedinfo_id_column == foreign(_route_feed_id_column)),
            'agency' : relationship(Agency, backref=backref('routes', cascade="all,delete-orphan"),
                                    primaryjoin=(_agency_id_column == foreign(_route_agency_id_column)) & (_agency_feed_id_column == _route_feed_id_column))
        }))

        _calendar_feed_id_column = Column('feed_id', String, ForeignKey('feed_info.feed_id'), primary_key=True)
        _calendar_id_column = Column('service_id', String, primary_key=True)
        _calendar_mapper = Table('calendar', self._metadata,
                    _calendar_feed_id_column,
                    _calendar_id_column
                    )
        self.mappers.append(mapper(Calendar, _calendar_mapper, properties={
            'feed' : relationship(FeedInfo, backref=backref('calendars', cascade="all,delete-orphan"),
                                  primaryjoin=_feedinfo_id_column == foreign(_calendar_feed_id_column))
        }))

        _calendar_date_mapper = Table('calendar_dates', self._metadata,
                    Column('feed_id', String, ForeignKey('feed_info.feed_id'), primary_key=True),
                    Column('service_id', String, primary_key=True),
                    Column('date', Date, primary_key=True),
                    ForeignKeyConstraint(['feed_id', 'service_id'], ['calendar.feed_id', 'calendar.service_id']),
                    # TOCHECK A composite primary key on (a,b,c) seems to cover
                    # lookups on left prefixes such as (a) and (a,b), but an
                    # extra index is still needed for combinations like (a,c).
                    Index('idx_calendar_dates_date', 'feed_id', 'date'))
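        # For instance (hypothetical illustration of the TOCHECK note): with
        # PRIMARY KEY (feed_id, service_id, date), lookups on (feed_id) or on
        # (feed_id, service_id) can use the PK btree prefix, while
        # (feed_id, date) can only use the feed_id prefix; hence the extra
        # idx_calendar_dates_date index above.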
        self.mappers.append(mapper(CalendarDate, _calendar_date_mapper, properties={
            'calendar' : relationship(Calendar, backref=backref('dates', cascade="all,delete-orphan"))
        }))

        _shape_feed_id_column = Column('feed_id', String, ForeignKey('feed_info.feed_id'), primary_key=True)
        _shape_id_column = Column('shape_id', String, primary_key=True)
        _shape_mapper = Table('shapes', self._metadata,
                    _shape_feed_id_column,
                    _shape_id_column
                    )
        self.mappers.append(mapper(Shape, _shape_mapper, properties={
            'feed' : relationship(FeedInfo, backref=backref('shapes', cascade="all,delete-orphan"),
                                  primaryjoin=_feedinfo_id_column == foreign(_shape_feed_id_column))
        }))

        _shape_pt_feed_id_column = Column('feed_id', String, ForeignKey('feed_info.feed_id'), primary_key=True)
        _shape_pt_shape_id_column = Column('shape_id', String, primary_key=True)
        _shape_pt_seq_column = Column('shape_pt_sequence', Integer, primary_key=True)
        _shape_pt_mapper = Table('shape_pts', self._metadata,
                    _shape_pt_feed_id_column,
                    _shape_pt_shape_id_column,
                    _shape_pt_seq_column,
                    Column('shape_dist_traveled', Float, nullable=False),
                    Column('shape_pt_lat', Float, nullable=False),
                    Column('shape_pt_lon', Float, nullable=False),
                    ForeignKeyConstraint(['feed_id', 'shape_id'], ['shapes.feed_id', 'shapes.shape_id']),
                    Index('idx_shape_pt_shape', 'feed_id', 'shape_id'))
        self.mappers.append(mapper(ShapePoint, _shape_pt_mapper, properties={
            # Note: foreign() is also applied to the shape_pts feed_id column, since the feed has no direct ownership relationship to shape points
            'shape' : relationship(Shape, backref=backref('points', order_by=_shape_pt_seq_column, cascade="all,delete-orphan"),
                                  primaryjoin=(_shape_id_column == foreign(_shape_pt_shape_id_column)) & (_shape_feed_id_column == foreign(_shape_pt_feed_id_column)))
        }))

        _trip_feed_id_column = Column('feed_id', String, ForeignKey('feed_info.feed_id'), primary_key=True)
        _trip_id_column = Column('trip_id', String, primary_key=True)
        _trip_route_id_column = Column('route_id', String, nullable=False)
        _trip_calendar_id_column = Column('service_id', String, nullable=False)
        _trip_shape_id_column = Column('shape_id', String, nullable=True)
        _trip_mapper = Table('trips', self._metadata,
                    _trip_feed_id_column,
                    _trip_id_column,
                    _trip_route_id_column,
                    _trip_calendar_id_column,
                    _trip_shape_id_column,
                    Column('wheelchair_accessible', Integer, nullable=False),
                    Column('bikes_allowed', Integer, nullable=False),
                    Column('exact_times', Integer, nullable=False),
                    Column('frequency_generated', Boolean, nullable=False),
                    Column('trip_headsign', String),
                    Column('trip_short_name', String),
                    Column('direction_id', Integer),
                    Column('block_id', String),
                    ForeignKeyConstraint(['feed_id', 'route_id'], ['routes.feed_id', 'routes.route_id']),
                    ForeignKeyConstraint(['feed_id', 'service_id'], ['calendar.feed_id', 'calendar.service_id']),
                    ForeignKeyConstraint(['feed_id', 'shape_id'], ['shapes.feed_id', 'shapes.shape_id']),
                    Index('idx_trips_route', 'feed_id', 'route_id'),
                    Index('idx_trips_service', 'feed_id', 'service_id'))
        self.mappers.append(mapper(Trip, _trip_mapper, properties={
            'feed' : relationship(FeedInfo, backref=backref('trips', cascade="all,delete-orphan"),
                                  primaryjoin=_feedinfo_id_column == foreign(_trip_feed_id_column)),
            'route' : relationship(Route, backref=backref('trips', cascade="all,delete-orphan"),
                                   primaryjoin=(_route_id_column == foreign(_trip_route_id_column)) & (_route_feed_id_column == _trip_feed_id_column)),
            'calendar' : relationship(Calendar, backref=backref('trips', cascade="all,delete-orphan"),
                                      primaryjoin=(_calendar_id_column == foreign(_trip_calendar_id_column)) & (_calendar_feed_id_column == _trip_feed_id_column)),
            'shape' : relationship(Shape, backref=backref('trips', cascade="all,delete-orphan"),
                                      primaryjoin=(_shape_id_column == foreign(_trip_shape_id_column)) & (_shape_feed_id_column == _trip_feed_id_column))
        }))

        _stop_times_feed_id_column = Column('feed_id', String, ForeignKey('feed_info.feed_id'), primary_key=True)
        _stop_times_trip_id_column = Column('trip_id', String, primary_key=True)
        _stop_seq_column = Column('stop_sequence', Integer, primary_key=True)
        _stop_times_stop_id_column = Column('stop_id', String, nullable=False)
        _stop_times_mapper = Table('stop_times', self._metadata,
                    _stop_times_feed_id_column,
                    _stop_times_trip_id_column,
                    _stop_seq_column,
                    _stop_times_stop_id_column,
                    Column('arrival_time', Integer, nullable=True),
                    Column('departure_time', Integer, nullable=True),
                    Column('interpolated', Boolean, nullable=False),
                    Column('shape_dist_traveled', Float, nullable=False),
                    Column('timepoint', Integer, nullable=False),
                    Column('pickup_type', Integer, nullable=False),
                    Column('drop_off_type', Integer, nullable=False),
                    Column('stop_headsign', String),
                    ForeignKeyConstraint(['feed_id', 'trip_id'], ['trips.feed_id', 'trips.trip_id']),
                    ForeignKeyConstraint(['feed_id', 'stop_id'], ['stops.feed_id', 'stops.stop_id']),
                    Index('idx_stop_times_stop', 'feed_id', 'stop_id'),
                    Index('idx_stop_times_sequence', 'feed_id', 'stop_sequence'))
        self.mappers.append(mapper(StopTime, _stop_times_mapper, properties={
            # Note: foreign() is also applied to the stop_times feed_id column, since the feed has no direct ownership relationship to stop times
            'trip' : relationship(Trip, backref=backref('stop_times', order_by=_stop_seq_column, cascade="all,delete-orphan"),
                                  primaryjoin=(_trip_id_column == foreign(_stop_times_trip_id_column)) & (_trip_feed_id_column == foreign(_stop_times_feed_id_column))),
            'stop' : relationship(Stop, backref=backref('stop_times', cascade="all,delete-orphan"),
                                  primaryjoin=(_stop_id_column == foreign(_stop_times_stop_id_column)) & (_stop_feed_id_column == _stop_times_feed_id_column)),
        }))

        _fareattr_feed_id_column = Column('feed_id', String, ForeignKey('feed_info.feed_id'), primary_key=True)
        _fareattr_id_column = Column('fare_id', String, primary_key=True)
        _fareattr_mapper = Table('fare_attributes', self._metadata,
                    _fareattr_feed_id_column,
                    _fareattr_id_column,
                    Column('price', Float, nullable=False),
                    Column('currency_type', String, nullable=False),
                    Column('payment_method', Integer, nullable=False),
                    Column('transfers', Integer),
                    Column('transfer_duration', Integer))
        self.mappers.append(mapper(FareAttribute, _fareattr_mapper, properties={
            'feed' : relationship(FeedInfo, backref=backref('fare_attributes', cascade="all,delete-orphan"),
                                  primaryjoin=_feedinfo_id_column == foreign(_fareattr_feed_id_column))
        }))

        _farerule_feed_id_column = Column('feed_id', String, ForeignKey('feed_info.feed_id'), primary_key=True)
        _farerule_id_column = Column('fare_id', String, primary_key=True)
        _farerule_route_id_column = Column('route_id', String, primary_key=True, nullable=True)
        _farerule_origin_id_column = Column('origin_id', String, primary_key=True, nullable=True)
        _farerule_destination_id_column = Column('destination_id', String, primary_key=True, nullable=True)
        _farerule_contains_id_column = Column('contains_id', String, primary_key=True, nullable=True)
        _farerule_mapper = Table('fare_rules', self._metadata,
                    _farerule_feed_id_column,
                    _farerule_id_column,
                    _farerule_route_id_column,
                    _farerule_origin_id_column,
                    _farerule_destination_id_column,
                    _farerule_contains_id_column,
                    ForeignKeyConstraint(['feed_id', 'fare_id'], ['fare_attributes.feed_id', 'fare_attributes.fare_id']))
        self.mappers.append(mapper(FareRule, _farerule_mapper, properties={
            'fare_attribute' : relationship(FareAttribute, backref=backref('fare_rules', cascade="all,delete-orphan")),
            'route' : relationship(Route, backref=backref('fare_rules', cascade="all,delete-orphan"),
                            primaryjoin=(_route_id_column == foreign(_farerule_route_id_column)) & (_route_feed_id_column == _farerule_feed_id_column)),
            'origin' : relationship(Zone, backref=backref('origin_fare_rules', cascade="all,delete-orphan"),
                            primaryjoin=(_zone_id_column == foreign(_farerule_origin_id_column)) & (_zone_feed_id_column == _farerule_feed_id_column)),
            'destination' : relationship(Zone, backref=backref('destination_fare_rules', cascade="all,delete-orphan"),
                            primaryjoin=(_zone_id_column == foreign(_farerule_destination_id_column)) & (_zone_feed_id_column == _farerule_feed_id_column)),
            'contains' : relationship(Zone, backref=backref('contains_fare_rules', cascade="all,delete-orphan"),
                            primaryjoin=(_zone_id_column == foreign(_farerule_contains_id_column)) & (_zone_feed_id_column == _farerule_feed_id_column))
        }))

        self._metadata.create_all(engine)
        self._class_for_table = {}
        self._table_for_class = {}
        for _mapper in self.mappers:
            self._class_for_table[_mapper.mapped_table.name] = _mapper.class_
            self._table_for_class[_mapper.class_] = _mapper.mapped_table.name
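
For reference, a minimal, self-contained sketch of the composite-key relationship pattern these mappers rely on (assuming SQLAlchemy 1.x, where the classical mapper() API and the foreign() annotation are available; Feed and Stop below are toy stand-ins, not the GTFS classes above):

from sqlalchemy import Column, MetaData, String, Table, create_engine
from sqlalchemy.orm import foreign, mapper, relationship, sessionmaker

class Feed(object):
    pass

class Stop(object):
    pass

metadata = MetaData()
feed_table = Table('feed', metadata,
                   Column('feed_id', String, primary_key=True))
stop_table = Table('stop', metadata,
                   Column('feed_id', String, primary_key=True),
                   Column('stop_id', String, primary_key=True))

mapper(Feed, feed_table)
mapper(Stop, stop_table, properties={
    # foreign() marks which side of the join plays the foreign-key role
    # when no schema-level ForeignKey exists, as in the mappers above.
    'feed': relationship(
        Feed,
        primaryjoin=feed_table.c.feed_id == foreign(stop_table.c.feed_id)),
})

engine = create_engine('sqlite://')
metadata.create_all(engine)
session = sessionmaker(bind=engine)()

feed = Feed()
feed.feed_id = 'F1'
stop = Stop()
stop.feed_id, stop.stop_id = 'F1', 'S1'
session.add_all([feed, stop])
session.commit()
assert session.query(Stop).one().feed.feed_id == 'F1'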
Example #40
def upgrade():

    # convert activities
    activity_conversions = [
        ('hiking', 'other'),
        ('snowshoeing', 'other'),
        ('paragliding', 'other'),
        ('mountain_biking', 'other'),
        ('via_ferrata', 'other'),
        ('slacklining', 'other'),
        ('skitouring', 'skitouring'),
        ('snow_ice_mixed', 'snow_ice_mixed'),
        ('mountain_climbing', 'alpine_climbing'),
        ('rock_climbing', 'sport_climbing'),
        ('ice_climbing', 'ice_climbing'),
    ]
    old_activity_type = ArrayOfEnum(
        sa.Enum('skitouring',
                'snow_ice_mixed',
                'mountain_climbing',
                'rock_climbing',
                'ice_climbing',
                'hiking',
                'snowshoeing',
                'paragliding',
                'mountain_biking',
                'via_ferrata',
                'slacklining',
                name='activity_type',
                schema='guidebook'))
    new_activity_type = sa.Enum('sport_climbing',
                                'multipitch_climbing',
                                'alpine_climbing',
                                'snow_ice_mixed',
                                'ice_climbing',
                                'skitouring',
                                'other',
                                name='event_activity_type',
                                schema='guidebook')
    new_activity_type.create(op.get_bind())
    op.add_column('xreports',
                  sa.Column('event_activity', new_activity_type),
                  schema='guidebook')
    op.add_column('xreports_archives',
                  sa.Column('event_activity', new_activity_type),
                  schema='guidebook')

    xr = Table('xreports',
               MetaData(),
               sa.Column('activities', old_activity_type),
               sa.Column('event_activity', new_activity_type, nullable=True),
               schema='guidebook')
    xra = Table('xreports_archives',
                MetaData(),
                sa.Column('activities', old_activity_type),
                sa.Column('event_activity', new_activity_type, nullable=True),
                schema='guidebook')
    for (old_value, new_value) in activity_conversions:
        op.execute(xr.update().where(
            xr.c.activities.contains(
                sa.literal([old_value]).cast(old_activity_type))).values(
                    event_activity=op.inline_literal(new_value)))
        op.execute(xra.update().where(
            xra.c.activities.contains(
                sa.literal([old_value]).cast(old_activity_type))).values(
                    event_activity=op.inline_literal(new_value)))

    op.alter_column('xreports',
                    'event_activity',
                    nullable=False,
                    schema='guidebook')
    op.alter_column('xreports_archives',
                    'event_activity',
                    nullable=False,
                    schema='guidebook')

    op.drop_column('xreports', 'activities', schema='guidebook')
    op.drop_column('xreports_archives', 'activities', schema='guidebook')

    # end of activities conversion

    # convert types
    type_conversions = [('avalanche', 'avalanche'),
                        ('stone_fall', 'stone_ice_fall'),
                        ('falling_ice', 'stone_ice_fall'),
                        ('person_fall', 'person_fall'),
                        ('crevasse_fall', 'crevasse_fall'),
                        ('roped_fall', 'person_fall'),
                        ('physical_failure', 'physical_failure'),
                        ('lightning', 'weather_event'), ('other', 'other')]
    old_event_type = ArrayOfEnum(
        sa.Enum('avalanche',
                'stone_fall',
                'falling_ice',
                'person_fall',
                'crevasse_fall',
                'roped_fall',
                'physical_failure',
                'lightning',
                'other',
                name='event_type',
                schema='guidebook'))
    new_event_type = sa.Enum('avalanche',
                             'stone_ice_fall',
                             'ice_cornice_collapse',
                             'person_fall',
                             'crevasse_fall',
                             'physical_failure',
                             'injury_without_fall',
                             'blocked_person',
                             'weather_event',
                             'safety_operation',
                             'critical_situation',
                             'other',
                             name='event_type_',
                             schema='guidebook')
    new_event_type.create(op.get_bind())
    op.add_column('xreports',
                  sa.Column('event_type_', new_event_type),
                  schema='guidebook')
    op.add_column('xreports_archives',
                  sa.Column('event_type_', new_event_type),
                  schema='guidebook')

    xr = Table('xreports',
               MetaData(),
               sa.Column('event_type', old_event_type),
               sa.Column('event_type_', new_event_type),
               schema='guidebook')
    xra = Table('xreports_archives',
                MetaData(),
                sa.Column('event_type', old_event_type),
                sa.Column('event_type_', new_event_type),
                schema='guidebook')
    for (old_value, new_value) in type_conversions:
        op.execute(xr.update().where(
            xr.c.event_type.contains(
                sa.literal([old_value]).cast(old_event_type))).values(
                    event_type_=op.inline_literal(new_value)))
        op.execute(xra.update().where(
            xra.c.event_type.contains(
                sa.literal([old_value]).cast(old_event_type))).values(
                    event_type_=op.inline_literal(new_value)))

    op.alter_column('xreports',
                    'event_type',
                    nullable=False,
                    schema='guidebook')
    op.alter_column('xreports_archives',
                    'event_type',
                    nullable=False,
                    schema='guidebook')

    op.drop_column('xreports', 'event_type', schema='guidebook')
    op.drop_column('xreports_archives', 'event_type', schema='guidebook')
    drop_enum('event_type', schema='guidebook')
    op.execute('ALTER TYPE guidebook.event_type_ RENAME TO event_type')
    op.alter_column('xreports',
                    'event_type_',
                    new_column_name='event_type',
                    schema='guidebook')
    op.alter_column('xreports_archives',
                    'event_type_',
                    new_column_name='event_type',
                    schema='guidebook')

    xr = Table('xreports',
               MetaData(),
               sa.Column('document_id', sa.types.INTEGER),
               sa.Column('event_type', new_event_type),
               schema='guidebook')
    # xra = Table('xreports_archives', MetaData(),
    #             sa.Column('document_id', sa.types.INTEGER),
    #             sa.Column('event_type', new_event_type),
    #             schema='guidebook')
    try:
        with open(
                './alembic_migration/versions/06d2a35e39c8_improve_serac_database_data.csv'
        ) as f:
            rr = csv.reader(f)
            header = next(rr)
            assert (header[1] == 'Document') and (header[8] == 'ENS principal')
            for line in rr:
                print("update {} -> {}".format(
                    line[1], key_map[line[8].lower().encode()]))
                op.execute(
                    xr.update().where(xr.c.document_id == int(line[1])).values(
                        event_type=key_map[line[8].lower().encode()]))
                # op.execute(xra.update()
                #            .where(xra.c.document_id == line[1])
                #            .values(event_type=key_map[line[8].lower()]))
    except Exception as e:
        print("EXCEPT!!! {} {}".format(type(e), e))
    # end of types conversion

    # convert autonomy enum
    autonomy_conversions = [('non_autonomous', 'non_autonomous'),
                            ('autonomous', 'autonomous'),
                            ('initiator', 'autonomous'), ('expert', 'expert')]
    old_autonomy_type = sa.Enum('non_autonomous',
                                'autonomous',
                                'initiator',
                                'expert',
                                name='autonomy',
                                schema='guidebook')
    new_autonomy_type = sa.Enum('non_autonomous',
                                'autonomous',
                                'expert',
                                name='autonomy_',
                                schema='guidebook')
    new_autonomy_type.create(op.get_bind())
    # op.alter_column('xreports', 'autonomy',
    #                 type_=new_autonomy_type,
    #                 existing_type=old_autonomy_type,
    #                 schema='guidebook')
    # (kept commented out: alter_column cannot cast the values automatically when the table is not empty)

    op.add_column('xreports',
                  sa.Column('autonomy_', new_autonomy_type, nullable=True),
                  schema='guidebook')
    op.add_column('xreports_archives',
                  sa.Column('autonomy_', new_autonomy_type, nullable=True),
                  schema='guidebook')

    xr = Table('xreports',
               MetaData(),
               sa.Column('autonomy', old_autonomy_type),
               sa.Column('autonomy_', new_autonomy_type, nullable=True),
               schema='guidebook')
    xra = Table('xreports_archives',
                MetaData(),
                sa.Column('autonomy', old_autonomy_type),
                sa.Column('autonomy_', new_autonomy_type, nullable=True),
                schema='guidebook')
    for (old_value, new_value) in autonomy_conversions:
        op.execute(xr.update().where(
            xr.c.autonomy == op.inline_literal(old_value)).values(
                autonomy_=op.inline_literal(new_value)))
        op.execute(xra.update().where(
            xra.c.autonomy == op.inline_literal(old_value)).values(
                autonomy_=op.inline_literal(new_value)))

    op.drop_column('xreports', 'autonomy', schema='guidebook')
    op.drop_column('xreports_archives', 'autonomy', schema='guidebook')
    # op.drop_column('xreports_archives', 'avalanche_slope', schema='guidebook')
    drop_enum('autonomy', schema='guidebook')

    # Rename enum
    op.execute('ALTER TYPE guidebook.autonomy_ RENAME TO autonomy')

    # Rename column
    op.alter_column('xreports',
                    'autonomy_',
                    new_column_name='autonomy',
                    schema='guidebook')
    op.alter_column('xreports_archives',
                    'autonomy_',
                    new_column_name='autonomy',
                    schema='guidebook')

    # end of autonomy conversion

    # convert activity enum
    activity_conversions = [('activity_rate_150', 'activity_rate_w1'),
                            ('activity_rate_50', 'activity_rate_w1'),
                            ('activity_rate_30', 'activity_rate_m2'),
                            ('activity_rate_20', 'activity_rate_m2'),
                            ('activity_rate_10', 'activity_rate_y5'),
                            ('activity_rate_5', 'activity_rate_y5'),
                            ('activity_rate_1', 'activity_rate_y5')]
    old_activity_type = sa.Enum('activity_rate_150',
                                'activity_rate_50',
                                'activity_rate_30',
                                'activity_rate_20',
                                'activity_rate_10',
                                'activity_rate_5',
                                'activity_rate_1',
                                name='activity_rate',
                                schema='guidebook')
    new_activity_type = sa.Enum('activity_rate_y5',
                                'activity_rate_m2',
                                'activity_rate_w1',
                                name='activity_rate_',
                                schema='guidebook')
    new_activity_type.create(op.get_bind())

    op.add_column('xreports',
                  sa.Column('activity_rate_', new_activity_type,
                            nullable=True),
                  schema='guidebook')
    op.add_column('xreports_archives',
                  sa.Column('activity_rate_', new_activity_type,
                            nullable=True),
                  schema='guidebook')

    xr = Table('xreports',
               MetaData(),
               sa.Column('activity_rate', old_activity_type),
               sa.Column('activity_rate_', new_activity_type, nullable=True),
               schema='guidebook')
    xra = Table('xreports_archives',
                MetaData(),
                sa.Column('activity_rate', old_activity_type),
                sa.Column('activity_rate_', new_activity_type, nullable=True),
                schema='guidebook')
    for (old_value, new_value) in activity_conversions:
        op.execute(xr.update().where(
            xr.c.activity_rate == op.inline_literal(old_value)).values(
                activity_rate_=op.inline_literal(new_value)))
        op.execute(xra.update().where(
            xra.c.activity_rate == op.inline_literal(old_value)).values(
                activity_rate_=op.inline_literal(new_value)))

    op.drop_column('xreports', 'activity_rate', schema='guidebook')
    op.drop_column('xreports_archives', 'activity_rate', schema='guidebook')
    # op.drop_column('xreports_archives', 'avalanche_slope', schema='guidebook')
    drop_enum('activity_rate', schema='guidebook')

    # Rename enum
    op.execute('ALTER TYPE guidebook.activity_rate_ RENAME TO activity_rate')

    # Rename column
    op.alter_column('xreports',
                    'activity_rate_',
                    new_column_name='activity_rate',
                    schema='guidebook')
    op.alter_column('xreports_archives',
                    'activity_rate_',
                    new_column_name='activity_rate',
                    schema='guidebook')

    # end of activity conversion

    op.drop_column('xreports', 'nb_outings', schema='guidebook')
    op.drop_column('xreports_archives', 'nb_outings', schema='guidebook')
    # op.drop_column('xreports_archives', 'avalanche_slope', schema='guidebook')
    drop_enum('nb_outings', schema='guidebook')

    supervision_type = sa.Enum('no_supervision',
                               'federal_supervision',
                               'professional_supervision',
                               name='supervision_type',
                               schema='guidebook')
    supervision_type.create(op.get_bind())
    op.add_column('xreports',
                  sa.Column('supervision', supervision_type, nullable=True),
                  schema='guidebook')
    op.add_column('xreports_archives',
                  sa.Column('supervision', supervision_type, nullable=True),
                  schema='guidebook')

    qualification_type = sa.Enum('federal_supervisor',
                                 'federal_trainer',
                                 'professional_diploma',
                                 name='qualification_type',
                                 schema='guidebook')
    qualification_type.create(op.get_bind())
    op.add_column('xreports',
                  sa.Column('qualification', qualification_type,
                            nullable=True),
                  schema='guidebook')
    op.add_column('xreports_archives',
                  sa.Column('qualification', qualification_type,
                            nullable=True),
                  schema='guidebook')
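
The migration repeats the same enum-swap choreography several times: create the new type, add a temporary column, copy values per conversion table, drop the old column and type, then rename. A hypothetical helper along these lines could factor out the scalar-enum cases (a sketch only; convert_enum is not part of the original migration, and the ArrayOfEnum conversions above would need a contains()-based variant):

import sqlalchemy as sa
from alembic import op

def convert_enum(tables, column, old_type, new_type, conversions, schema):
    tmp_col = column + '_'
    new_type.create(op.get_bind())
    for table in tables:
        op.add_column(table, sa.Column(tmp_col, new_type, nullable=True),
                      schema=schema)
        tmp = sa.Table(table, sa.MetaData(),
                       sa.Column(column, old_type),
                       sa.Column(tmp_col, new_type, nullable=True),
                       schema=schema)
        # one UPDATE per (old, new) pair, as in the migration above
        for old_value, new_value in conversions:
            op.execute(tmp.update()
                       .where(tmp.c[column] == op.inline_literal(old_value))
                       .values({tmp_col: op.inline_literal(new_value)}))
        op.drop_column(table, column, schema=schema)
    # drop the old type, then give the new type its name
    op.execute('DROP TYPE {}.{}'.format(schema, old_type.name))
    op.execute('ALTER TYPE {}.{} RENAME TO {}'.format(
        schema, new_type.name, old_type.name))
    for table in tables:
        op.alter_column(table, tmp_col, new_column_name=column, schema=schema)

Under this sketch, the autonomy conversion above would reduce to convert_enum(['xreports', 'xreports_archives'], 'autonomy', old_autonomy_type, new_autonomy_type, autonomy_conversions, schema='guidebook').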
Example #41
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('account_notes',
                  sa.Column('text_payload', sa.UnicodeText(), nullable=True))
    op.add_column('account_notes',
                  sa.Column('type', sa.String(length=50), nullable=True))
    op.execute('COMMIT')  # commit the schema changes before the ORM-based data migration below

    # init SQLAlchemy
    metadata = MetaData(naming_convention=convention)
    db = SQLAlchemy(app, metadata=metadata)
    notes = db.session.query(AccountNote).all()  # list of AccountNote rows
    for dbnote in notes:
        if dbnote.note == '':
            dbnote.note = None

        if dbnote.note == 'Account Deactivated':
            dbnote.jsonPayload = {'new_disabled': True}
            dbnote.note = None
            dbnote.type = account_notes.TYPE_ACCOUNT_ACTIVE_CHANGED
        elif dbnote.note == 'Account Activated':
            dbnote.jsonPayload = {'new_disabled': False}
            dbnote.note = None
            dbnote.type = account_notes.TYPE_ACCOUNT_ACTIVE_CHANGED
        elif dbnote.note is not None and dbnote.note.startswith(
                'Creating account. '):
            dbnote.type = account_notes.TYPE_ACCOUNT_CREATED
            dbnote.note = dbnote.note[18:]  # strip the 'Creating account. ' prefix
        # if it has role_changes and is not account creation, it is a role change
        elif len(dbnote.role_changes) > 0:
            dbnote.type = account_notes.TYPE_ACCOUNT_ROLES_CHANGED
        elif dbnote.note is not None and dbnote.note.startswith(
                'Added role with name'):
            '''
            Old note format was:
            Added role with name{role_name} and displayName {role_display_name}
            '''
            result = re.search('Added role with name(.*) and displayName (.*)',
                               dbnote.note)
            if result:
                name = result.group(1)
                displayName = result.group(2)
                dbnote.jsonPayload = {
                    'role_name': name,
                    'role_display_name': displayName
                }
                dbnote.note = None
                dbnote.type = account_notes.TYPE_ROLE_CREATED
            else:
                print(
                    f"Error in conversion to ROLE_CREATED with Note={dbnote} entryID={dbnote.entryID}"
                )
        elif dbnote.note is not None and dbnote.note.startswith(
                'Send mail to main character linked to this account with id='):
            '''
            For this we cannot recover all the information, since it was not logged
            Old note format was:
            Send mail to main character linked to this account with id={acc.current_char} and name={acc.current_char_obj.eve_name}
            '''
            result = re.search(
                r'Send mail to main character linked to this account with id=(\d+) and name=.*',
                dbnote.note)
            if result:
                character_id = result.group(1)
                dbnote.jsonPayload = {
                    'sender_character_id': None,
                    'target_character_id': character_id,
                    'mail_body': None,
                    'subject': None
                }
                dbnote.note = None
                dbnote.type = account_notes.TYPE_GOT_ACCOUNT_MAIL
            else:
                print(
                    f"Error in conversion to GOT_ACCOUNT_MAIL with Note={dbnote} entryID={dbnote.entryID}"
                )
        else:  # remaining notes should be human-written notes
            dbnote.type = account_notes.TYPE_HUMAN
    db.session.commit()
    db.session.close()

    op.alter_column('account_notes',
                    'type',
                    nullable=False,
                    existing_type=sa.String(length=50))
    op.create_index(op.f('ix_account_notes_type'),
                    'account_notes', ['type'],
                    unique=False)
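
A quick sanity check of the role-creation parsing above; the regex is the one used in the migration, while the sample string is hypothetical:

import re

sample = 'Added role with nameadmin and displayName Administrator'
m = re.search('Added role with name(.*) and displayName (.*)', sample)
assert m is not None
assert m.group(1) == 'admin' and m.group(2) == 'Administrator'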
Example #42
if redis_config["pass"]:
    redis_url = "redis://:{password}@{host}:{port}".format(
        password=redis_config["pass"],
        host=redis_config["host"],
        port=redis_config["port"])

    rds_instance = redis.Redis(host=redis_config["host"],
                               port=redis_config["port"],
                               password=redis_config["pass"])
else:
    redis_url = "redis://{host}:{port}".format(
        host=redis_config["host"],
        port=redis_config["port"])
    rds_instance = redis.Redis(host=redis_config["host"],
                               port=redis_config["port"])

mongo_url = "mongodb://{username}:{password}@{host}:{port}".format(
    username=get_config("MONGODB_USERNAME", ""),
    password=get_config("MONGODB_PASSWORD", ""),
    host=get_config("MONGODB_HOST", "mongodb"),
    port=get_config("MONGODB_PORT", "27017"))

engine = create_engine(mysql_url)

metadata = MetaData()

Base = declarative_base()

# Base.prepare(engine, reflect=True)
db_session = scoped_session(
    sessionmaker(autocommit=False, autoflush=False, bind=engine))

Base.query = db_session.query_property()
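
With Base.query bound to the scoped session via query_property(), mapped classes can be queried without passing a session around. A minimal usage sketch (the User model is hypothetical, not part of the snippet):

from sqlalchemy import Column, Integer, String

class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String(64))

Base.metadata.create_all(engine)

# Equivalent to db_session.query(User).first(), via the query property.
first_user = User.query.first()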
 def __init__(self, conn_str):
     # create engine and metadata
     self.engine = create_engine(conn_str, echo=False)
     self.metadata = MetaData(self.engine)