Code example #1
    def test_semantic_version_field(self) -> None:
        id_colname = "id"
        version_colname = "version"
        id_col = Column(id_colname, Integer, primary_key=True)
        version_col = Column(version_colname, SemanticVersionColType)

        table = Table("testtable", self.meta, id_col, version_col)
        table.create()

        v1 = Version("1.1.0")
        v2 = Version("2.0.1")
        v3 = Version("14.0.0")

        table.insert().values([
            {
                id_colname: 1,
                version_colname: v1
            },
            {
                id_colname: 2,
                version_colname: v2
            },
            {
                id_colname: 3,
                version_colname: v3
            },
        ]).execute()
        select_fields = [id_col, version_col]
        rows = list(
            select(select_fields).select_from(table).order_by(
                id_col).execute())
        self._assert_version_equal(rows[0][version_col], v1)
        self._assert_version_equal(rows[1][version_col], v2)
        self._assert_version_equal(rows[2][version_col], v3)
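
For context: SemanticVersionColType is a custom SQLAlchemy column type supplied by the code under test; its real definition is not shown here. Purely as an illustration, and assuming the semantic_version package, such a type could be sketched as a TypeDecorator with a hypothetical name:

# Illustrative sketch only -- not the actual SemanticVersionColType.
# Stores semantic_version.Version objects as plain text.
from semantic_version import Version
from sqlalchemy.types import String, TypeDecorator

class VersionAsTextColType(TypeDecorator):  # hypothetical name
    impl = String(255)

    def process_bind_param(self, value, dialect):
        # Python -> DB: serialize the Version to its string form
        return str(value) if value is not None else None

    def process_result_value(self, value, dialect):
        # DB -> Python: parse the stored string back into a Version
        return Version(value) if value is not None else None
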
Code example #2
    def test_iso_duration_field(self) -> None:
        id_colname = "id"
        duration_colname = "duration_iso"
        id_col = Column(id_colname, Integer, primary_key=True)
        duration_col = Column(duration_colname,
                              PendulumDurationAsIsoTextColType)

        table = Table("testtable", self.meta, id_col, duration_col)
        table.create()

        d1 = Duration(years=1, months=3, seconds=3, microseconds=4)
        d2 = Duration(seconds=987.654321)
        d3 = Duration(days=-5)

        table.insert().values([
            {
                id_colname: 1,
                duration_colname: d1
            },
            {
                id_colname: 2,
                duration_colname: d2
            },
            {
                id_colname: 3,
                duration_colname: d3
            },
        ]).execute()
        select_fields = [id_col, duration_col]
        rows = list(
            select(select_fields).select_from(table).order_by(
                id_col).execute())
        self._assert_duration_equal(rows[0][duration_col], d1)
        self._assert_duration_equal(rows[1][duration_col], d2)
        self._assert_duration_equal(rows[2][duration_col], d3)
Code example #3
 def insert_sample_email(self, sample_table_name, email):
     if not isinstance(email, Email):
         print('ERROR: input must be of type Email')
         return
     
     if sample_table_name == self.table_name:
         print('Cannot use the same table name')
         return
     
     email_table = Table(sample_table_name, self.metadata)
     ins_stmt = email_table.insert()
     conn = self.engine.connect()
     conn.execute(ins_stmt, date=email.date,
                           mime_type=email.mime_type,
                           from_addr=email.from_addr,
                           to_addr=email.to_addr,
                           subject=email.subject,
                           raw_body=email.raw_body,
                           cleaned_body=email.cleaned_body,
                           one_line=email.one_line,
                           path=email.path,
                           label=email.label,
                           prediction=email.prediction,
                           probability=email.probability,
                           ) 
Code example #4
    def save_data_to_database(self, df: pd.DataFrame, engine: Engine):
        if not AbstractPreprocessor._check_write_privilege(engine):
            AbstractPreprocessor.create_dir(self.output_dir, os.getlogin())
            path = Path.joinpath(self.output_dir, os.getlogin(),
                                 datetime.now().isoformat() + '.json')

            with path.open("a") as f:
                df.to_json(f)
            return
        for idx, row in df.iterrows():
            with engine.connect() as conn:
                t = Table('judgment_map', MetaData(), autoload_with=engine)
                if row['judgments']:  # only insert, when we find judgments
                    # Delete and reinsert as no upsert command is available
                    stmt = t.delete().where(delete_stmt_decisions_with_df(df))
                    conn.execute(stmt)
                    for k in row['judgments']:
                        judgment_type_id = Judgment(k).value
                        stmt = t.insert().values([{
                            "decision_id":
                            str(row['decision_id']),
                            "judgment_id":
                            judgment_type_id
                        }])
                        conn.execute(stmt)
                else:
                    self.logger.warning(
                        f"No judgments found for {row['html_url']}")
Code example #5
    def save_data_to_database(self, df: pd.DataFrame, engine: Engine):
        with engine.connect() as conn:
            t = Table('lower_court', MetaData(), autoload_with=conn)
            # Delete and reinsert as no upsert command is available
            stmt = t.delete().where(delete_stmt_decisions_with_df(df))
            conn.execute(stmt)
            for _, row in df.iterrows():
                if not 'lower_court' in row or row['lower_court'] is None:
                    continue
                lower_court = row["lower_court"]
                res = {}

                if 'court' in lower_court and lower_court['court'] is not None:
                    res['court_id'] = list(
                        self.select(
                            engine, 'court', 'court_id',
                            f"court_string = '{lower_court['court']}'")
                    )[0]['court_id'][0]
                    res['court_id'] = int(
                        res['court_id']
                    ) if res['court_id'] is not None else None
                if 'canton' in lower_court and lower_court[
                        'canton'] is not None:
                    res['canton_id'] = list(
                        self.select(engine, 'canton', 'canton_id',
                                    f"short_code = '{lower_court['canton']}'")
                    )[0]['canton_id'][0]
                    res['canton_id'] = int(
                        res['canton_id']
                    ) if res['canton_id'] is not None else None
                if 'chamber' in lower_court and lower_court[
                        'chamber'] is not None:
                    res['chamber_id'] = list(
                        self.select(
                            engine, 'chamber', 'chamber_id',
                            f"chamber_string = '{lower_court['chamber']}'")
                    )[0]['chamber_id'][0]
                    res['chamber_id'] = int(
                        res['chamber_id']
                    ) if res['chamber_id'] is not None else None

                    stmt = t.insert().values([{
                        "decision_id":
                        str(row['decision_id']),
                        "court_id":
                        res.get('court_id'),
                        "canton_id":
                        res.get('canton_id'),
                        "chamber_id":
                        res.get('chamber_id'),
                        "date":
                        lower_court.get('date'),
                        "file_number":
                        lower_court.get('file_number')
                    }])
                    conn.execute(stmt)
Code example #6
 def insert_address(self, email_address):
     if not isinstance(email_address, EmailAddress):
         print('ERROR: input must be of type EmailAddress')
         return
     
     email_address_table = Table('email_address', self.metadata)
     ins_stmt = email_address_table.insert()
     conn = self.engine.connect()
     result = conn.execute(ins_stmt, address=email_address.address,
                           name=email_address.name)
Code example #7
async def get_or_create(
    query_clause: str, query_values: dict, table: Table, insert_values: dict
):
    """取得或建立"""
    result = await database.fetch_one(query=query_clause, values=query_values)
    if result:
        # get
        return result

    query = table.insert()
    return await database.execute(query=query, values=insert_values)
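
A possible call site for the helper above, assuming the encode/databases package and a hypothetical users table with email and name columns:

# Usage sketch; `users` is a hypothetical sqlalchemy.Table, `database` is the
# databases.Database instance used by get_or_create.
async def demo() -> None:
    user = await get_or_create(
        query_clause="SELECT * FROM users WHERE email = :email",
        query_values={"email": "alice@example.com"},
        table=users,
        insert_values={"email": "alice@example.com", "name": "Alice"},
    )
    # Note: on the insert path this is whatever database.execute() returns
    # (typically the new primary key), not the full row.
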
Code example #8
    def test_phone_number_field(self) -> None:
        id_colname = "id"
        phone_number_colname = "phone_number"
        id_col = Column(id_colname, Integer, primary_key=True)
        phone_number_col = Column(phone_number_colname, PhoneNumberColType)

        table = Table("testtable", self.meta, id_col, phone_number_col)
        table.create()

        # https://en.wikipedia.org/wiki/Fictitious_telephone_number
        p1 = phonenumbers.parse("+44 (0)113 496 0123")
        p2 = phonenumbers.parse("+33 1 99 00 12 34 56")
        p3 = phonenumbers.parse("07700 900123", "GB")
        p4 = None

        table.insert().values([
            {
                id_colname: 1,
                phone_number_colname: p1
            },
            {
                id_colname: 2,
                phone_number_colname: p2
            },
            {
                id_colname: 3,
                phone_number_colname: p3
            },
            {
                id_colname: 4,
                phone_number_colname: p4
            },
        ]).execute()
        select_fields = [id_col, phone_number_col]
        rows = list(
            select(select_fields).select_from(table).order_by(
                id_col).execute())
        self.assertEqual(rows[0][phone_number_col], p1)
        self.assertEqual(rows[1][phone_number_col], p2)
        self.assertEqual(rows[2][phone_number_col], p3)
        self.assertIsNone(rows[3][phone_number_col])
Code example #9
 def insert_to_table(self, email, table_name):
     if not isinstance(email, Email):
         print('ERROR: input must be of type Email')
         return
     
     email_table = Table(table_name, self.metadata)
     ins_stmt = email_table.insert()
     conn = self.engine.connect()
     result = conn.execute(ins_stmt, date=email.date,
                           mime_type=email.mime_type,
                           from_addr=email.from_addr,
                           to_addr=email.to_addr,
                           subject=email.subject,
                           body=email.body,
                           path=email.path,
                           label=email.label)
Code example #10
def test_vector_greater_than():
    metadata = MetaData()
    table = Table('people', metadata,
                  Column('id', Integer, primary_key=True), 
                  Column('first_name', String),
                  Column('middle_name', String),
                  Column('last_name', String),
                  Column('blood_type', String))
    def random_person(idx):
        first = names.get_first_name()
        last = names.get_last_name()
        middle = random.choice([names.get_first_name, names.get_last_name, lambda: None])()
        blood_type = random.choice(['A', 'A', 'B', 'B', 'O', 'O', 'O', 'O', 'AB'])
        return {'id': idx,
                'first_name': first,
                'middle_name': middle,
                'last_name': last,
                'blood_type': blood_type
                }
    engine = create_engine('sqlite:///:memory:', echo=False)
    metadata.create_all(engine)
    def compare_results(compa, cols, vals):
        vals = tuple(map(none_to_minus_inf, vals))
        res = set([row['id'] for row in engine.execute(select(table.columns).where(compa))])
        all_ = [valmap(none_to_minus_inf, row) for row in engine.execute(select(table.columns))]
        cor = set()
        for row in all_:
            if tuple(row[col.name] for col in cols) > vals:
                cor.add(row['id'])
        assert_equal(res, cor)
    
    for i in range(1000):
        engine.execute(table.insert(random_person(i)))
    
    
    col_tuples = [(table.columns['id'],),
                  (table.columns['blood_type'], table.columns['id']),
                  (table.columns['blood_type'], table.columns['middle_name'], table.columns['id']),
                  (table.columns['blood_type'], table.columns['id'], table.columns['middle_name']),
                  (table.columns['middle_name'], table.columns['blood_type'], table.columns['id']),]
    val_tuples = [(5,),
                  ('AB', 500),
                  ('B', None, 500),
                  ('B', 500, None),
                  (None, 'B', 500)]
    for cols, vals in zip(col_tuples, val_tuples):
        compare_results(vector_greater_than(cols, vals), cols, vals)
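
vector_greater_than is assumed here to build a lexicographic comparison across several columns. Ignoring the NULL handling that none_to_minus_inf provides in the test, a minimal sketch of such a helper could look like this:

# Sketch: lexicographic (cols) > (vals) expressed as nested OR/AND terms.
# NULL handling is deliberately omitted.
from sqlalchemy import and_, or_

def vector_greater_than_sketch(cols, vals):
    clauses = []
    for i, (col, val) in enumerate(zip(cols, vals)):
        # All earlier components equal, current component strictly greater.
        prefix = [c == v for c, v in zip(cols[:i], vals[:i])]
        clauses.append(and_(*prefix, col > val))
    return or_(*clauses)
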
Code example #11
File: dump.py  Project: neelsoumya/pythonlib
def dump_orm_object_as_insert_sql(engine: Engine, obj: object,
                                  fileobj: TextIO) -> None:
    """
    Takes a SQLAlchemy ORM object, and writes ``INSERT`` SQL to replicate it
    to the output file-like object.

    Args:
        engine: SQLAlchemy :class:`Engine`
        obj: SQLAlchemy ORM object to write
        fileobj: file-like object to write to
    """
    # literal_query = make_literal_query_fn(engine.dialect)
    insp = inspect(obj)
    # insp: an InstanceState
    # http://docs.sqlalchemy.org/en/latest/orm/internals.html#sqlalchemy.orm.state.InstanceState  # noqa
    # insp.mapper: a Mapper
    # http://docs.sqlalchemy.org/en/latest/orm/mapping_api.html#sqlalchemy.orm.mapper.Mapper  # noqa

    # Don't do this:
    #   table = insp.mapper.mapped_table
    # Do this instead. The method above gives you fancy data types like list
    # and Arrow on the Python side. We want the bog-standard datatypes drawn
    # from the database itself.
    meta = MetaData(bind=engine)
    table_name = insp.mapper.mapped_table.name
    # log.debug("table_name: {}", table_name)
    table = Table(table_name, meta, autoload=True)
    # log.debug("table: {}", table)

    # NewRecord = quick_mapper(table)
    # columns = table.columns.keys()
    query = select(table.columns)
    # log.debug("query: {}", query)
    for orm_pkcol in insp.mapper.primary_key:
        core_pkcol = table.columns.get(orm_pkcol.name)
        pkval = getattr(obj, orm_pkcol.name)
        query = query.where(core_pkcol == pkval)
    # log.debug("query: {}", query)
    cursor = engine.execute(query)
    row = cursor.fetchone()  # should only be one...
    row_dict = dict(row)
    # log.debug("obj: {}", obj)
    # log.debug("row_dict: {}", row_dict)
    statement = table.insert(values=row_dict)
    # insert_str = literal_query(statement)
    insert_str = get_literal_query(statement, bind=engine)
    writeline_nl(fileobj, insert_str)
Code example #12
 def insert_cleaned_email_full(self, email):
     if not isinstance(email, Email):
         print('ERROR: input must be of type Email')
         return
     
     email_table = Table("email_full", self.metadata)
     ins_stmt = email_table.insert()
     conn = self.engine.connect()
     conn.execute(ins_stmt, date=email.date,
                           mime_type=email.mime_type,
                           from_addr=email.from_addr,
                           to_addr=email.to_addr,
                           subject=email.subject,
                           raw_body=email.raw_body,
                           body=email.body,
                           all_lines=email.all_lines,
                           one_line=email.one_line,
                           path=email.path) 
Code example #13
 def save_data_to_database(self, df: pd.DataFrame, engine: Engine):
     for idx, row in df.iterrows():
         with engine.connect() as conn:
             t = Table('citation', MetaData(), autoload_with=engine)
             # Delete and reinsert as no upsert command is available
             stmt = t.delete().where(delete_stmt_decisions_with_df(df))
             conn.execute(stmt)
             for k in row['citations'].keys():
                 citation_type_id = CitationType(k).value
                 for citation in row['citations'][k]:
                     stmt = t.insert().values([{
                         "decision_id":
                         str(row['decision_id']),
                         "citation_type_id":
                         citation_type_id,
                         "url":
                         citation.get("url"),
                         "text":
                         citation["text"]
                     }])
                     conn.execute(stmt)
Code example #14
    def save_data_to_database(self, df: pd.DataFrame, engine: Engine):
        with engine.connect() as conn:
            t_person = Table('person', MetaData(), autoload_with=conn)
            t_jud_person = Table('judicial_person',
                                 MetaData(),
                                 autoload_with=conn)

            # Delete and reinsert as no upsert command is available
            stmt = t_jud_person.delete().where(
                delete_stmt_decisions_with_df(df))
            conn.execute(stmt)

            for _, row in df.iterrows():
                if not 'court_composition' in row or row[
                        'court_composition'] is None:
                    continue
                court_composition: CourtComposition = row['court_composition']
                president = court_composition.president
                if president:
                    # create president person
                    president_dict = {
                        "name":
                        president.name.strip(),
                        "is_natural_person":
                        True,
                        "gender":
                        president.gender.value[0] if president.gender else None
                    }
                    stmt = t_person.insert().returning(
                        text("person_id")).values([president_dict])
                    person_id = conn.execute(stmt).fetchone()['person_id']
                    person_dict = {
                        "decision_id": str(row['decision_id']),
                        "person_id": person_id,
                        "judicial_person_type_id": 1,
                        "is_president": True
                    }
                    stmt = t_jud_person.insert().values([person_dict])
                    conn.execute(stmt)

                # create all judges
                for judge in court_composition.judges:
                    if judge == president:  # President is already created above
                        continue
                    judge_dict = {
                        "name": judge.name.strip(),
                        "is_natural_person": True,
                        "gender":
                        judge.gender.value[0] if judge.gender else None
                    }
                    stmt = t_person.insert().returning(
                        text("person_id")).values([judge_dict])
                    person_id = conn.execute(stmt).fetchone()['person_id']
                    person_dict = {
                        "decision_id": str(row['decision_id']),
                        "person_id": person_id,
                        "judicial_person_type_id": 1,
                        "is_president": False
                    }
                    stmt = t_jud_person.insert().values([person_dict])
                    conn.execute(stmt)

                # create all clerks
                for clerk in court_composition.clerks:
                    clerk_dict = {
                        "name": clerk.name.strip(),
                        "is_natural_person": True,
                        "gender":
                        clerk.gender.value[0] if clerk.gender else None
                    }
                    stmt = t_person.insert().returning(
                        text("person_id")).values([clerk_dict])
                    person_id = conn.execute(stmt).fetchone()['person_id']
                    person_dict = {
                        "decision_id": str(row['decision_id']),
                        "person_id": person_id,
                        "judicial_person_type_id": 2,
                        "is_president": False
                    }
                    stmt = t_jud_person.insert().values([person_dict])
                    conn.execute(stmt)

            where = f"WHERE NOT EXISTS (SELECT 1 FROM judicial_person b WHERE a.person_id = b.person_id) " \
                    f"AND NOT EXISTS (SELECT 1 FROM party c WHERE a.person_id = c.person_id)"
            stmt = t_person.delete().where(text(where))
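
The insert().returning(text("person_id")) pattern used above relies on the database supporting RETURNING (PostgreSQL does). The same statement can be written against the reflected column object instead of raw text, which is slightly safer; a minimal equivalent sketch, reusing t_person, president_dict and conn from the example:

# Equivalent RETURNING using the reflected column instead of text().
stmt = t_person.insert().returning(t_person.c.person_id).values([president_dict])
person_id = conn.execute(stmt).fetchone()['person_id']
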
Code example #15
    def save_data_to_database(self, df: pd.DataFrame, engine: Engine):

        if not AbstractPreprocessor._check_write_privilege(engine):
            AbstractPreprocessor.create_dir(self.output_dir, os.getlogin())
            path = Path.joinpath(self.output_dir, os.getlogin(),
                                 datetime.now().isoformat() + '.json')

            with path.open("a") as f:
                df.to_json(f)
            return

        df = df.loc[df['language'] != '--']
        if df.empty:
            return

        df = self.run_tokenizer(df)

        with self.get_engine(self.db_scrc).connect() as conn:
            # Load the different tables
            t = Table('section', MetaData(), autoload_with=engine)
            t_paragraph = Table('paragraph', MetaData(), autoload_with=engine)
            t_num_tokens = Table('num_tokens',
                                 MetaData(),
                                 autoload_with=engine)

            # Delete and reinsert as no upsert command is available. This pattern is used multiple times in this method
            if not isinstance(df, pd.DataFrame) or df.empty:
                # empty dfs are given as dicts, so no need to save
                return
            stmt = t.delete().where(delete_stmt_decisions_with_df(df))
            conn.execute(stmt)
            stmt = t_paragraph.delete().where(
                delete_stmt_decisions_with_df(df))
            conn.execute(stmt)

            for idx, row in df.iterrows():
                if idx % 50 == 0:
                    self.logger.info(f'Saving decision {idx + 1} from chunk')
                if row['sections'] is None or not row['sections'].keys():
                    continue

                for k in row['sections'].keys():
                    decision_id_str = str(row['decision_id'])
                    if decision_id_str == '':
                        continue
                    section_type_id = k.value
                    # insert section
                    section_dict = {
                        "decision_id": decision_id_str,
                        "section_type_id": section_type_id,
                        "section_text": row['sections'][k]
                    }
                    stmt = t.insert().returning(text("section_id")).values(
                        [section_dict])
                    section_id = conn.execute(stmt).fetchone()['section_id']

                    # Add num tokens
                    tokens_per_section = {
                        'section_id': str(section_id),
                        'num_tokens_spacy': row[k.name + '_spacy'],
                        'num_tokens_bert': row[k.name + '_bert']
                    }

                    stmt = t_num_tokens.insert().values([tokens_per_section])
                    conn.execute(stmt)

                    # Add all paragraphs
                    paragraph_dicts = []
                    for paragraph in row['sections'][k]:
                        paragraph = paragraph.strip()
                        if len(paragraph) == 0: continue
                        paragraph_dict = {
                            'section_id': str(section_id),
                            "decision_id": decision_id_str,
                            'paragraph_text': paragraph,
                            'first_level': None,
                            'second_level': None,
                            'third_level': None
                        }
                        paragraph_dicts.append(paragraph_dict)
                    if len(paragraph_dicts) > 0:
                        stmt = t_paragraph.insert().values(paragraph_dicts)
                        conn.execute(stmt)
Code example #16
File: dump.py  Project: neelsoumya/pythonlib
def dump_table_as_insert_sql(engine: Engine,
                             table_name: str,
                             fileobj: TextIO,
                             wheredict: Dict[str, Any] = None,
                             include_ddl: bool = False,
                             multirow: bool = False) -> None:
    """
    Reads a table from the database, and writes SQL to replicate the table's
    data to the output ``fileobj``.

    Args:
        engine: SQLAlchemy :class:`Engine`
        table_name: name of the table
        fileobj: file-like object to write to
        wheredict: optional dictionary of ``{column_name: value}`` to use as
            ``WHERE`` filters
        include_ddl: if ``True``, include the DDL to create the table as well
        multirow: write multi-row ``INSERT`` statements
    """
    # http://stackoverflow.com/questions/5631078/sqlalchemy-print-the-actual-query  # noqa
    # http://docs.sqlalchemy.org/en/latest/faq/sqlexpressions.html
    # http://www.tylerlesmann.com/2009/apr/27/copying-databases-across-platforms-sqlalchemy/  # noqa
    # https://github.com/plq/scripts/blob/master/pg_dump.py
    log.info("dump_data_as_insert_sql: table_name={}", table_name)
    writelines_nl(fileobj, [
        SEP1,
        sql_comment(f"Data for table: {table_name}"),
        SEP2,
        sql_comment(f"Filters: {wheredict}"),
    ])
    dialect = engine.dialect
    if not dialect.supports_multivalues_insert:
        multirow = False
    if multirow:
        log.warning("dump_data_as_insert_sql: multirow parameter substitution "
                    "not working yet")
        multirow = False

    # literal_query = make_literal_query_fn(dialect)

    meta = MetaData(bind=engine)
    log.debug("... retrieving schema")
    table = Table(table_name, meta, autoload=True)
    if include_ddl:
        log.debug("... producing DDL")
        dump_ddl(table.metadata,
                 dialect_name=engine.dialect.name,
                 fileobj=fileobj)
    # NewRecord = quick_mapper(table)
    # columns = table.columns.keys()
    log.debug("... fetching records")
    # log.debug("meta: {}", meta)  # obscures password
    # log.debug("table: {}", table)
    # log.debug("table.columns: {!r}", table.columns)
    # log.debug("multirow: {}", multirow)
    query = select(table.columns)
    if wheredict:
        for k, v in wheredict.items():
            col = table.columns.get(k)
            query = query.where(col == v)
    # log.debug("query: {}", query)
    cursor = engine.execute(query)
    if multirow:
        row_dict_list = []
        for r in cursor:
            row_dict_list.append(dict(r))
        # log.debug("row_dict_list: {}", row_dict_list)
        if row_dict_list:
            statement = table.insert().values(row_dict_list)
            # log.debug("statement: {!r}", statement)
            # insert_str = literal_query(statement)
            insert_str = get_literal_query(statement, bind=engine)
            # NOT WORKING FOR MULTIROW INSERTS. ONLY SUBSTITUTES FIRST ROW.
            writeline_nl(fileobj, insert_str)
        else:
            writeline_nl(fileobj, sql_comment("No data!"))
    else:
        found_one = False
        for r in cursor:
            found_one = True
            row_dict = dict(r)
            statement = table.insert(values=row_dict)
            # insert_str = literal_query(statement)
            insert_str = get_literal_query(statement, bind=engine)
            # log.debug("row_dict: {}", row_dict)
            # log.debug("insert_str: {}", insert_str)
            writeline_nl(fileobj, insert_str)
        if not found_one:
            writeline_nl(fileobj, sql_comment("No data!"))
    writeline_nl(fileobj, SEP2)
    log.debug("... done")
Code example #17
class SqlDatabaseService(ApplicationSession):
    """An application component for logging telemetry data to sql databases"""

    def __init__(self, config):
        ApplicationSession.__init__(self, config)
        self.count = 0
        self.engine = None


        metadata = MetaData()
        self.telemetry = Table("telemetry", metadata,
            Column("id", Integer(), primary_key=True),
            Column("MSG_ID", Integer()),
            Column("V_FC", Integer()),
            Column("V_CAP", Integer()),
            Column("A_ENG", Integer()),
            Column("A_CAP", Integer()),
            Column("T_O2_In", Integer()),
            Column("T_O2_Out", Integer()),
            Column("T_FC_H2O_Out", Integer()),
            Column("Water_In", Integer()),
            Column("Water_Out", Integer()),
            Column("Master_SW", Integer()),
            Column("CAP_Down_SW", Integer()),
            Column("Drive_SW", Integer()),
            Column("FC_state", Integer()),
            Column("Mosfet_state", Integer()),
            Column("Safety_state", Integer()),
            Column("Air_Pump_load", Numeric()),
            Column("Mosfet_load", Integer()),
            Column("Water_Pump_load", Integer()),
            Column("Fan_load", Integer()),
            Column("Acc_X", Integer()),
            Column("Acc_Y", Integer()),
            Column("Acc_Z", Integer()),
            Column("AUX", Numeric()),
            Column("GPS_X", Integer()),
            Column("GPS_Y", Integer()),
            Column("GPS_Z", Integer()),
            Column("GPS_Speed", Integer()),
            Column("V_Safety", Integer()),
            Column("H2_Level", Integer()),
            Column("O2_calc", Numeric()),
            Column("lat", Numeric()),
            Column("lng", Numeric()),
            )


#        metadata = MetaData()
#        self.telemetry = Table("telemetry", metadata,
#            Column("id", Integer(), primary_key=True),
#            Column("mma_x", Integer()),
#            Column("mma_y", Integer()),
#            Column("temp", Numeric()),
#	    Column("lat", Numeric()),
#	    Column("lng", Numeric()),
#        )

    #@inlineCallbacks
    def onJoin(self, details):
        print("Realm joined (WAMP session started).")

        # subscribe to telemetry data channel
        self.subscribe(self.receive, u'de.elmyra.kotori.telemetry.data')

        self.startDatabase()

        #self.leave()

    #@inlineCallbacks
    def startDatabase(self):
        self.engine = create_engine(

            # sqlite in-memory
            #"sqlite://", reactor=reactor, strategy=TWISTED_STRATEGY

            # sqlite on filesystem
            "sqlite:////tmp/kotori.sqlite", reactor=reactor, strategy=TWISTED_STRATEGY

            # mysql... todo
        )

        # Create the table
        yield self.engine.execute(CreateTable(self.telemetry))
        #yield self.engine

    def onLeave(self, details):
        print("Realm left (WAMP session ended).")
        ApplicationSession.onLeave(self, details)

    def onDisconnect(self):
        print("Transport disconnected.")
        #reactor.stop()

    #@inlineCallbacks
    def receive(self, data):
        #print "RECEIVE:", data

        # decode wire data
        payload = data.split(';')
        MSG_ID          = int(payload[0])
        V_FC            = int(payload[1])
        V_CAP           = int(payload[2])
        A_ENG           = int(payload[3])
        A_CAP           = int(payload[4])
        T_O2_In         = int(payload[5])
        T_O2_Out        = int(payload[6])
        T_FC_H2O_Out    = int(payload[7])
        Water_In        = int(payload[8])
        Water_Out       = int(payload[9])
        Master_SW       = int(payload[10])
        CAP_Down_SW     = int(payload[11])
        Drive_SW        = int(payload[12])
        FC_state        = int(payload[13])
        Mosfet_state    = int(payload[14])
        Safety_state    = int(payload[15])
        Air_Pump_load   = float(payload[16])
        Mosfet_load     = int(payload[17])
        Water_Pump_load = int(payload[18])
        Fan_load        = int(payload[19])
        Acc_X           = int(payload[20])
        Acc_Y           = int(payload[21])
        Acc_Z           = int(payload[22])
        AUX             = float(payload[23])
        GPS_X           = int(payload[24])
        GPS_Y           = int(payload[25])
        GPS_Z           = int(payload[26])
        GPS_Speed       = int(payload[27])
        V_Safety        = int(payload[28])
        H2_Level        = int(payload[29])
        O2_calc         = float(payload[30])
        lat             = float(payload[31])
        lng             = float(payload[32])




#        mma_x = int(payload[0])
#        mma_y = int(payload[1])
#        temp = float(payload[2])
#        try:
#            lat = float(payload[3])
#            lng = float(payload[4])
#        except:
#            lat = 0
#            lng = 0

        # store data to database
        if self.engine:
            yield self.engine.execute(self.telemetry.insert().values(MSG_ID = MSG_ID, V_FC = V_FC, V_CAP = V_CAP, A_ENG = A_ENG, A_CAP = A_CAP, T_O2_In = T_O2_In, T_O2_Out = T_O2_Out, T_FC_H2O_Out = T_FC_H2O_Out, Water_In = Water_In, Water_Out = Water_Out, Master_SW = Master_SW, CAP_Down_SW = CAP_Down_SW, Drive_SW = Drive_SW, FC_state = FC_state, Mosfet_state = Mosfet_state, Safety_state = Safety_state, Air_Pump_load = Air_Pump_load, Mosfet_load = Mosfet_load, Water_Pump_load = Water_Pump_load, Fan_load = Fan_load, Acc_X = Acc_X, Acc_Y = Acc_Y, Acc_Z = Acc_Z, AUX = AUX, GPS_X = GPS_X, GPS_Y = GPS_Y, GPS_Z = GPS_Z, GPS_Speed = GPS_Speed, V_Safety = V_Safety, H2_Level = H2_Level, lat = lat, lng = lng))
Code example #18
    def save_data_to_database(self, df: pd.DataFrame, engine: Engine):
        """
            'plaintiff': 1,
            'defendant': 2,
            'representation_plaintiff': 3,
            'representation_defendant': 4
        """
        for idx, row in df.iterrows():
            with engine.connect() as conn:
                t_person = Table('person', MetaData(), autoload_with=conn)
                t_party = Table('party', MetaData(), autoload_with=conn)

                # Delete person
                # Delete and reinsert as no upsert command is available
                stmt = t_party.delete().where(delete_stmt_decisions_with_df(df))
                conn.execute(stmt)

                parties = json.loads(row['parties'])

                # create all plaintiffs
                offset = 0
                for plaintiff in parties.get('plaintiffs'):
                    # Insert person into the person table
                    plaintiff_dict = {"name": plaintiff['name'].strip(),
                                      "is_natural_person": plaintiff['legal_type'] == 'natural person',
                                      "gender": plaintiff['gender']}
                    stmt = t_person.insert().returning(column("person_id")).values([plaintiff_dict])
                    person_id = conn.execute(stmt).fetchone()['person_id']
                    # Then insert the person into the party table
                    stmt = t_party.insert().values(
                        [{"decision_id": str(row['decision_id']), "person_id": person_id, "party_type_id": 1 + offset}])
                    conn.execute(stmt)
                    for representant in plaintiff.get('legal_counsel'):
                        # Insert their representation into the person and party tables
                        representant_dict = {"name": representant['name'].strip(),
                                             "is_natural_person": representant['legal_type'] == 'natural person',
                                             "gender": representant['gender']}
                        stmt = t_person.insert().returning(text("person_id")).values([representant_dict])
                        person_id = conn.execute(stmt).fetchone()['person_id']
                        stmt = t_party.insert().values([{"decision_id": str(row['decision_id']), "person_id": person_id,
                                                         "party_type_id": 3 + offset}])
                        conn.execute(stmt)

                # create all defendants
                offset = 1
                for defendant in parties.get('defendants'):
                    # Insert person into the person table
                    defendant_dict = {"name": plaintiff['name'].strip(),
                                      "is_natural_person": plaintiff['legal_type'] == 'natural person',
                                      "gender": plaintiff['gender']}
                    stmt = t_person.insert().returning(text("person_id")).values([defendant_dict])
                    person_id = conn.execute(stmt).fetchone()['person_id']
                    # Then insert the person into the party table
                    stmt = t_party.insert().values(
                        [{"decision_id": str(row['decision_id']), "person_id": person_id, "party_type_id": 1 + offset}])
                    conn.execute(stmt)
                    for representant in defendant.get('legal_counsel'):
                        # Insert their representation into the person and party tables
                        representant_dict = {"name": representant['name'].strip(),
                                             "is_natural_person": representant['legal_type'] == 'natural person',
                                             "gender": representant['gender']}
                        stmt = t_person.insert().returning(text("person_id")).values([representant_dict])
                        person_id = conn.execute(stmt).fetchone()['person_id']
                        stmt = t_party.insert().values([{"decision_id": str(row['decision_id']), "person_id": person_id,
                                                         "party_type_id": 3 + offset}])
                        conn.execute(stmt)

        with engine.connect() as conn:
            stmt = t_person.delete().where(text(
                f"NOT EXISTS (SELECT FROM judicial_person jp WHERE jp.person_id = person.person_id UNION SELECT FROM party WHERE party.person_id = person.person_id)"))
            conn.execute(stmt)
Code example #19
File: sql_orm.py  Project: RelaxBin/notes
teacher = Table("teacher", meta_data, autoload=True)

data = [
    {
        "name":"teacher_1",
        "age":random.randint(20,40),
    },
    {
        "name":"teacher_2",
        "age":random.randint(20,40),
        "sex":1
    }
]

connect.execute(teacher.insert(), data)

## Querying

results = session.query(Student).filter(Student.age == 12).filter(Student.sex == 1).all()
for result in results:
    print(result.age,"-------",result.name)

## Update
student = session.query(Student).filter(Student.id == 3).first()
if student is not None:
    student.name = "July"
    session.commit()


student = session.query(Student).filter(Student.id == 4)
Code example #20
def populate(target: Table, connection: Connection, **kwargs):
    connection.execute(target.insert(), *rows)
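
The (target, connection, **kwargs) signature above matches a SQLAlchemy DDL event hook, and rows is assumed to exist in the enclosing scope. A hedged sketch of how such a callback is typically registered:

# Sketch: run populate() right after the table is created.
# `my_table` and `rows` are assumptions, not taken from the source.
from sqlalchemy import event

event.listen(my_table, "after_create", populate)
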
Code example #21
    def test_iso_datetime_field(self) -> None:
        id_colname = "id"
        dt_local_colname = "dt_local"
        dt_utc_colname = "dt_utc"
        iso_colname = "iso"
        id_col = Column(id_colname, Integer, primary_key=True)
        dt_local_col = Column(dt_local_colname, DateTime)
        dt_utc_col = Column(dt_utc_colname, DateTime)
        iso_col = Column(iso_colname, PendulumDateTimeAsIsoTextColType)

        table = Table("testtable", self.meta, id_col, dt_local_col, dt_utc_col,
                      iso_col)
        table.create()

        now = Pendulum.now()
        now_utc = now.in_tz(pendulum.UTC)
        yesterday = now.subtract(days=1)
        yesterday_utc = yesterday.in_tz(pendulum.UTC)

        table.insert().values([
            {
                id_colname: 1,
                dt_local_colname: now,
                dt_utc_colname: now_utc,
                iso_colname: now,
            },
            {
                id_colname: 2,
                dt_local_colname: yesterday,
                dt_utc_colname: yesterday_utc,
                iso_colname: yesterday,
            },
        ]).execute()
        select_fields = [
            id_col,
            dt_local_col,
            dt_utc_col,
            iso_col,
            func.length(dt_local_col).label("len_dt_local_col"),
            func.length(dt_utc_col).label("len_dt_utc_col"),
            func.length(iso_col).label("len_iso_col"),
            isotzdatetime_to_utcdatetime(iso_col).label("iso_to_utcdt"),
            unknown_field_to_utcdatetime(dt_utc_col).label(
                "uk_utcdt_to_utcdt"),
            unknown_field_to_utcdatetime(iso_col).label("uk_iso_to_utc_dt"),
        ]
        rows = list(
            select(select_fields).select_from(table).order_by(
                id_col).execute())
        self._assert_dt_equal(rows[0][dt_local_col], now)
        self._assert_dt_equal(rows[0][dt_utc_col], now_utc)
        self._assert_dt_equal(rows[0][iso_colname], now)
        self._assert_dt_equal(rows[0]["iso_to_utcdt"], now_utc)
        self._assert_dt_equal(rows[0]["uk_utcdt_to_utcdt"], now_utc)
        self._assert_dt_equal(rows[0]["uk_iso_to_utc_dt"], now_utc)
        self._assert_dt_equal(rows[1][dt_local_col], yesterday)
        self._assert_dt_equal(rows[1][dt_utc_col], yesterday_utc)
        self._assert_dt_equal(rows[1][iso_colname], yesterday)
        self._assert_dt_equal(rows[1]["iso_to_utcdt"], yesterday_utc)
        self._assert_dt_equal(rows[1]["uk_utcdt_to_utcdt"], yesterday_utc)
        self._assert_dt_equal(rows[1]["uk_iso_to_utc_dt"], yesterday_utc)