# Names of every table defined by this schema module (the TABLE_* constants
# are defined elsewhere in the file); presumably iterated for bulk
# maintenance such as purge/migration — confirm against callers.
ALL_TABLES = [
    TABLE_STATES,
    TABLE_EVENTS,
    TABLE_RECORDER_RUNS,
    TABLE_SCHEMA_CHANGES,
    TABLE_STATISTICS,
    TABLE_STATISTICS_META,
    TABLE_STATISTICS_RUNS,
    TABLE_STATISTICS_SHORT_TERM,
]

# Timezone-aware datetime; on MySQL use DATETIME(fsp=6) so microsecond
# precision is kept (plain DateTime would truncate fractional seconds there).
DATETIME_TYPE = DateTime(timezone=True).with_variant(
    mysql.DATETIME(timezone=True, fsp=6), "mysql")
# Generic float that maps to a true double-precision column on MySQL,
# Oracle and PostgreSQL; asdecimal=False keeps MySQL returning floats
# rather than Decimal objects.
DOUBLE_TYPE = (Float().with_variant(
    mysql.DOUBLE(asdecimal=False),
    "mysql").with_variant(oracle.DOUBLE_PRECISION(),
                          "oracle").with_variant(postgresql.DOUBLE_PRECISION(),
                                                 "postgresql"))


class Events(Base):  # type: ignore
    """Event history data."""

    __table_args__ = (
        # Used for fetching events at a specific time
        # see logbook
        Index("ix_events_event_type_time_fired", "event_type", "time_fired"),
        {
            "mysql_default_charset": "utf8mb4",
            "mysql_collate": "utf8mb4_unicode_ci"
        },
# Bring in the SQLAlchemy column/table building blocks used below.
from sqlalchemy import Table, Column, String, Integer, Float, Boolean

# Describe a 'data' table with name, count, amount and valid columns.
# `metadata` is assumed to be a MetaData instance defined elsewhere.
data = Table('data', metadata, Column('name', String(255)),
             Column('count', Integer()), Column('amount', Float()),
             Column('valid', Boolean()))

# Emit CREATE TABLE statements for everything registered on the metadata.
metadata.create_all(engine)

# Print table details.
# NOTE(review): Engine.table_names() is deprecated in SQLAlchemy 1.4+
# (use inspect(engine).get_table_names()) — confirm the pinned version.
print(repr(engine.table_names()))
예제 #3
0
class QueueItem(Base):
    """
    One entry in the play queue.

    queue_weight - rationale:
    The id will never change, but the queue is sorted by this weight.
    An admin can change the order of the queue; the algorithm lists the
    queued items in weight order and sets a moved item's new weight to be
    in between the prev/next queued item values.  This supports reordering
    without changing the primary key.  To ensure queue_weight is set
    appropriately, a pre-save handler (before_insert_listener below)
    queries for an appropriate value on first save.
    """
    __tablename__   = "queue"

    id              = Column(Integer(),        primary_key=True)

    track_id        = Column(String(),    ForeignKey('track.id'), nullable=False)

    performer_name  = Column(Unicode(),   nullable=True, default="Untitled")
    session_owner   = Column(Unicode(),   nullable=True)

    queue_weight    = Column(Float()  ,   index=True, nullable=False) # set by before_insert_listener to max weight + 1 on first save

    time_added      = Column(DateTime(),  nullable=False, default=now)
    time_touched    = Column(DateTime(),  nullable=False, default=now)

    status          = Column(_queueitem_statuss ,  nullable=False, default="pending")

    # AllanC - there is no linking of the models now. Track and Queue are linked at an API level and can be in two separate DB's
    #track           = relationship("Track", backref=backref('queued')) # # AllanC - this makes the queue aware of the track and tightens coupling ... ultimately the queue and tracks should be in different db's but then the sqla links wont function ... think neo!!

    # Per-serialisation-mode field lists; deep-copied so the mapping on the
    # Base class itself is never mutated.
    __to_dict__ = copy.deepcopy(Base.__to_dict__)
    __to_dict__.update({
        'default': {
    #Base.to_dict_setup(self, list_type='default', field_processors={
            'id'            : None ,
            'track_id'      : None ,
            'performer_name': None ,
            'time_touched'  : None ,
            'time_added'    : None ,
            'queue_weight'  : None ,
        },
    })

    # 'full' mode is the 'default' field set plus status/session_owner.
    __to_dict__.update({'full': copy.deepcopy(__to_dict__['default'])})
    __to_dict__['full'].update({
            'status'       : None ,
            'session_owner': None ,
            #'track'       : lambda queue_item: queue_item.track.to_dict(include_fields='attachments'),
            #'image'       : lambda queue_item: single_image(queue_item),    # AllanC - if you use this ensure you have setup eager loading on your query
    })

    @staticmethod
    def new_weight(DBSession):
        """
        Find the biggest queue weight possible and increment.
        Used when creating new queue items or moving items in the list beyond the last element
        """
        try:
            (max_weight,) = DBSession.query(QueueItem.queue_weight).order_by(QueueItem.queue_weight.desc()).limit(1).one()
        except NoResultFound:
            max_weight = 0.0
        return max_weight + 1.0

    @staticmethod
    def before_insert_listener(mapper, connection, target):
        """
        Event to set weight of queued item before first commit.
        Find the maximum 'weight' in the database
        This queue item weight will be set to max_weight + 1.0
        """
        # NOTE(review): `not target.queue_weight` also treats 0.0 as unset —
        # confirm a zero weight can never be assigned intentionally.
        if not target.queue_weight:
            target.queue_weight = QueueItem.new_weight(Session(bind=connection))
예제 #4
0
def load_unicode_test_data(only_metadata=False, force=False):
    """Load the unicode test dataset from a csv file in the repo.

    Args:
        only_metadata: when True, skip loading the data itself and only
            (re)create the table reference, slice and dashboard objects.
        force: when True, reload the data even if the table already exists.
    """
    tbl_name = "unicode_test"
    database = utils.get_example_database()
    table_exists = database.has_table_by_name(tbl_name)

    if not only_metadata and (not table_exists or force):
        data = get_example_data("unicode_utf8_unixnl_test.csv",
                                is_gzip=False,
                                make_bytes=True)
        df = pd.read_csv(data, encoding="utf-8")
        # generate date/numeric data to complement the text columns
        df["dttm"] = datetime.datetime.now().date()
        df["value"] = [random.randint(1, 100) for _ in range(len(df))]
        # replace any existing table wholesale; dtype pins the SQL column types
        df.to_sql(  # pylint: disable=no-member
            tbl_name,
            database.get_sqla_engine(),
            if_exists="replace",
            chunksize=500,
            dtype={
                "phrase": String(500),
                "short_phrase": String(10),
                "with_missing": String(100),
                "dttm": Date(),
                "value": Float(),
            },
            index=False,
        )
        print("Done loading table!")
        print("-" * 80)

    print("Creating table [unicode_test] reference")
    # Upsert the table reference row (merge handles both create and update).
    obj = db.session.query(TBL).filter_by(table_name=tbl_name).first()
    if not obj:
        obj = TBL(table_name=tbl_name)
    obj.main_dttm_col = "dttm"
    obj.database = database
    db.session.merge(obj)
    db.session.commit()
    obj.fetch_metadata()
    tbl = obj

    # Parameters for the word-cloud slice built on the new table.
    slice_data = {
        "granularity_sqla": "dttm",
        "groupby": [],
        "metric": {
            "aggregate": "SUM",
            "column": {
                "column_name": "value"
            },
            "expressionType": "SIMPLE",
            "label": "Value",
        },
        "row_limit": config.get("ROW_LIMIT"),
        "since": "100 years ago",
        "until": "now",
        "where": "",
        "viz_type": "word_cloud",
        "size_from": "10",
        "series": "short_phrase",
        "size_to": "70",
        "rotation": "square",
        "limit": "100",
    }

    print("Creating a slice")
    slc = Slice(
        slice_name="Unicode Cloud",
        viz_type="word_cloud",
        datasource_type="table",
        datasource_id=tbl.id,
        params=get_slice_json(slice_data),
    )
    merge_slice(slc)

    print("Creating a dashboard")
    dash = db.session.query(Dash).filter_by(slug="unicode-test").first()

    if not dash:
        dash = Dash()
    # Dashboard layout (position JSON, schema "v2") with one chart in one row.
    js = """\
{
    "CHART-Hkx6154FEm": {
        "children": [],
        "id": "CHART-Hkx6154FEm",
        "meta": {
            "chartId": 2225,
            "height": 30,
            "sliceName": "slice 1",
            "width": 4
        },
        "type": "CHART"
    },
    "GRID_ID": {
        "children": [
            "ROW-SyT19EFEQ"
        ],
        "id": "GRID_ID",
        "type": "GRID"
    },
    "ROOT_ID": {
        "children": [
            "GRID_ID"
        ],
        "id": "ROOT_ID",
        "type": "ROOT"
    },
    "ROW-SyT19EFEQ": {
        "children": [
            "CHART-Hkx6154FEm"
        ],
        "id": "ROW-SyT19EFEQ",
        "meta": {
            "background": "BACKGROUND_TRANSPARENT"
        },
        "type": "ROW"
    },
    "DASHBOARD_VERSION_KEY": "v2"
}
    """
    dash.dashboard_title = "Unicode Test"
    pos = json.loads(js)
    # Sync the real slice id into the layout's chart entries before saving.
    update_slice_ids(pos, [slc])
    dash.position_json = json.dumps(pos, indent=4)
    dash.slug = "unicode-test"
    dash.slices = [slc]
    db.session.merge(dash)
    db.session.commit()
예제 #5
0
class Asset_table(Base):
    """ORM model mapping one asset balance to a user."""

    __tablename__ = 'asset_orm'
    asset_id = Column(Integer(), primary_key=True)
    # plain indexed integer, not a ForeignKey — the user table is referenced
    # only by id here
    user_id = Column(Integer(), index=True, nullable=False)
    balance = Column(Float(), index=True, nullable=False)
예제 #6
0
class Point(object):
    """Plain class declaring id/latitude/longitude columns.

    Not derived from a declarative Base here — presumably combined with a
    mapper or used as a mixin elsewhere; confirm before reuse.
    """

    id = Column(Integer, primary_key=True)
    latitude = Column(Float())
    longitude = Column(Float())
예제 #7
0
class Filingtype(RefTypeMixin, AuditMixin, Model):
    """Reference table of filing types with flat and per-page costs."""

    __tablename__ = 'filingtype'

    id = Column(Integer, primary_key=True, autoincrement=True)
    cost = Column(Float(53), nullable=False)         # Float(53): double precision
    perpagecost = Column(Float(53), nullable=False)  # cost per page, same precision
예제 #8
0

def buscarId(matriz, identificador):
    """Linear search for the element whose ``id`` equals *identificador*.

    Returns the first matching element of *matriz*, or ``False`` when no
    element matches (note: ``False``, not ``None`` — callers rely on it).
    """
    return next(
        (elemento for elemento in matriz if elemento.id == identificador),
        False,
    )


# Shared schema registry for the tables defined below.
metadata = MetaData()
# Structure of the 'todo' table: one sensor reading per row — temperature,
# humidity, four generic channels, cabinet temperature — with separate
# time ('hora') and date ('fecha') columns.
todo = Table(
    'todo',
    metadata,
    Column('id', Integer, primary_key=True),
    Column('temperatura', Float()),
    Column('humedad', Float()),
    Column('canal1', Float()),
    Column('canal2', Float()),
    Column('canal3', Float()),
    Column('canal4', Float()),
    Column('tempGabinete', Float()),
    Column('hora', Time()),
    Column('fecha', Date()),
)

#Estructura de la tabla configuracion
configuracion = Table('configuracion', metadata,
                      Column('id', Integer, primary_key=True),
                      Column('tipo', Integer), Column('frec', Integer),
                      Column('potmax', Float()), Column('potmin', Integer()),
예제 #9
0
def load_energy():
    """Loads an energy related dataset to use with sankey and graphs.

    Reads the gzipped JSON fixture, writes it to the ``energy_usage``
    table, registers the table reference, and creates three example
    slices: a sankey, a directed-force layout and a heatmap.
    """
    tbl_name = 'energy_usage'
    with gzip.open(os.path.join(DATA_FOLDER, 'energy.json.gz')) as f:
        pdf = pd.read_json(f)
    # Replace any existing table wholesale; dtype pins the SQL column types.
    pdf.to_sql(
        tbl_name,
        db.engine,
        if_exists='replace',
        chunksize=500,
        dtype={
            'source': String(255),
            'target': String(255),
            'value': Float(),
        },
        index=False)

    # Fixed: the message previously said [wb_health_population] (copied from
    # another loader); this function creates the [energy_usage] reference.
    print("Creating table [energy_usage] reference")
    # Upsert the table reference row (merge handles both create and update).
    tbl = db.session.query(TBL).filter_by(table_name=tbl_name).first()
    if not tbl:
        tbl = TBL(table_name=tbl_name)
    tbl.description = "Energy consumption"
    tbl.database = get_or_create_main_db()
    db.session.merge(tbl)
    db.session.commit()
    tbl.fetch_metadata()

    slc = Slice(
        slice_name="Energy Sankey",
        viz_type='sankey',
        datasource_type='table',
        datasource_id=tbl.id,
        params=textwrap.dedent("""\
        {
            "collapsed_fieldsets": "",
            "groupby": [
                "source",
                "target"
            ],
            "having": "",
            "metric": "sum__value",
            "row_limit": "5000",
            "slice_name": "Energy Sankey",
            "viz_type": "sankey",
            "where": ""
        }
        """)
    )
    misc_dash_slices.append(slc.slice_name)
    merge_slice(slc)

    slc = Slice(
        slice_name="Energy Force Layout",
        viz_type='directed_force',
        datasource_type='table',
        datasource_id=tbl.id,
        params=textwrap.dedent("""\
        {
            "charge": "-500",
            "collapsed_fieldsets": "",
            "groupby": [
                "source",
                "target"
            ],
            "having": "",
            "link_length": "200",
            "metric": "sum__value",
            "row_limit": "5000",
            "slice_name": "Force",
            "viz_type": "directed_force",
            "where": ""
        }
        """)
    )
    misc_dash_slices.append(slc.slice_name)
    merge_slice(slc)

    slc = Slice(
        slice_name="Heatmap",
        viz_type='heatmap',
        datasource_type='table',
        datasource_id=tbl.id,
        params=textwrap.dedent("""\
        {
            "all_columns_x": "source",
            "all_columns_y": "target",
            "canvas_image_rendering": "pixelated",
            "collapsed_fieldsets": "",
            "having": "",
            "linear_color_scheme": "blue_white_yellow",
            "metric": "sum__value",
            "normalize_across": "heatmap",
            "slice_name": "Heatmap",
            "viz_type": "heatmap",
            "where": "",
            "xscale_interval": "1",
            "yscale_interval": "1"
        }
        """)
    )
    misc_dash_slices.append(slc.slice_name)
    merge_slice(slc)
예제 #10
0
class Auction(Base):
    """An auction listing for a single item."""

    __tablename__ = 'auctions'

    # Primary key driven by the DB-side sequence (PostgreSQL ::regclass cast).
    aid = Column(Integer,
                 primary_key=True,
                 server_default=text("nextval('auctions_aid_seq'::regclass)"))
    iid = Column(ForeignKey('item.iid'))
    startingbid = Column(Float(53))  # Float(53): double precision
    adeadline = Column(String(8))    # fixed-width string; presumably a date — confirm format

    item = relationship('Item')


# Bare association table recording user bids on auctions (no ORM class
# is mapped to it).
t_bids = Table('bids', metadata, Column('aid', ForeignKey('auctions.aid')),
               Column('uid', ForeignKey('artuser.uid')),
               Column('bid', Float(53)))


class Creditcard(Base):
    """A credit card on file for an artuser."""

    __tablename__ = 'creditcard'

    # Primary key driven by the DB-side sequence (PostgreSQL ::regclass cast).
    cid = Column(
        Integer,
        primary_key=True,
        server_default=text("nextval('creditcard_cid_seq'::regclass)"))
    uid = Column(ForeignKey('artuser.uid'))
    cname = Column(String(15))    # cardholder name
    # NOTE(review): Integer overflows for real 16-digit card numbers on
    # 32-bit column types — confirm the intended storage.
    cnumber = Column(Integer)
    ctype = Column(String(15))
    expdate = Column(String(8))
예제 #11
0
class Wallet(Base):
    """A wallet row keyed by an opaque string id, holding one coin balance."""

    __tablename__ = 'wallet'

    id = Column(String(50), primary_key=True)
    # Float(50): the argument is precision, not a character length
    balance = Column(Float(50), nullable=False)
    coin_symbol = Column(String(10), nullable=False)
예제 #12
0
    Track.c.size,
    unique=True,
)

# NOTE(review): the lowercase table(...) used for the next three objects is
# presumably sqlalchemy.sql.table, which (unlike Table) is not bound to a
# MetaData and normally takes column() rather than Column() — confirm this
# is intentional.
TrackStore = table(
    'track_storage',
    Column('name', String(1024), nullable=False),
)

# Maps a track to its physical location within a store.
TrackLocation = table(
    'track_location',
    Column('track', None, ForeignKey('track.uuid'), nullable=False),
    Column('location', String(4096), nullable=False),
    Column('store', None, ForeignKey('track_storage.uuid'), nullable=False),
)

# One playback event of a track, with feedback metadata.
Play = table(
    'play',
    Column('track', None, ForeignKey('track.uuid'), nullable=False),
    Column('mood', String(1024), nullable=False),
    Column('played_seconds', Float(), nullable=False),
    Column('positive_feedback', Boolean(), nullable=True),
    Column('reinforced_at_time', DateTime(), nullable=True),
)

# Current player state (metadata-bound, unlike the tables above).
State = Table(
    'state',
    metadata,
    Column('mood', String(1024), nullable=True),
)
예제 #13
0
class CareerPath(DBBASE):
    """
    One stage of a member's career path.

    Each row links a UserDatas record to a career stage, carrying contract
    details, activity goals and (for exits) the exit type and reason.  The
    ``info`` dicts on each column drive colanderalchemy form generation and
    data export configuration; their user-facing strings are French.
    """
    __colanderalchemy_config__ = {
        'title': u"Etape de parcours",
        'help_msg': u"",
        'validation_msg': u"L'étape de parcours a bien été enregistrée",
        'widget': deform_extensions.GridFormWidget(named_grid=CAREER_PATH_GRID)
    }
    __tablename__ = 'career_path'
    __table_args__ = default_table_args
    # Surrogate key, hidden from generated forms.
    id = Column(
        'id',
        Integer,
        primary_key=True,
        info={'colanderalchemy': {
            'widget': deform.widget.HiddenWidget()
        }},
    )
    # Owning user-data record; excluded from forms, partially exported.
    userdatas_id = Column(
        ForeignKey('user_datas.id'),
        info={
            'colanderalchemy': {
                'exclude': True
            },
            'export': {
                'label': u"Identifiant Autonomie",
                'stats': {
                    'exclude': True
                },
            }
        },
    )
    userdatas = relationship('UserDatas',
                             info={
                                 'colanderalchemy': {
                                     'exclude': True
                                 },
                                 'export': {
                                     'exclude': True
                                 },
                             })
    # Validity window of this stage; only the start is mandatory.
    start_date = Column(Date(),
                        nullable=False,
                        info={'colanderalchemy': {
                            'title': u"Date d'effet"
                        }})
    end_date = Column(Date(),
                      info={'colanderalchemy': {
                          'title': u"Date d'échéance"
                      }})
    career_stage_id = Column(
        ForeignKey('career_stage.id'),
        info={'colanderalchemy': {
            'title': u"Type d'étape"
        }})
    career_stage = relationship(
        'CareerStage',
        primaryjoin='CareerStage.id==CareerPath.career_stage_id',
        info={
            'colanderalchemy':
            get_excluded_colanderalchemy(u'Etape de parcours'),
            'export': {
                'related_key': 'label'
            },
        },
    )
    cae_situation_id = Column(
        ForeignKey('cae_situation_option.id'),
        info={
            'colanderalchemy': {
                'title':
                u"Nouvelle situation dans la CAE",
                'description':
                u"Lorsque cette étape sera affectée à un \
porteur cette nouvelle situation sera proposée par défaut"
            }
        })
    cae_situation = relationship(
        'CaeSituationOption',
        primaryjoin='CaeSituationOption.id==CareerPath.cae_situation_id',
        info={
            'colanderalchemy':
            get_excluded_colanderalchemy(u'Situation dans la CAE'),
            'export': {
                'related_key': 'label'
            },
        },
    )
    # Free-typed stage kind; exported as a label via STAGE_TYPE_OPTIONS.
    stage_type = Column(String(15),
                        info={
                            'colanderalchemy': {
                                'title': u"Type d'étape"
                            },
                            'export': {
                                'formatter':
                                lambda val: dict(STAGE_TYPE_OPTIONS).get(val),
                                'stats': {
                                    'options': STAGE_TYPE_OPTIONS
                                },
                            }
                        })
    type_contrat_id = Column(
        ForeignKey('type_contrat_option.id'),
        info={'colanderalchemy': {
            'title': u"Type de contrat"
        }})
    type_contrat = relationship(
        'TypeContratOption',
        info={
            'colanderalchemy':
            get_excluded_colanderalchemy(u'Type de contrat'),
            'export': {
                'related_key': 'label'
            },
        },
    )
    employee_quality_id = Column(
        ForeignKey('employee_quality_option.id'),
        info={'colanderalchemy': {
            'title': u"Qualité du salarié"
        }})
    employee_quality = relationship(
        'EmployeeQualityOption',
        info={
            'colanderalchemy':
            get_excluded_colanderalchemy(u"Qualité du salarié"),
            'export': {
                'related_key': 'label'
            },
        })
    # Contract economics: hourly rate, hours, and activity goals.
    taux_horaire = Column(Float(),
                          info={'colanderalchemy': {
                              'title': u"Taux horaire"
                          }})
    num_hours = Column(Float(),
                       info={'colanderalchemy': {
                           'title': u"Nombre d'heures"
                       }})
    goals_amount = Column(
        Float(),
        info={'colanderalchemy': {
            'title': u"Objectif de CA / d'activité"
        }})
    goals_period = Column(String(15),
                          info={
                              'colanderalchemy': {
                                  'title': u"Période de l'objectif"
                              },
                              'export': {
                                  'formatter':
                                  lambda val: dict(PERIOD_OPTIONS).get(val),
                                  'stats': {
                                      'options': PERIOD_OPTIONS
                                  },
                              }
                          })
    amendment_number = Column(
        Integer(), info={'colanderalchemy': {
            'title': u"Numéro de l'avenant"
        }})
    # Exit metadata (only relevant for exit stages).
    type_sortie_id = Column(
        ForeignKey('type_sortie_option.id'),
        info={'colanderalchemy': {
            'title': u"Type de sortie"
        }})
    type_sortie = relationship(
        'TypeSortieOption',
        info={
            'colanderalchemy': get_excluded_colanderalchemy(u"Type de sortie"),
            'export': {
                'related_key': 'label'
            },
        },
    )
    motif_sortie_id = Column(
        ForeignKey('motif_sortie_option.id'),
        info={'colanderalchemy': {
            'title': u"Motif de sortie"
        }})
    motif_sortie = relationship(
        'MotifSortieOption',
        info={
            'colanderalchemy':
            get_excluded_colanderalchemy(u"Motif de sortie"),
            'export': {
                'related_key': 'label'
            },
        },
    )

    @classmethod
    def query(cls, user):
        """Return the given user's career path stages, most recent first."""
        q = super(CareerPath, cls).query()
        q = q.filter(CareerPath.userdatas_id == user)
        return q.order_by(CareerPath.start_date.desc())
예제 #14
0
from sqlalchemy import Table, Column, String, Integer, Float, Boolean, MetaData, create_engine

# NOTE(review): credentials are hard-coded in the URL; move them to
# configuration or environment variables for anything beyond a demo.
engine = create_engine('mysql+pymysql://root:1234@localhost:3306/db1')

meta = MetaData()

# 'data' table: unique name, count defaulting to 1, free-form amount,
# and a validity flag defaulting to False.
data = Table('data', meta, Column('name', String(255), unique=True),
             Column('count', Integer(), default=1), Column('amount', Float()),
             Column('valid', Boolean(), default=False))

# Create the table on the engine (no-op if it already exists).
meta.create_all(engine)

print(repr(meta.tables['data']))
예제 #15
0
 def test_compare_float_no_diff6(self):
     """A Float() default of "5" vs server text "5.0" should compare equal."""
     self._compare_default_roundtrip(
         Float(),
         "5", text("5.0"),
         diff_expected=False
     )
예제 #16
0
def load_unicode_test_data():
    """Loading unicode test dataset from a csv file in the repo.

    Loads the CSV into the ``unicode_test`` table, registers the table
    reference, and creates a word-cloud slice plus a dashboard for it.
    """
    df = pd.read_csv(os.path.join(DATA_FOLDER, 'unicode_utf8_unixnl_test.csv'),
                     encoding="utf-8")
    # generate date/numeric data to complement the text columns
    df['date'] = datetime.datetime.now().date()
    df['value'] = [random.randint(1, 100) for _ in range(len(df))]
    # replace any existing table wholesale; dtype pins the SQL column types
    df.to_sql(  # pylint: disable=no-member
        'unicode_test',
        db.engine,
        if_exists='replace',
        chunksize=500,
        dtype={
            'phrase': String(500),
            'short_phrase': String(10),
            'with_missing': String(100),
            'date': Date(),
            'value': Float(),
        },
        index=False)
    print("Done loading table!")
    print("-" * 80)

    print("Creating table [unicode_test] reference")
    # Upsert the table reference row (merge handles both create and update).
    obj = db.session.query(TBL).filter_by(table_name='unicode_test').first()
    if not obj:
        obj = TBL(table_name='unicode_test')
    obj.main_dttm_col = 'date'
    obj.database = get_or_create_main_db()
    db.session.merge(obj)
    db.session.commit()
    obj.fetch_metadata()
    tbl = obj

    # Parameters for the word-cloud slice built on the new table.
    slice_data = {
        "granularity": "date",
        "groupby": [],
        "metric": 'sum__value',
        "row_limit": config.get("ROW_LIMIT"),
        "since": "100 years ago",
        "until": "now",
        "where": "",
        "viz_type": "word_cloud",
        "size_from": "10",
        "series": "short_phrase",
        "size_to": "70",
        "rotation": "square",
        "limit": "100",
    }

    print("Creating a slice")
    slc = Slice(
        slice_name="Unicode Cloud",
        viz_type='word_cloud',
        datasource_type='table',
        datasource_id=tbl.id,
        params=get_slice_json(slice_data),
    )
    merge_slice(slc)

    print("Creating a dashboard")
    dash = (
        db.session.query(Dash)
        .filter_by(dashboard_title="Unicode Test")
        .first()
    )

    if not dash:
        dash = Dash()
    # Single-cell grid position for the slice (legacy list-based layout).
    pos = {
        "size_y": 4,
        "size_x": 4,
        "col": 1,
        "row": 1,
        "slice_id": slc.id,
    }
    dash.dashboard_title = "Unicode Test"
    dash.position_json = json.dumps([pos], indent=4)
    dash.slug = "unicode-test"
    dash.slices = [slc]
    db.session.merge(dash)
    db.session.commit()
예제 #17
0
class SongStat(schema.Base):
    """ SongStat model, represent the score obtain by a user in a song """

    __tablename__ = 'song_stats'

    # Numeric grade codes as stored in the `grade` column.
    GRADES = {
        0: "AAAA",
        1: "AAA",
        2: "AA",
        3: "A",
        4: "B",
        5: "C",
        6: "D",
        7: "F",
    }

    # Numeric difficulty codes as stored in the `difficulty` column.
    DIFFICULTIES = {
        0: "BEGINNER",
        1: "EASY",
        2: "MEDIUM",
        3: "HARD",
        4: "EXPERT",
    }

    # Step-result ids mapped to the per-judgment counter attribute names below.
    stepid = {
        1: "hit_mine",
        2: "avoid_mine",
        3: "miss",
        4: "bad",
        5: "good",
        6: "great",
        7: "perfect",
        8: "flawless",
        9: "not_held",
        10: "held"
    }

    id = Column(Integer, primary_key=True)

    song_id = Column(Integer, ForeignKey('songs.id'))
    song = relationship("Song", back_populates="stats")

    user_id = Column(Integer, ForeignKey('users.id'))
    user = relationship("User", back_populates="song_stats")

    game_id = Column(Integer, ForeignKey('games.id'))
    game = relationship("Game", back_populates="song_stats")

    # Per-judgment counters (see `stepid` above).
    hit_mine = Column(Integer, default=0)
    avoid_mine = Column(Integer, default=0)
    miss = Column(Integer, default=0)
    bad = Column(Integer, default=0)
    good = Column(Integer, default=0)
    great = Column(Integer, default=0)
    perfect = Column(Integer, default=0)
    flawless = Column(Integer, default=0)
    not_held = Column(Integer, default=0)
    held = Column(Integer, default=0)

    max_combo = Column(Integer, default=0)
    options = Column(Text, default="")
    score = Column(Integer, default=0)
    grade = Column(Integer, default=0, index=True)       # code into GRADES
    difficulty = Column(Integer, default=0)              # code into DIFFICULTIES
    feet = Column(Integer, default=0, index=True)        # numeric difficulty rating

    percentage = Column(Float(precision=5))

    duration = Column(Integer, default=0)

    # Packed per-note results; see encode_stats/decode_stats.
    raw_stats = Column(LargeBinary)

    created_at = Column(DateTime, default=datetime.datetime.now)
    updated_at = Column(DateTime, onupdate=datetime.datetime.now)

    def __repr__(self):
        return "<SongStat #%s score=%s (%s%%)>" % (self.id, self.score,
                                                   self.percentage)

    @property
    def lit_difficulty(self):
        """ Difficulty as a string (EASY, MEDIUM, HARD, ...) """

        if self.difficulty is None:
            return "Unknown"

        return self.DIFFICULTIES.get(self.difficulty, str(self.difficulty))

    @property
    def full_difficulty(self):
        """ Difficulty with feet as a string. eg EASY (3) """

        return "%s (%s)" % (self.lit_difficulty, self.feet)

    @property
    def lit_grade(self):
        """ Grade as a string (AA, E, F, B, ...)"""
        if self.grade is None:
            return "Unknown"

        # NOTE(review): unknown codes fall back to the raw int, unlike
        # lit_difficulty which stringifies — confirm this asymmetry is wanted.
        return self.GRADES.get(self.grade, self.grade)

    def pretty_result(self, room_id=None, color=False):
        """ Return a pretty result for the result """

        # When color is off, fall back to an identity formatter.
        color_func = with_color if color else lambda x, **_: x

        return "{difficulty}: {user_name} {grade} ({percentage:.2f}%) on {date}".format(
            difficulty=color_func(self.full_difficulty,
                                  color=nick_color(self.lit_difficulty)),
            user_name=self.user.fullname_colored(room_id)
            if color else self.user.fullname(room_id),
            grade=color_func(self.lit_grade),
            percentage=self.percentage,
            date=self.created_at.strftime("%d/%m/%y"))

    def calc_percentage(self, config=None):
        """ Calculate the percentage given the input

        Each judgment counter is weighted by `config`; the result is the
        achieved weight over the maximum achievable weight, as a percentage.
        """

        if not config:
            config = {
                "not_held": 0,
                "miss": 0,
                "bad": 0,
                "good": 0,
                "held": 3,
                "hit_mine": -2,
                "great": 1,
                "perfect": 2,
                "flawless": 3
            }

        max_weight = max(config.values())
        percentage = 0
        nb_note = 0
        for note, weight in config.items():
            nb = getattr(self, note, 0)
            percentage += nb * weight
            nb_note += nb * max_weight

        # Guard against division by zero when no notes were judged.
        return percentage / nb_note * 100 if nb_note > 0 else 0

    def calc_xp(self, config=None):
        """ Calculate XP earned: judgment counts weighted by `config`,
        averaged over the number of configured judgments. """
        if not config:
            config = {
                "miss": 0,
                "bad": 1,
                "good": 2,
                "great": 3,
                "perfect": 4,
                "flawless": 5
            }

        xp = 0
        for note, weight in config.items():
            nb = getattr(self, note, 0)
            xp += nb * weight

        return int(xp / len(config))

    @staticmethod
    def encode_stats(raw_data):
        """ Pack a list of per-note result dicts into the binary payload
        stored in raw_stats. """
        binary = BinaryStats(nb_notes=len(raw_data), stats=[])

        for stats in raw_data:
            binary["stats"].append({
                "grade": stats["grade"],
                "stepid": stats["stepid"],
                "score": stats["score"],
                "combo": stats["combo"],
                "health": stats["health"],
                "time": stats["time"].seconds
            })

        return binary.payload

    @staticmethod
    def decode_stats(binary):
        """ Inverse of encode_stats: unpack the per-note result list. """
        return BinaryStats.from_payload(binary)["stats"]

    @property
    def stats(self):
        # Decoded view of raw_stats.
        return self.decode_stats(self.raw_stats)

    @property
    def nb_notes(self):
        """ Total number of judged steps, summed over all counters. """
        return sum(getattr(self, note, 0) for note in self.stepid.values())
예제 #18
0
def load_long_lat_data():
    """Loading lat/long data from a csv file in the repo"""
    # Decompress and parse the bundled San Francisco sample data set.
    with gzip.open(os.path.join(DATA_FOLDER, 'san_francisco.csv.gz')) as f:
        df = pd.read_csv(f, encoding="utf-8")

    # Synthesize the extra columns the mapbox demo expects.
    df['date'] = datetime.datetime.now().date()
    df['occupancy'] = [random.randint(1, 6) for _ in range(len(df))]
    df['radius_miles'] = [random.uniform(1, 3) for _ in range(len(df))]

    # Explicit SQL types for the loaded columns.
    column_types = {
        'longitude': Float(),
        'latitude': Float(),
        'number': Float(),
        'street': String(100),
        'unit': String(10),
        'city': String(50),
        'district': String(50),
        'region': String(50),
        'postcode': Float(),
        'id': String(100),
        'date': Date(),
        'occupancy': Float(),
        'radius_miles': Float(),
    }
    df.to_sql(  # pylint: disable=no-member
        'long_lat',
        db.engine,
        if_exists='replace',
        chunksize=500,
        dtype=column_types,
        index=False)
    print("Done loading table!")
    print("-" * 80)

    print("Creating table reference")
    # Register (or refresh) the physical table in the metadata store.
    tbl = db.session.query(TBL).filter_by(table_name='long_lat').first()
    if not tbl:
        tbl = TBL(table_name='long_lat')
    tbl.main_dttm_col = 'date'
    tbl.database = get_or_create_main_db()
    db.session.merge(tbl)
    db.session.commit()
    tbl.fetch_metadata()

    # Default visualization parameters for the demo slice.
    slice_data = {
        "granularity": "day",
        "since": "2014-01-01",
        "until": "now",
        "where": "",
        "viz_type": "mapbox",
        "all_columns_x": "LON",
        "all_columns_y": "LAT",
        "mapbox_style": "mapbox://styles/mapbox/light-v9",
        "all_columns": ["occupancy"],
        "row_limit": 500000,
    }

    print("Creating a slice")
    slc = Slice(
        slice_name="Mapbox Long/Lat",
        viz_type='mapbox',
        datasource_type='table',
        datasource_id=tbl.id,
        params=get_slice_json(slice_data),
    )
    misc_dash_slices.append(slc.slice_name)
    merge_slice(slc)
예제 #19
0
        dt = datetime.strptime(s.strip(), DATEFMT)
    except ValueError:
        # Antelope convention
        dt = UTCDateTime(float(s)).datetime
    return dt


# Flat-file style column definitions: the ``info`` dict carries fixed-width
# parsing metadata (default value, parser callable, numpy dtype, field width
# and print format) — presumably used when reading/writing datascope/CSS3.0
# style text tables; parse_str/parse_float are defined elsewhere in the file.
algorithm = Column(String(15),
                   info={
                       'default': '-',
                       'parse': parse_str,
                       'dtype': 'a15',
                       'width': 15,
                       'format': '15.15s'
                   })
# Amplitude measurement; -1.0 marks a missing value.
amp = Column(Float(24),
             info={
                 'default': -1.0,
                 'parse': parse_float,
                 'dtype': 'float',
                 'width': 11,
                 'format': '11.2f'
             })
ampid = Column(Integer,
               nullable=False,
               info={
                   'default': -1,
                   'parse': parse_int,
                   'dtype': 'int',
                   'width': 9,
                   'format': '9d'
    def __init__(self,
                 name,
                 db,
                 db_schema=None,
                 description=None,
                 generate_days=10,
                 drop_existing=False):
        """Build a turbine sample entity type fed by simulated data.

        :param name: logical entity type name
        :param db: database object that owns the entity tables
        :param db_schema: schema override; None uses the default schema
        :param description: optional human-readable description
        :param generate_days: days of historical data to simulate
        :param drop_existing: drop and recreate existing entity tables
        """

        # This sample entity defines no constants or granularities.
        constants = []
        granularities = []

        # metric (data item) columns
        columns = [
            Column('TURBINE_ID', String(50)),
            Column('TEMPERATURE', Float()),
            Column('PRESSURE', Float()),
            Column('STEP', Float()),
        ]

        # dimension columns
        dimension_columns = [
            Column('CLIENT', String(50)),
            Column('ORGANIZATION', String(50)),
            Column('FUNCTION', String(50)),
        ]

        # simulation settings for the data generator
        # NOTE(review): the generator's own 'drop_existing' stays False even
        # when the constructor's drop_existing is True — confirm intended.
        sim = {
            'freq': '5min',
            'auto_entity_count': 10,
            'data_item_mean': {
                'TEMPERATURE': 22,
                'STEP': 1,
                'PRESSURE': 50,
                'TURBINE_ID': 1
            },
            'data_item_domain': {
                'SITE': ['Riverside MFG', 'Collonade MFG', 'Mariners Way MFG'],
                'ORGANIZATION': [
                    'Engineering', 'Supply Chain', 'Production', 'Quality',
                    'Other'
                ],
                'FUNCTION': [
                    'New Products', 'Packaging', 'Planning', 'Warehouse',
                    'Logistics', 'Customer Service', 'Line 1', 'Line 2',
                    'Quality Control', 'Calibration', 'Reliability'
                ]
            },
            'drop_existing': False
        }
        # The generator simulates incoming device data for the entity.
        functions = [bif.EntityDataGenerator(ids=None, parameters=sim)]

        # data type for operator cannot be inferred automatically;
        # state it explicitly
        output_items_extended_metadata = {}

        super().__init__(
            name=name,
            db=db,
            constants=constants,
            granularities=granularities,
            columns=columns,
            functions=functions,
            dimension_columns=dimension_columns,
            output_items_extended_metadata=output_items_extended_metadata,
            generate_days=generate_days,
            drop_existing=drop_existing,
            description=description,
            db_schema=db_schema)
예제 #21
0
class Measurement(Base, StatusMixin):
    """A single numeric reading attached to a process-info record."""

    value = Column(Float(32))  # the measured value (32-bit precision hint)
    # Owning process info row; references status_processinfo.id.
    process_info_id = Column(Integer, ForeignKey('status_processinfo.id'))
    pub_date = Column(DateTime, default=func.now())  # DB-side insert time
예제 #22
0
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import Table, Column, Float, Integer, String, Text, DateTime
from services.database import Base, db_session, metadata
import datetime
from sqlalchemy.orm import mapper


class ItemPrice(object):
    """Price record for an item, classically mapped onto ``item_prices``."""

    # Query property bound to the shared scoped session.
    query = db_session.query_property()

    def __init__(self, item_type, category, subcategory, price):
        """Store the identifying type/category keys and the price."""
        self.item_type = item_type
        self.item_category = category
        self.item_subcategory = subcategory
        self.price = price


# Imperative table definition backing ItemPrice; extend_existing allows the
# module to be imported repeatedly against the same MetaData without error.
item_prices = Table('item_prices',
                    metadata,
                    Column('id', Integer(), primary_key=True),
                    Column('item_type', Integer()),
                    Column('item_category', Integer()),
                    Column('item_subcategory', Integer()),
                    Column('price', Float(), nullable=True),
                    extend_existing=True)
# Classical mapping: exposes the table's columns as ItemPrice attributes.
mapper(ItemPrice, item_prices)
예제 #23
0
class GeekTimeCoin(DBORM):
    """Per-user coin balance row."""

    __tablename__ = 'geektimecoin'
    coin_id = Column(Integer(), primary_key=True, autoincrement=True)
    # Owning user; references geektimeuser.user_id.
    coin_user_id = Column(Integer(), ForeignKey('geektimeuser.user_id'))
    total_coin = Column(Float(), default=0)  # balance starts at zero
예제 #24
0
class Player(Base):
    """Player row keyed by an auto-increment surrogate id."""

    __tablename__ = "player"
    player_id = Column(Integer, primary_key=True, autoincrement=True)
    # NOTE(review): Float takes (precision, asdecimal, ...), so Float(3, 2)
    # passes 2 as *asdecimal* (truthy -> values come back as Decimal);
    # Numeric(3, 2) was probably intended — confirm before changing.
    height = Column(Float(3, 2))
예제 #25
0
파일: db.py 프로젝트: Cel1ca/Monkey
    class TextInt(TypeDecorator):
        '''Modify Text type for integers.

        Stores Python ints as their decimal-string form in a Text column
        and converts them back on read.  NULL (None) passes through in
        both directions.
        '''
        impl = Text

        def process_bind_param(self, value, dialect):
            # Pass NULL through untouched so nullable columns do not end
            # up holding the literal string "None".
            if value is None:
                return None
            return str(value)

        def process_result_value(self, value, dialect):
            # A NULL in the database must come back as None; the original
            # int(None) raised TypeError.
            if value is None:
                return None
            return int(value)

    # Backend-specific storage-type aliases.  Presumably this branch targets
    # backends without an unsigned 64-bit integer type, so unsigned-huge
    # values are round-tripped through text via TextInt — confirm.
    TINY_TYPE = SmallInteger
    MEDIUM_TYPE = Integer
    UNSIGNED_HUGE_TYPE = TextInt
    HUGE_TYPE = Integer
    PRIMARY_HUGE_TYPE = HUGE_TYPE 
    FLOAT_TYPE = Float(asdecimal=False)  # always return Python floats


class Team(Enum):
    """Team affiliation constants (0 = unaffiliated)."""

    none = 0
    mystic = 1
    valor = 2
    # Historical misspelling kept for backward compatibility; ``instinct``
    # is an alias of the same value so new code can use the correct name.
    instict = 3
    instinct = 3


def combine_key(sighting):
    """Return the (encounter_id, spawn_id) pair identifying a sighting."""
    encounter = sighting['encounter_id']
    spawn = sighting['spawn_id']
    return encounter, spawn


class SightingCache:
    """Simple cache for storing actual sightings
예제 #26
0
 def test_compare_float_str(self):
     # Round-trip a Float() server default given as the bare string "5.2";
     # presumably asserts the default compares equal after reflection.
     self._compare_default_roundtrip(
         Float(),
         "5.2",
     )
예제 #27
0
entity type, as well as the functions, constants and granularities that apply to it.

The "pump1" entity type below has 3 input data items. Data items are denoted by the
SqlAlchemy column objects site, temp and pressure.

It also has a function EntityDataGenerator

The keyword args dict specifies extra properties. The database schema is only
needed if you are not using the default schema. You can also rename the timestamp.

'''
entity_name = 'pump1'
db_schema = None  # replace if you are not using the default schema
# Start from a clean slate so repeated runs of this sample do not clash.
db.drop_table(entity_name, schema=db_schema)
# Entity type with three data items (site, temp, pressure) plus a data
# generator function that fills them with simulated values; the kwargs dict
# renames the timestamp column and selects the schema.
entity = EntityType(
    entity_name, db, Column('site', String(50)), Column('temp', Float()),
    Column('pressure', Float()),
    bif.EntityDataGenerator(ids=['A01', 'A02', 'B01'],
                            data_item='is_generated'), **{
                                '_timestamp': 'evt_timestamp',
                                '_db_schema': db_schema
                            })
'''
When creating an EntityType object you will need to specify the name of the entity, the database
object that will contain entity data

After creating an EntityType you will need to register it so that it visible in the UI.
To also register the functions and constants associated with the entity type, specify
'publish_kpis' = True.
'''
entity.register(raise_error=False)
예제 #28
0
 def test_compare_float_text(self):
     # Same as test_compare_float_str but the default is wrapped in a SQL
     # text() construct; presumably asserts it round-trips equivalently.
     self._compare_default_roundtrip(
         Float(),
         text("5.2"),
     )
예제 #29
0
    def init_postigis(self):
        """Creates all the tables and schema.
        The table schema is based on the
        [WazeCCPProcessor](github.com/LouisvilleMetro/WazeCCPProcessor)
        project in order to achieve maximum compatibility.

        It creates a schema: `waze`
        and the tables:

        - jams
        - irregularities
        - alerts
        - roads
        - alert_types
        - coordinates
        - coordinate_type

        Lookup rows for coordinate_type, roads and alert_types are also
        inserted; duplicate inserts are silently ignored so the method is
        safe to call repeatedly.
        """

        # On a forced export wipe the whole schema first.  Best-effort:
        # the schema may simply not exist yet.  (Narrowed from a bare
        # except so SystemExit/KeyboardInterrupt are not swallowed.)
        if self.force_export:
            try:
                self.engine_postgis.execute('DROP SCHEMA waze CASCADE')
            except Exception:
                pass

        try:
            self.engine_postgis.execute("CREATE SCHEMA waze")  # create db
        except Exception:  # schema already exists
            pass

        # Single MetaData instance for all waze tables (the original
        # created it twice; once is enough).
        metadata = sa.MetaData(self.engine_postgis)
        self.tables_postgis = {}

        self.tables_postgis['alerts'] = sa.Table(
            'alerts',
            metadata,
            Column(
                "id",
                INTEGER,
                primary_key=True,
            ),
            Column("uuid", Text, nullable=False, index=True),
            Column("pub_millis", BigInteger, nullable=False),
            Column("pub_utc_date", TIMESTAMP, index=True),
            Column("road_type", INTEGER, index=True),
            Column("location", JSON),
            Column("location_geo", Geometry('POINT')),
            Column("street", Text),
            Column("city", Text),
            Column("country", Text),
            Column("magvar", INTEGER),
            Column("reliability", INTEGER, index=True),
            Column("report_description", Text),
            Column("report_rating", INTEGER),
            Column("confidence", INTEGER, index=True),
            Column("type", Text, index=True),
            Column("subtype", Text, index=True),
            Column("report_by_municipality_user", BOOLEAN),
            Column("thumbs_up", INTEGER, index=True),
            Column("jam_uuid", Text, index=True),
            Column("datafile_id", BigInteger, nullable=False, index=True),
            schema='waze',
        )

        self.tables_postgis['jams'] = sa.Table(
            'jams',
            metadata,
            Column("id", INTEGER, primary_key=True, nullable=False),
            Column("uuid", Text, nullable=False, index=True),
            Column("pub_millis", BigInteger, nullable=False),
            Column("pub_utc_date", TIMESTAMP, index=True),
            Column("start_node", Text),
            Column("end_node", Text),
            Column("road_type", INTEGER),
            Column("street", Text, index=True),
            Column("city", Text),
            Column("country", Text),
            Column("delay", INTEGER, index=True),
            Column("speed", Float, index=True),
            Column("speed_kmh", Float, index=True),
            Column("length", INTEGER, index=True),
            Column("turn_type", Text),
            Column("level", INTEGER, index=True),
            Column("blocking_alert_id", Text),
            Column("line", JSON),
            Column("line_geo", Geometry('LINESTRING')),
            Column("type", Text, index=True),
            Column("turn_line", JSON),
            Column("turn_line_geo", Geometry('LINESTRING')),
            Column("datafile_id", BigInteger, nullable=False, index=True),
            schema='waze',
        )

        self.tables_postgis['irregularities'] = sa.Table(
            'irregularities',
            metadata,
            Column("id", INTEGER, primary_key=True, nullable=False),
            Column("uuid", Text, nullable=False, index=True),
            Column("detection_date_millis", BigInteger, nullable=False),
            Column("detection_date", Text),
            Column("detection_utc_date", TIMESTAMP, index=True),
            Column("update_date_millis", BigInteger, nullable=False),
            Column("update_date", Text),
            Column("update_utc_date", TIMESTAMP, index=True),
            Column("street", Text, index=True),
            Column("city", Text),
            Column("country", Text),
            Column("is_highway", BOOLEAN),
            Column("speed", Float, index=True),
            Column("regular_speed", Float, index=True),
            Column("delay_seconds", INTEGER, index=True),
            Column("seconds", INTEGER, index=True),
            Column("length", INTEGER, index=True),
            Column("trend", INTEGER, index=True),
            Column("type", Text, index=True),
            Column("severity", Float, index=True),
            Column("jam_level", INTEGER, index=True),
            Column("drivers_count", INTEGER),
            Column("alerts_count", INTEGER, index=True),
            Column("n_thumbs_up", INTEGER, index=True),
            Column("n_comments", INTEGER),
            Column("n_images", INTEGER),
            Column("line", JSON),
            Column("line_geo", Geometry('LINESTRING')),
            Column("cause_type", Text),
            Column("start_node", Text),
            Column("end_node", Text),
            Column("datafile_id", BigInteger, nullable=False, index=True),
            schema='waze',
        )

        self.tables_postgis['coordinate_type'] = sa.Table(
            'coordinate_type',
            metadata,
            Column("id", INTEGER, primary_key=True, nullable=False),
            Column("type_name", Text, nullable=False),
            schema='waze',
        )

        self.tables_postgis['coordinates'] = sa.Table(
            'coordinates',
            metadata,
            Column("id", VARCHAR(40), primary_key=True, nullable=False),
            Column("latitude", Float(8), nullable=False),
            Column("longitude", Float(8), nullable=False),
            Column("order", INTEGER, nullable=False),
            Column("jam_id", INTEGER, ForeignKey('waze.jams.id')),
            Column("irregularity_id", INTEGER,
                   ForeignKey('waze.irregularities.id')),
            Column("alert_id", INTEGER, ForeignKey('waze.alerts.id')),
            Column("coordinate_type_id", INTEGER,
                   ForeignKey('waze.coordinate_type.id')),
            schema='waze',
        )

        self.tables_postgis['roads'] = sa.Table(
            'roads',
            metadata,
            Column("id", INTEGER, primary_key=True, nullable=False),
            Column("value", INTEGER, nullable=False),
            Column("name", VARCHAR(100), nullable=False),
            schema='waze',
        )

        self.tables_postgis['alert_types'] = sa.Table(
            'alert_types',
            metadata,
            Column("id", INTEGER, primary_key=True, nullable=False),
            Column("type", Text, nullable=False),
            Column("subtype", Text),
            schema='waze',
        )

        metadata.create_all(self.engine_postgis)

        # Uniqueness constraints; ProgrammingError means they already exist.
        try:
            self.engine_postgis.execute("""ALTER TABLE waze.roads
                    ADD CONSTRAINT roads_unique_combo UNIQUE(value, name);""")
        except sa.exc.ProgrammingError:
            pass

        try:
            self.engine_postgis.execute("""ALTER TABLE waze.alert_types
                    ADD CONSTRAINT alert_types_unique_combo 
                        UNIQUE(type, subtype);""")
        except sa.exc.ProgrammingError:
            pass

        # Seed the lookup tables.  IntegrityError means the rows are
        # already present, which is fine.
        with self.engine_postgis.connect() as conn:
            try:
                conn.execute(
                    self.tables_postgis['coordinate_type'].insert(),
                    [{'id': 1, 'type_name': 'Line'},
                     {'id': 2, 'type_name': 'Turn Line'},
                     {'id': 3, 'type_name': 'Location'}])
            except sa.exc.IntegrityError:
                pass

            # Road-type lookup: value is the 1-based position in this list.
            road_names = [
                'Streets', 'Primary Street', 'Freeways', 'Ramps', 'Trails',
                'Primary', 'Secondary', '4X4 Trails', 'Walkway',
                'Pedestrian', 'Exit', '?', '?', '4X4 Trails',
                'Ferry crossing', 'Stairway', 'Private road', 'Railroads',
                'Runway/Taxiway', 'Parking lot road', 'Service road']
            try:
                conn.execute(
                    self.tables_postgis['roads'].insert(),
                    [{'value': value, 'name': name}
                     for value, name in enumerate(road_names, start=1)])
            except sa.exc.IntegrityError:
                pass

            # Alert-type lookup: one row per (type, subtype) combination.
            alert_type_rows = []
            for alert_type, subtypes in [
                ('ACCIDENT', ['ACCIDENT_MINOR', 'ACCIDENT_MAJOR',
                              'NO_SUBTYPE']),
                ('JAM', ['JAM_MODERATE_TRAFFIC', 'JAM_HEAVY_TRAFFIC',
                         'JAM_STAND_STILL_TRAFFIC', 'JAM_LIGHT_TRAFFIC',
                         'NO_SUBTYPE']),
                ('WEATHERHAZARD/HAZARD', [
                    'HAZARD_ON_ROAD', 'HAZARD_ON_SHOULDER', 'HAZARD_WEATHER',
                    'HAZARD_ON_ROAD_OBJECT', 'HAZARD_ON_ROAD_POT_HOLE',
                    'HAZARD_ON_ROAD_ROAD_KILL',
                    'HAZARD_ON_SHOULDER_CAR_STOPPED',
                    'HAZARD_ON_SHOULDER_ANIMALS',
                    'HAZARD_ON_SHOULDER_MISSING_SIGN', 'HAZARD_WEATHER_FOG',
                    'HAZARD_WEATHER_HAIL', 'HAZARD_WEATHER_HEAVY_RAIN',
                    'HAZARD_WEATHER_HEAVY_SNOW', 'HAZARD_WEATHER_FLOOD',
                    'HAZARD_WEATHER_MONSOON', 'HAZARD_WEATHER_TORNADO',
                    'HAZARD_WEATHER_HEAT_WAVE', 'HAZARD_WEATHER_HURRICANE',
                    'HAZARD_WEATHER_FREEZING_RAIN',
                    'HAZARD_ON_ROAD_LANE_CLOSED', 'HAZARD_ON_ROAD_OIL',
                    'HAZARD_ON_ROAD_ICE', 'HAZARD_ON_ROAD_CONSTRUCTION',
                    'HAZARD_ON_ROAD_CAR_STOPPED',
                    'HAZARD_ON_ROAD_TRAFFIC_LIGHT_FAULT', 'NO_SUBTYPE']),
                ('MISC', ['NO_SUBTYPE']),
                ('CONSTRUCTION', ['NO_SUBTYPE']),
                ('ROAD_CLOSED', ['ROAD_CLOSED_HAZARD',
                                 'ROAD_CLOSED_CONSTRUCTION',
                                 'ROAD_CLOSED_EVENT']),
            ]:
                alert_type_rows.extend(
                    {'type': alert_type, 'subtype': subtype}
                    for subtype in subtypes)
            try:
                conn.execute(self.tables_postgis['alert_types'].insert(),
                             alert_type_rows)
            except sa.exc.IntegrityError:
                pass
예제 #30
0
	def get_table_object(self,tablename,engine):
		"""Create (if missing) the Redshift table-monitor table named
		*tablename* on *engine* and classically map ``RSTableMonitor``
		onto it, returning the mapped class.

		:param tablename: physical table name to create/map
		:param engine: SQLAlchemy engine to bind the MetaData to
		:return: the ``RSTableMonitor`` class mapped to the table
		"""
		metadata = MetaData(bind=engine)
		rs_tab_monitor = Table(tablename, metadata,
			Column('id', Integer(), primary_key=True),
			Column('schemaname', String(255), nullable=False),
			Column('tablename', String(255), nullable=False),
			Column('pct_mem_used', Float()),
			Column('unsorted_rows', Float()),
			Column('statistics', Float()),
			Column('is_encoded', String(255)),
			Column('diststyle', String(255)),
			Column('sortkey1', String(255)),
			Column('skew_sortkey1', String(255)),
			Column('skew_rows', Float()),
			Column('m1_num_scan', Float()),
			Column('m1_row_scan', Float()),
			Column('m1_avg_time', Float()),
			Column('w1_num_scan', Float()),
			Column('w1_row_scan', Float()),
			Column('w1_avg_time', Float()),
			Column('d1_num_scan', Float()),
			Column('d1_row_scan', Float()),
			Column('d1_avg_time', Float()),
			Column('h6_num_scan', Float()),
			Column('h6_row_scan', Float()),
			Column('h6_avg_time', Float()),
			Column('h3_num_scan', Float()),
			Column('h3_row_scan', Float()),
			Column('h3_avg_time', Float()),
			# BUG FIX: the default must be a callable.  The original passed
			# utcnow()+ist_delta evaluated once at definition time, stamping
			# every inserted row with the process start time instead of the
			# actual insert time.
			Column('last_modified_on', DateTime(),
				default=lambda: datetime.datetime.utcnow() + ist_delta))
		rs_tab_monitor.create(checkfirst=True)
		#clear_mappers()
		mapper(RSTableMonitor, rs_tab_monitor)
		return RSTableMonitor