Example #1
		SQLFunctionArgument('name', Unicode(255)),
		SQLFunctionArgument('pass', Unicode(255))
	),
	returns=Boolean(),
	comment='Check auth information',
	writes_sql=False
)

AcctAddProcedure = SQLFunction(
	'acct_add',
	args=(
		InArgument('aeid', UInt32()),
		InArgument('username', Unicode(255)),
		InArgument('tin', Traffic()),
		InArgument('teg', Traffic()),
		InArgument('ts', DateTime())
	),
	comment='Add accounting information',
	label='aafunc',
	is_procedure=True
)

AcctAuthzProcedure = SQLFunction(
	'acct_authz',
	args=(
		InArgument('name', Unicode(255)),
	),
	comment='Get authorized account info',
	writes_sql=False,
	label='authzfunc',
	is_procedure=True
)
Example #2
class File(db.Model):

    __tablename__ = "release_files"

    @declared_attr
    def __table_args__(cls):  # noqa
        return (
            ForeignKeyConstraint(
                ["name", "version"],
                ["releases.name", "releases.version"],
                onupdate="CASCADE",
            ),
            CheckConstraint("sha256_digest ~* '^[A-F0-9]{64}$'"),
            CheckConstraint("blake2_256_digest ~* '^[A-F0-9]{64}$'"),
            Index("release_files_name_version_idx", "name", "version"),
            Index("release_files_packagetype_idx", "packagetype"),
            Index("release_files_version_idx", "version"),
            Index(
                "release_files_single_sdist",
                "name",
                "version",
                "packagetype",
                unique=True,
                postgresql_where=((cls.packagetype == 'sdist') &
                                  (cls.allow_multiple_sdist == False)  # noqa
                                  ),
            ),
        )

    name = Column(Text)
    version = Column(Text)
    python_version = Column(Text)
    requires_python = Column(Text)
    packagetype = Column(
        Enum(
            "bdist_dmg",
            "bdist_dumb",
            "bdist_egg",
            "bdist_msi",
            "bdist_rpm",
            "bdist_wheel",
            "bdist_wininst",
            "sdist",
        ),
    )
    comment_text = Column(Text)
    filename = Column(Text, unique=True)
    path = Column(Text, unique=True, nullable=False)
    size = Column(Integer)
    has_signature = Column(Boolean)
    md5_digest = Column(Text, unique=True, nullable=False)
    sha256_digest = Column(CIText, unique=True, nullable=False)
    blake2_256_digest = Column(CIText, unique=True, nullable=False)
    downloads = Column(Integer, server_default=sql.text("0"))
    upload_time = Column(DateTime(timezone=False), server_default=func.now())
    # We need this column to allow us to handle the currently existing "double"
    # sdists that exist in our database. Eventually we should try to get rid
    # of all of them and then remove this column.
    allow_multiple_sdist = Column(
        Boolean,
        nullable=False,
        server_default=sql.false(),
    )

    @hybrid_property
    def pgp_path(self):
        return self.path + ".asc"

    @pgp_path.expression
    def pgp_path(self):
        return func.concat(self.path, ".asc")

    @validates("requires_python")
    def validates_requires_python(self, *args, **kwargs):
        raise RuntimeError("Cannot set File.requires_python")
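
The pgp_path hybrid property above works at two levels: on an instance it is plain Python string concatenation, while the @pgp_path.expression variant renders SQL concat() so the same attribute can be used in filters. A brief usage sketch (the session object and the literal path are illustrative assumptions):

# Instance level: the Python getter runs, returning e.g. ".../foo.tar.gz.asc".
signature_path = some_file.pgp_path

# Class level: the expression variant is used, so the comparison compiles
# into SQL as concat(release_files.path, '.asc') = :param.
f = (
    session.query(File)
    .filter(File.pgp_path == "source/f/foo/foo-1.0.tar.gz.asc")
    .one_or_none()
)
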
Example #3
def load_world_bank_health_n_pop():
    """Loads the world bank health dataset, slices and a dashboard"""
    tbl_name = 'wb_health_population'
    with gzip.open(os.path.join(DATA_FOLDER, 'countries.json.gz')) as f:
        pdf = pd.read_json(f)
    pdf.columns = [col.replace('.', '_') for col in pdf.columns]
    pdf.year = pd.to_datetime(pdf.year)
    pdf.to_sql(tbl_name,
               db.engine,
               if_exists='replace',
               chunksize=50,
               dtype={
                   'year': DateTime(),
                   'country_code': String(3),
                   'country_name': String(255),
                   'region': String(255),
               },
               index=False)

    print("Creating table [wb_health_population] reference")
    tbl = db.session.query(TBL).filter_by(table_name=tbl_name).first()
    if not tbl:
        tbl = TBL(table_name=tbl_name)
    tbl.description = utils.readfile(os.path.join(DATA_FOLDER, 'countries.md'))
    tbl.main_dttm_col = 'year'
    tbl.is_featured = True
    tbl.database = get_or_create_main_db()
    db.session.merge(tbl)
    db.session.commit()
    tbl.fetch_metadata()

    defaults = {
        "compare_lag": "10",
        "compare_suffix": "o10Y",
        "limit": "25",
        "granularity": "year",
        "groupby": [],
        "metric": 'sum__SP_POP_TOTL',
        "metrics": ["sum__SP_POP_TOTL"],
        "row_limit": config.get("ROW_LIMIT"),
        "since": "2014-01-01",
        "until": "2014-01-02",
        "where": "",
        "markup_type": "markdown",
        "country_fieldtype": "cca3",
        "secondary_metric": "sum__SP_POP_TOTL",
        "entity": "country_code",
        "show_bubbles": True,
    }

    print("Creating slices")
    slices = [
        Slice(slice_name="Region Filter",
              viz_type='filter_box',
              datasource_type='table',
              datasource_id=tbl.id,
              params=get_slice_json(defaults,
                                    viz_type='filter_box',
                                    groupby=['region', 'country_name'])),
        Slice(slice_name="World's Population",
              viz_type='big_number',
              datasource_type='table',
              datasource_id=tbl.id,
              params=get_slice_json(defaults,
                                    since='2000',
                                    viz_type='big_number',
                                    compare_lag="10",
                                    metric='sum__SP_POP_TOTL',
                                    compare_suffix="over 10Y")),
        Slice(slice_name="Most Populated Countries",
              viz_type='table',
              datasource_type='table',
              datasource_id=tbl.id,
              params=get_slice_json(defaults,
                                    viz_type='table',
                                    metrics=["sum__SP_POP_TOTL"],
                                    groupby=['country_name'])),
        Slice(slice_name="Growth Rate",
              viz_type='line',
              datasource_type='table',
              datasource_id=tbl.id,
              params=get_slice_json(defaults,
                                    viz_type='line',
                                    since="1960-01-01",
                                    metrics=["sum__SP_POP_TOTL"],
                                    num_period_compare="10",
                                    groupby=['country_name'])),
        Slice(slice_name="% Rural",
              viz_type='world_map',
              datasource_type='table',
              datasource_id=tbl.id,
              params=get_slice_json(defaults,
                                    viz_type='world_map',
                                    metric="sum__SP_RUR_TOTL_ZS",
                                    num_period_compare="10")),
        Slice(slice_name="Life Expectancy VS Rural %",
              viz_type='bubble',
              datasource_type='table',
              datasource_id=tbl.id,
              params=get_slice_json(
                  defaults,
                  viz_type='bubble',
                  since="2011-01-01",
                  until="2011-01-02",
                  series="region",
                  limit=0,
                  entity="country_name",
                  x="sum__SP_RUR_TOTL_ZS",
                  y="sum__SP_DYN_LE00_IN",
                  size="sum__SP_POP_TOTL",
                  max_bubble_size="50",
                  filters=[{
                      "col":
                      "country_code",
                      "val": [
                          "TCA",
                          "MNP",
                          "DMA",
                          "MHL",
                          "MCO",
                          "SXM",
                          "CYM",
                          "TUV",
                          "IMY",
                          "KNA",
                          "ASM",
                          "ADO",
                          "AMA",
                          "PLW",
                      ],
                      "op":
                      "not in"
                  }],
              )),
        Slice(slice_name="Rural Breakdown",
              viz_type='sunburst',
              datasource_type='table',
              datasource_id=tbl.id,
              params=get_slice_json(
                  defaults,
                  viz_type='sunburst',
                  groupby=["region", "country_name"],
                  secondary_metric="sum__SP_RUR_TOTL",
                  since="2011-01-01",
                  until="2011-01-01",
              )),
        Slice(slice_name="World's Pop Growth",
              viz_type='area',
              datasource_type='table',
              datasource_id=tbl.id,
              params=get_slice_json(
                  defaults,
                  since="1960-01-01",
                  until="now",
                  viz_type='area',
                  groupby=["region"],
              )),
        Slice(slice_name="Box plot",
              viz_type='box_plot',
              datasource_type='table',
              datasource_id=tbl.id,
              params=get_slice_json(
                  defaults,
                  since="1960-01-01",
                  until="now",
                  whisker_options="Min/max (no outliers)",
                  viz_type='box_plot',
                  groupby=["region"],
              )),
        Slice(slice_name="Treemap",
              viz_type='treemap',
              datasource_type='table',
              datasource_id=tbl.id,
              params=get_slice_json(
                  defaults,
                  since="1960-01-01",
                  until="now",
                  viz_type='treemap',
                  metrics=["sum__SP_POP_TOTL"],
                  groupby=["region", "country_code"],
              )),
        Slice(slice_name="Parallel Coordinates",
              viz_type='para',
              datasource_type='table',
              datasource_id=tbl.id,
              params=get_slice_json(
                  defaults,
                  since="2011-01-01",
                  until="2011-01-01",
                  viz_type='para',
                  limit=100,
                  metrics=[
                      "sum__SP_POP_TOTL", 'sum__SP_RUR_TOTL_ZS',
                      'sum__SH_DYN_AIDS'
                  ],
                  secondary_metric='sum__SP_POP_TOTL',
                  series="country_name",
              )),
    ]
    misc_dash_slices.append(slices[-1].slice_name)
    for slc in slices:
        merge_slice(slc)

    print("Creating a World's Health Bank dashboard")
    dash_name = "World's Bank Data"
    slug = "world_health"
    dash = db.session.query(Dash).filter_by(slug=slug).first()

    if not dash:
        dash = Dash()
    js = textwrap.dedent("""\
    [
        {
            "col": 1,
            "row": 0,
            "size_x": 2,
            "size_y": 2,
            "slice_id": "1231"
        },
        {
            "col": 1,
            "row": 2,
            "size_x": 2,
            "size_y": 2,
            "slice_id": "1232"
        },
        {
            "col": 10,
            "row": 0,
            "size_x": 3,
            "size_y": 7,
            "slice_id": "1233"
        },
        {
            "col": 1,
            "row": 4,
            "size_x": 6,
            "size_y": 3,
            "slice_id": "1234"
        },
        {
            "col": 3,
            "row": 0,
            "size_x": 7,
            "size_y": 4,
            "slice_id": "1235"
        },
        {
            "col": 5,
            "row": 7,
            "size_x": 8,
            "size_y": 4,
            "slice_id": "1236"
        },
        {
            "col": 7,
            "row": 4,
            "size_x": 3,
            "size_y": 3,
            "slice_id": "1237"
        },
        {
            "col": 1,
            "row": 7,
            "size_x": 4,
            "size_y": 4,
            "slice_id": "1238"
        },
        {
            "col": 9,
            "row": 11,
            "size_x": 4,
            "size_y": 4,
            "slice_id": "1239"
        },
        {
            "col": 1,
            "row": 11,
            "size_x": 8,
            "size_y": 4,
            "slice_id": "1240"
        }
    ]
    """)
    l = json.loads(js)
    for i, pos in enumerate(l):
        pos['slice_id'] = str(slices[i].id)

    dash.dashboard_title = dash_name
    dash.position_json = json.dumps(l, indent=4)
    dash.slug = slug

    dash.slices = slices[:-1]
    db.session.merge(dash)
    db.session.commit()
Example #4
class ResultTest(Base):
    __tablename__ = "result_test"
    id = Column(String, primary_key=True)
    to_number = Column(String(20))
    label = Column(String(50))
    success = Column(Boolean())
    call_status = Column(String(30))
    transcription = Column(String(30))
    transcripted_text = Column(Text())
    transcripted_quality = Column(Float())
    created_at = Column(DateTime())
    updated_at = Column(DateTime())
    start_at = Column(DateTime())
    end_at = Column(DateTime())
    call_duration = Column(Integer())
    recording_duration = Column(Integer())
    recording_sid = Column(Text())
    recording_url = Column(Text())
    error_code = Column(Integer())
    alarmed_at = Column(DateTime())

    def json(self):
        return {
            "id": self.id,
            "to-number": self.to_number,
            "label": self.label,
            "success": self.success,
            "call-status": self.call_status,
            "transcription": self.transcription,
            "transcripted-text": self.transcripted_text,
            "transcripted-quality": self.transcripted_quality,
            "created-at": str(self.created_at),
            "updated-at": str(self.updated_at),
            "start-at": str(self.start_at),
            "end-at": str(self.end_at),
            "call-duration": self.call_duration,
            "recording-duration": self.recording_duration,
            "recording-sid": self.recording_sid,
            "recording-url": self.recording_url,
            "error-code": self.error_code,
            "alarmed_at": str(self.alarmed_at),
        }

    def minimal_json(self):
        return {
            "id": self.id,
            "to-number": self.to_number,
            "label": self.label,
            "success": self.success,
            "call-status": self.call_status,
            "transcription": self.transcription,
            "start-at": str(self.start_at),
            "recording-duration": self.recording_duration,
            "alarmed_at": str(self.alarmed_at),
        }

    @classmethod
    def find_by_id(cls, id):
        return session.query(cls).filter_by(id=id).one_or_none()

    @classmethod
    def find_by_to_number(cls, number, limit=3):
        return (session.query(cls).filter_by(to_number=number).order_by(
            ResultTest.start_at.desc()).limit(limit).all())

    @classmethod
    def find_all(cls):
        return session.query(cls).all()

    @classmethod
    def get_last_unlarmed_faileds(cls):
        unlarmeds = {}
        for ura in Ura.get_uras_numbers():
            results_obj = (session.query(cls).filter_by(
                to_number=ura, success=False,
                alarmed_at=None).order_by(ResultTest.start_at.desc()).all())

            unlarmeds.update({ura: results_obj})

        return unlarmeds

    @classmethod
    def get_failures_per_day(cls, from_date, to_date):
        """
            SELECT COUNT(success), DATE(start_at) FROM result_test
            WHERE success = FALSE GROUP BY DATE(start_at)
            ORDER BY DATE(start_at) DESC
        """
        res = []

        with engine.connect() as con:
            failures = con.execute(
                f"SELECT COUNT(success), DATE(start_at) FROM result_test WHERE success=FALSE AND DATE(start_at) >= DATE('{from_date}') AND DATE(start_at) <= DATE('{to_date}') GROUP BY success, DATE(start_at) ORDER BY DATE(start_at) DESC"
            )

            for row in failures:
                res.append(list(row))

        return res

    @classmethod
    def get_successes_per_day(cls, from_date, to_date):
        """
            SELECT COUNT(success), DATE(start_at) FROM result_test WHERE success=true AND DATE(start_at) >= DATE('2020-03-18') AND DATE(start_at) <= DATE('2020-03-20') GROUP BY DATE(start_at) ORDER BY (start_at) DESC
        """

        res = []

        with engine.connect() as con:
            successes = con.execute(
                f"SELECT COUNT(success), DATE(start_at) FROM result_test WHERE success=TRUE AND DATE(start_at) >= DATE('{from_date}') AND DATE(start_at) <= DATE('{to_date}') GROUP BY success, DATE(start_at) ORDER BY DATE(start_at) DESC"
            )

            for row in successes:
                res.append(list(row))

        return res

    @classmethod
    def find_results(cls, uras=list(), n_last_results=3):
        results = {}
        uras = uras if uras else Ura.get_uras_numbers()

        for ura in uras:
            results_obj = ResultTest.find_by_to_number(ura,
                                                       limit=n_last_results)
            test_results = [r.minimal_json() for r in results_obj]
            results.update({ura: test_results})

        return results

    def save_to_db(self):
        session.add(self)
        session.commit()
        session.close()

    def delete_from_db(self):
        session.delete(self)
        session.commit()
        session.close()
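
Both per-day reports above interpolate from_date and to_date into the SQL with an f-string, which is open to SQL injection if those values ever come from user input. A safer sketch of the same query using SQLAlchemy's text() construct with bound parameters (an alternative shape, not the project's actual code):

from sqlalchemy import text

@classmethod
def get_failures_per_day(cls, from_date, to_date):
    # :from_date and :to_date are bound parameters, so the driver escapes
    # the values instead of splicing them into the statement.
    stmt = text(
        "SELECT COUNT(success), DATE(start_at) FROM result_test "
        "WHERE success = FALSE "
        "AND DATE(start_at) BETWEEN DATE(:from_date) AND DATE(:to_date) "
        "GROUP BY DATE(start_at) ORDER BY DATE(start_at) DESC"
    )
    with engine.connect() as con:
        rows = con.execute(stmt, {"from_date": from_date, "to_date": to_date})
        return [list(row) for row in rows]
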
Example #5
def test_compare_current_timestamp_fn_w_binds(self):
    self._compare_default_roundtrip(
        DateTime(), func.timezone("utc", func.current_timestamp()))
Example #6
from sqlalchemy import Column, Table, MetaData
from sqlalchemy import Integer, BigInteger, DateTime, Boolean, String

from nova import log as logging

meta = MetaData()

bw_cache = Table(
    'bw_usage_cache', meta, Column('created_at', DateTime(timezone=False)),
    Column('updated_at', DateTime(timezone=False)),
    Column('deleted_at', DateTime(timezone=False)),
    Column('deleted', Boolean(create_constraint=True, name=None)),
    Column('id', Integer(), primary_key=True, nullable=False),
    Column('instance_id', Integer(), nullable=False),
    Column(
        'network_label',
        String(length=255,
               convert_unicode=False,
               assert_unicode=None,
               unicode_error=None,
               _warn_on_bytestring=False)),
    Column('start_period', DateTime(timezone=False), nullable=False),
    Column('last_refreshed', DateTime(timezone=False)),
    Column('bw_in', BigInteger()), Column('bw_out', BigInteger()))
Example #7
class WitnessUpdateOperation(Base):
    """

    Steem Blockchain Example
    ========================
    {
      "fee": "0.000 STEEM",
      "url": "fmooo/steemd-docker",
      "props": {
        "sbd_interest_rate": 1000,
        "account_creation_fee": "100.000 STEEM",
        "maximum_block_size": 131072
      },
      "owner": "steempty",
      "block_signing_key": "STM8LoQjQqJHvotqBo7HjnqmUbFW9oJ2theyqonzUd9DdJ7YYHsvD"
    }

    """

    __tablename__ = 'sbds_op_witness_updates'
    __table_args__ = (
        PrimaryKeyConstraint('block_num', 'transaction_num', 'operation_num'),
        ForeignKeyConstraint(['owner'], ['sbds_meta_accounts.name'],
                             deferrable=True,
                             initially='DEFERRED',
                             use_alter=True),
    )

    block_num = Column(Integer, nullable=False, index=True)
    transaction_num = Column(SmallInteger, nullable=False, index=True)
    operation_num = Column(SmallInteger, nullable=False, index=True)
    trx_id = Column(String(40), nullable=False)
    timestamp = Column(DateTime(timezone=False))
    owner = Column(String(16))  # steem_type:account_name_type
    url = Column(UnicodeText)  # steem_type:string -> default
    block_signing_key = Column(String(60),
                               nullable=False)  # steem_type:public_key_type
    props = Column(JSONB)  # steem_type:chain_properties
    fee = Column(Numeric(20, 6), nullable=False)  # steem_type:asset
    fee_symbol = Column(String(5))  # steem_type:asset
    operation_type = Column(operation_types_enum,
                            nullable=False,
                            index=True,
                            default='witness_update')

    _fields = dict(
        props=lambda x: json_string_field(x.get('props')
                                          ),  # steem_type:chain_properties
        fee=lambda x: amount_field(x.get('fee'), num_func=float
                                   ),  # steem_type:asset
        fee_symbol=lambda x: amount_symbol_field(x.get('fee')
                                                 ),  # steem_type:asset
    )

    _account_fields = frozenset([
        'owner',
    ])

    def dump(self):
        return dissoc(self.__dict__, '_sa_instance_state')

    def to_dict(self, decode_json=True):
        data_dict = self.dump()
        if isinstance(data_dict.get('json_metadata'), str) and decode_json:
            data_dict['json_metadata'] = sbds.sbds_json.loads(
                data_dict['json_metadata'])
        return data_dict

    def to_json(self):
        data_dict = self.to_dict()
        return sbds.sbds_json.dumps(data_dict)

    def __repr__(self):
        return "<%s (block_num:%s transaction_num: %s operation_num: %s keys: %s)>" % (
            self.__class__.__name__, self.block_num, self.transaction_num,
            self.operation_num, tuple(self.dump().keys()))

    def __str__(self):
        return str(self.dump())
Example #8
class DBCbsd(Base):
    """
    SAS DB CBSD class
    """
    __tablename__ = "cbsds"
    id = Column(Integer, primary_key=True, autoincrement=True)
    state_id = Column(
        Integer,
        ForeignKey(
            "cbsd_states.id",
            ondelete="CASCADE",
        ),
        nullable=False,
    )
    cbsd_id = Column(String)
    user_id = Column(String)
    fcc_id = Column(String)
    cbsd_serial_number = Column(String)
    last_seen = Column(DateTime(timezone=True))
    min_power = Column(Float)
    max_power = Column(Float)
    antenna_gain = Column(Float)
    number_of_ports = Column(Integer)
    network_id = Column(String)
    created_date = Column(
        DateTime(timezone=True),
        nullable=False,
        server_default=now(),
    )
    updated_date = Column(
        DateTime(timezone=True),
        server_default=now(),
        onupdate=now(),
    )

    state = relationship(
        "DBCbsdState",
        back_populates="cbsds",
        cascade="all, delete",
        passive_deletes=True,
    )
    requests = relationship(
        "DBRequest",
        back_populates="cbsd",
        cascade="all, delete",
        passive_deletes=True,
    )
    grants = relationship(
        "DBGrant",
        back_populates="cbsd",
        cascade="all, delete",
        passive_deletes=True,
    )
    channels = relationship(
        "DBChannel",
        back_populates="cbsd",
        cascade="all, delete",
        passive_deletes=True,
    )
    active_mode_config = relationship(
        "DBActiveModeConfig",
        back_populates="cbsd",
        cascade="all, delete",
        passive_deletes=True,
    )

    def __repr__(self):
        """
        Return string representation of DB object
        """
        class_name = self.__class__.__name__
        state_name = self.state.name
        return f"<{class_name}(id='{self.id}', " \
               f"state='{state_name}', " \
               f"cbsd_id='{self.cbsd_id}', " \
               f"user_id='{self.user_id}', " \
               f"fcc_id='{self.fcc_id}', " \
               f"cbsd_serial_number='{self.cbsd_serial_number}', " \
               f"created_date='{self.created_date}' " \
               f"updated_date='{self.updated_date}')>"
Example #9
class Release(db.ModelBase):

    __tablename__ = "releases"

    @declared_attr
    def __table_args__(cls):  # noqa
        return (
            Index("release_created_idx", cls.created.desc()),
            Index("release_name_created_idx", cls.name, cls.created.desc()),
            Index("release_name_idx", cls.name),
            Index("release_pypi_hidden_idx", cls._pypi_hidden),
            Index("release_version_idx", cls.version),
        )

    __repr__ = make_repr("name", "version")

    name = Column(
        Text,
        ForeignKey("packages.name", onupdate="CASCADE"),
        primary_key=True,
    )
    version = Column(Text, primary_key=True)
    is_prerelease = orm.column_property(func.pep440_is_prerelease(version))
    author = Column(Text)
    author_email = Column(Text)
    maintainer = Column(Text)
    maintainer_email = Column(Text)
    home_page = Column(Text)
    license = Column(Text)
    summary = Column(Text)
    keywords = Column(Text)
    platform = Column(Text)
    download_url = Column(Text)
    _pypi_ordering = Column(Integer)
    _pypi_hidden = Column(Boolean)
    cheesecake_installability_id = Column(
        Integer,
        ForeignKey("cheesecake_main_indices.id"),
    )
    cheesecake_documentation_id = Column(
        Integer,
        ForeignKey("cheesecake_main_indices.id"),
    )
    cheesecake_code_kwalitee_id = Column(
        Integer,
        ForeignKey("cheesecake_main_indices.id"),
    )
    requires_python = Column(Text)
    description_from_readme = Column(Boolean)
    created = Column(
        DateTime(timezone=False),
        nullable=False,
        server_default=sql.func.now(),
    )

    # We defer this column because it is a very large column (it can be MB in
    # size) and we very rarely actually want to access it. Typically we only
    # need it when rendering the page for a single project, but many of our
    # queries only need to access a few of the attributes of a Release. Instead
    # of playing whack-a-mole and using load_only() or defer() on each of
    # those queries, deferring this here makes the default case more
    # performant.
    description = orm.deferred(Column(Text))

    _classifiers = orm.relationship(
        Classifier,
        backref="project_releases",
        secondary=lambda: release_classifiers,
        order_by=Classifier.classifier,
    )
    classifiers = association_proxy("_classifiers", "classifier")

    files = orm.relationship(
        "File",
        backref="release",
        cascade="all, delete-orphan",
        lazy="dynamic",
        order_by=lambda: File.filename,
    )

    dependencies = orm.relationship("Dependency")

    _requires = _dependency_relation(DependencyKind.requires)
    requires = association_proxy("_requires", "specifier")

    _provides = _dependency_relation(DependencyKind.provides)
    provides = association_proxy("_provides", "specifier")

    _obsoletes = _dependency_relation(DependencyKind.obsoletes)
    obsoletes = association_proxy("_obsoletes", "specifier")

    _requires_dist = _dependency_relation(DependencyKind.requires_dist)
    requires_dist = association_proxy("_requires_dist", "specifier")

    _provides_dist = _dependency_relation(DependencyKind.provides_dist)
    provides_dist = association_proxy("_provides_dist", "specifier")

    _obsoletes_dist = _dependency_relation(DependencyKind.obsoletes_dist)
    obsoletes_dist = association_proxy("_obsoletes_dist", "specifier")

    _requires_external = _dependency_relation(DependencyKind.requires_external)
    requires_external = association_proxy("_requires_external", "specifier")

    _project_urls = _dependency_relation(DependencyKind.project_url)
    project_urls = association_proxy("_project_urls", "specifier")

    uploader = orm.relationship(
        "User",
        secondary=lambda: JournalEntry.__table__,
        primaryjoin=lambda: (
            (JournalEntry.name == orm.foreign(Release.name)) &
            (JournalEntry.version == orm.foreign(Release.version)) &
            (JournalEntry.action == "new release")),
        secondaryjoin=lambda: (
            (User.username == orm.foreign(JournalEntry._submitted_by))
        ),
        order_by=lambda: JournalEntry.submitted_date.desc(),
        # TODO: We have uselist=False here which raises a warning because
        # multiple items were returned. This should only be temporary because
        # we should add a nullable FK to JournalEntry so we don't need to rely
        # on ordering and implicitly selecting the first object to make this
        # happen.
        uselist=False,
        viewonly=True,
    )

    def __acl__(self):
        session = orm.object_session(self)
        acls = [
            (Allow, "group:admins", "admin"),
        ]

        # Get all of the users for this project.
        query = session.query(Role).filter(Role.project == self)
        query = query.options(orm.lazyload("project"))
        query = query.options(orm.joinedload("user").lazyload("emails"))
        for role in sorted(
                query.all(),
                key=lambda x: ["Owner", "Maintainer"].index(x.role_name)):
            if role.role_name == "Owner":
                acls.append((Allow, str(role.user.id), ["manage", "upload"]))
            else:
                acls.append((Allow, str(role.user.id), ["upload"]))
        return acls

    @property
    def urls(self):
        _urls = OrderedDict()

        if self.home_page:
            _urls["Homepage"] = self.home_page

        for urlspec in self.project_urls:
            name, url = [x.strip() for x in urlspec.split(",", 1)]
            _urls[name] = url

        if self.download_url and "Download" not in _urls:
            _urls["Download"] = self.download_url

        return _urls

    @property
    def has_meta(self):
        return any([self.license,
                    self.keywords,
                    self.author, self.author_email,
                    self.maintainer, self.maintainer_email,
                    self.requires_python])
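
As the comment above explains, description is deferred so ordinary queries never fetch the multi-megabyte column; code paths that do need it can opt back in per query with undefer(). A minimal sketch (the session and project name are assumptions):

from sqlalchemy import orm

# Default behaviour: `description` is absent from the SELECT; accessing
# release.description afterwards issues a second, column-only SELECT.
release = session.query(Release).filter_by(name="example-project").first()

# For the single-project page, load it eagerly in the same SELECT.
release = (
    session.query(Release)
    .options(orm.undefer("description"))
    .filter_by(name="example-project")
    .first()
)
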
Example #10
class Project(SitemapMixin, db.ModelBase):

    __tablename__ = "packages"
    __table_args__ = (
        CheckConstraint(
            "name ~* '^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$'::text",
            name="packages_valid_name",
        ),
    )

    __repr__ = make_repr("name")

    name = Column(Text, primary_key=True, nullable=False)
    normalized_name = orm.column_property(func.normalize_pep426_name(name))
    stable_version = Column(Text)
    autohide = Column(Boolean, server_default=sql.true())
    comments = Column(Boolean, server_default=sql.true())
    bugtrack_url = Column(Text)
    hosting_mode = Column(Text, nullable=False, server_default="pypi-only")
    created = Column(
        DateTime(timezone=False),
        nullable=False,
        server_default=sql.func.now(),
    )
    has_docs = Column(Boolean)
    upload_limit = Column(Integer, nullable=True)
    last_serial = Column(Integer, nullable=False, server_default=sql.text("0"))
    allow_legacy_files = Column(
        Boolean,
        nullable=False,
        server_default=sql.false(),
    )
    zscore = Column(Float, nullable=True)

    users = orm.relationship(
        User,
        secondary=Role.__table__,
        backref="projects",
    )

    releases = orm.relationship(
        "Release",
        backref="project",
        cascade="all, delete-orphan",
        order_by=lambda: Release._pypi_ordering.desc(),
    )

    def __getitem__(self, version):
        session = orm.object_session(self)

        try:
            return (
                session.query(Release)
                       .filter((Release.project == self) &
                               (Release.version == version))
                       .one()
            )
        except NoResultFound:
            raise KeyError from None

    def __acl__(self):
        session = orm.object_session(self)
        acls = [
            (Allow, "group:admins", "admin"),
        ]

        # Get all of the users for this project.
        query = session.query(Role).filter(Role.project == self)
        query = query.options(orm.lazyload("project"))
        query = query.options(orm.joinedload("user").lazyload("emails"))
        for role in sorted(
                query.all(),
                key=lambda x: ["Owner", "Maintainer"].index(x.role_name)):
            if role.role_name == "Owner":
                acls.append((Allow, str(role.user.id), ["manage", "upload"]))
            else:
                acls.append((Allow, str(role.user.id), ["upload"]))
        return acls

    @property
    def documentation_url(self):
        # TODO: Move this into the database and eliminate the use of the
        #       threadlocal here.
        request = get_current_request()

        # If the project doesn't have docs, then we'll just return a None here.
        if not self.has_docs:
            return

        return request.route_url("legacy.docs", project=self.name)
Example #11
class CustomJsonOperation(Base):
    """

    dPay Blockchain Example
    =======================
    {
      "required_auths": [],
      "id": "follow",
      "json": "{\"follower\":\"jared\",\"following\":\"stan\",\"what\":[\"posts\"]}",
      "required_posting_auths": [
        "jared"
      ]
    }

    """

    __tablename__ = 'dpds_op_custom_jsons'
    __table_args__ = (PrimaryKeyConstraint('block_num', 'transaction_num',
                                           'operation_num'), )

    block_num = Column(Integer, nullable=False, index=True)
    transaction_num = Column(SmallInteger, nullable=False, index=True)
    operation_num = Column(SmallInteger, nullable=False, index=True)
    trx_id = Column(String(40), nullable=False)
    timestamp = Column(DateTime(timezone=False))
    required_auths = Column(JSONB)  # dpay_type:flat_set< account_name_type>
    required_posting_auths = Column(
        JSONB)  # dpay_type:flat_set< account_name_type>
    id = Column(UnicodeText)  # dpay_type:string -> default
    json = Column(JSONB)  # name:json
    operation_type = Column(operation_types_enum,
                            nullable=False,
                            index=True,
                            default='custom_json')

    _fields = dict(
        required_auths=lambda x: json_string_field(x.get(
            'required_auths')),  # dpay_type:flat_set< account_name_type>
        required_posting_auths=lambda x: json_string_field(
            x.get('required_posting_auths')
        ),  # dpay_type:flat_set< account_name_type>
        json=lambda x: json_string_field(x.get('json')),  # name:json
    )

    _account_fields = frozenset([])

    def dump(self):
        return dissoc(self.__dict__, '_sa_instance_state')

    def to_dict(self, decode_json=True):
        data_dict = self.dump()
        if isinstance(data_dict.get('json_metadata'), str) and decode_json:
            data_dict['json_metadata'] = dpds.dpds_json.loads(
                data_dict['json_metadata'])
        return data_dict

    def to_json(self):
        data_dict = self.to_dict()
        return dpds.dpds_json.dumps(data_dict)

    def __repr__(self):
        return "<%s (block_num:%s transaction_num: %s operation_num: %s keys: %s)>" % (
            self.__class__.__name__, self.block_num, self.transaction_num,
            self.operation_num, tuple(self.dump().keys()))

    def __str__(self):
        return str(self.dump())
Example #12
class utcnow(expression.FunctionElement):
    type = DateTime()
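
On its own this class only declares the expression's return type; the rest of the standard SQLAlchemy recipe registers a compilation hook so the construct renders real SQL. The PostgreSQL variant from the SQLAlchemy documentation is shown below, and it produces exactly the expression the roundtrip tests in Examples #5 and #18 compare against:

from sqlalchemy.ext.compiler import compiles

@compiles(utcnow, "postgresql")
def pg_utcnow(element, compiler, **kw):
    # Server-side UTC timestamp; usable as e.g. server_default=utcnow().
    return "TIMEZONE('utc', CURRENT_TIMESTAMP)"
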
Example #13
class Hackathon(db.Model):
    """
    Main event model connecting workshops and assets; a Hackathon can be
    approved or rejected by a Developer Students' Club lead after being
    reviewed as an application made by a Developer Students' Club member.

        MANY Hackathon can host MANY Workshop -> associative table Hackathon_Workshop
        MANY Hackathon can have MANY Category -> associative table Hackathon_Category
        MANY Hackathon can have MANY Item -> associative table Hackathon_Item
        ONE Hackathon can have ONE Status
    """

    __tablename__ = 'hackathon'

    id = Column(Integer, primary_key=True)
    name = Column(String)
    start_time = db.Column(DateTime(timezone=True))
    end_time = db.Column(DateTime(timezone=True))
    place_name = Column(String)

    # many to many
    categories = relationship("Hackathon_Category",
                              cascade="all,delete",
                              backref="category")
    workshops = relationship("Hackathon_Workshop",
                             cascade="all,delete",
                             backref="workshop")
    items = relationship("Hackathon_Item",
                         cascade="all,delete",
                         backref="item")

    # one to one
    status_id = Column(Integer, ForeignKey('status.id'))
    status = relationship("Status", backref="hackathon")

    def short_serialize(self):
        """
        short_serialize()
            short form representation of the Hackathon model
        """

        return {
            'id': self.id,
            'name': self.name,
            'start_time': self.start_time,
            'end_time': self.end_time,
            'place_name': self.place_name
        }

    def full_serialize(self):
        """
            full_serialize()
                full form representation of the Hackathon model
        """
        return {
            'id': self.id,
            'name': self.name,
            'start_time': self.start_time,
            'end_time': self.end_time,
            'place_name': self.place_name,
            'status_id': self.status.id
            # TODO get children's names
        }

    def insert(self):
        db.session.add(self)
        db.session.commit()

    def delete(self):
        db.session.delete(self)
        db.session.commit()

    def update(self):
        db.session.commit()

    def __repr__(self):
        return json.dumps(self.full_serialize())
Example #14
def already_happened(self):
    return DateTime.now() > self.start_time
Example #15
class Task(Base):
    __tablename__ = 'task'

    STATUS = (
        ('new', _(u'new')),
        ('enquiry', _(u'enquiry')),
        ('in_work', _(u'in work')),
        ('ready', _(u'ready')),
    )

    id = Column(Integer, autoincrement=True, primary_key=True)
    resource_id = Column(
        Integer,
        ForeignKey(
            'resource.id',
            name="fk_resource_id_task",
            ondelete='restrict',
            onupdate='cascade',
        ),
        nullable=False,
    )
    title = Column(
        String(length=128),
        nullable=False,
    )
    deadline = Column(
        DateTime(timezone=True),
        nullable=False,
    )
    reminder = Column(Integer)
    descr = Column(String)
    status = Column(
        EnumIntType(STATUS),
        default='new',
        nullable=False,
    )
    resource = relationship(
        'Resource',
        backref=backref('task', uselist=False, cascade="all,delete"),
        foreign_keys=[resource_id],
        cascade="all,delete",
        uselist=False,
    )
    task_resource = relationship(
        'Resource',
        secondary=task_resource,
        backref=backref(
            'tasks',
            uselist=True,
            lazy='dynamic',
        ),
        uselist=False,
    )
    uploads = relationship(
        'Upload',
        secondary=task_upload,
        backref=backref('task', uselist=False),
        cascade="all,delete",
        uselist=True,
    )

    @classmethod
    def get(cls, id):
        if id is None:
            return None
        return DBSession.query(cls).get(id)

    @classmethod
    def by_resource_id(cls, resource_id):
        if resource_id is None:
            return None
        return (DBSession.query(cls).filter(
            cls.resource_id == resource_id).first())

    @property
    def reminder_datetime(self):
        if self.reminder:
            return self.deadline - timedelta(minutes=self.reminder)
Example #16
document_namespaces = Table('document_namespaces', meta,
    Column('document_namespace_id', Integer, primary_key=True),
    Column('uri', String(500), nullable=False),
    UniqueConstraint('uri', name='uc_document_namespace_uri')
    )

documents = Table('documents', meta,
    Column('document_id', Integer, primary_key=True),
    Column('document_namespace_id', Integer, nullable=False),
    Column('data_license_id', Integer, nullable=False),
    Column('spdx_version', String(255), nullable=False),
    Column('name', String(255), nullable=False),
    Column('license_list_version', String(255), nullable=False),
    Column('created_ts', DateTime(timezone=True), nullable=False, default=func.current_timestamp()),
    Column('creator_comment', Text, nullable=False),
    Column('document_comment', Text, nullable=False),
    Column('package_id', Integer, nullable=False),
    UniqueConstraint('document_namespace_id', name='uc_document_document_namespace_id'),
    ForeignKeyConstraint(['document_namespace_id'], ['document_namespaces.document_namespace_id']),
    ForeignKeyConstraint(['data_license_id'], ['licenses.license_id']),
    ForeignKeyConstraint(['package_id'], ['packages.package_id']),
    )

external_refs = Table('external_refs', meta,
    Column('external_ref_id', Integer, primary_key=True),
    Column('document_id', Integer, nullable=False),
    Column('document_namespace_id', Integer, nullable=False),
    Column('id_string', String(255), nullable=False),
    Column('sha1', String(40), nullable=False),
Example #17
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    # New table
    quota_classes = Table(
        'quota_classes',
        meta,
        Column('created_at', DateTime(timezone=False)),
        Column('updated_at', DateTime(timezone=False)),
        Column('deleted_at', DateTime(timezone=False)),
        Column('deleted', Boolean(create_constraint=True, name=None)),
        Column('id', Integer(), primary_key=True),
        Column('class_name', String(length=255), index=True),
        Column('resource', String(length=255)),
        Column('hard_limit', Integer(), nullable=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )

    try:
        quota_classes.create()
    except Exception:
        LOG.error(_LE("Table |%s| not created!"), repr(quota_classes))
        raise

    quota_usages = Table(
        'quota_usages',
        meta,
        Column('created_at', DateTime(timezone=False)),
        Column('updated_at', DateTime(timezone=False)),
        Column('deleted_at', DateTime(timezone=False)),
        Column('deleted', Boolean(create_constraint=True, name=None)),
        Column('id', Integer(), primary_key=True),
        Column('project_id', String(length=255), index=True),
        Column('resource', String(length=255)),
        Column('in_use', Integer(), nullable=False),
        Column('reserved', Integer(), nullable=False),
        Column('until_refresh', Integer(), nullable=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )

    try:
        quota_usages.create()
    except Exception:
        LOG.error(_LE("Table |%s| not created!"), repr(quota_usages))
        raise

    reservations = Table(
        'reservations',
        meta,
        Column('created_at', DateTime(timezone=False)),
        Column('updated_at', DateTime(timezone=False)),
        Column('deleted_at', DateTime(timezone=False)),
        Column('deleted', Boolean(create_constraint=True, name=None)),
        Column('id', Integer(), primary_key=True),
        Column('uuid', String(length=36), nullable=False),
        Column('usage_id',
               Integer(),
               ForeignKey('quota_usages.id'),
               nullable=False),
        Column('project_id', String(length=255), index=True),
        Column('resource', String(length=255)),
        Column('delta', Integer(), nullable=False),
        Column('expire', DateTime(timezone=False)),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )

    try:
        reservations.create()
    except Exception:
        LOG.error(_LE("Table |%s| not created!"), repr(reservations))
        raise
Example #18
def test_compare_current_timestamp_text(self):
    self._compare_default_roundtrip(
        DateTime(), text("TIMEZONE('utc', CURRENT_TIMESTAMP)"))
Example #19
from sqlalchemy import (Table, Column, Integer, String, DateTime, ForeignKey)
from sqlalchemy.sql import func
from resources import metadata

users = Table(
    "user",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("username", String),
    Column("password", String),
    Column("created_on", DateTime(timezone=True), server_default=func.now()),
)
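
A short usage sketch for this table definition, assuming metadata from resources is a plain MetaData() and that a database URL is available (both are assumptions here):

from sqlalchemy import create_engine

engine = create_engine("postgresql://localhost/example")  # hypothetical DSN
metadata.create_all(engine)  # emits CREATE TABLE "user" (...)

# created_on has a server default (func.now()), so inserts that omit it
# still get a timestamp assigned by the database.
with engine.begin() as conn:
    conn.execute(users.insert().values(username="alice", password="secret"))
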
Example #20
class File(DeclarativeBase, FileMixin):
    """Represents an item on the filesystem
    """
    __versioned__ = {"base_classes": (FileVersionMixin, DeclarativeBase)}

    #: True means: physical file should be deleted when database object is deleted
    unlink_after_deletion = False

    def __init__(self, path, filetype, mimetype, node=None, **kwargs):
        # legacy stuff
        datadir = config.settings["paths.datadir"]
        if path.startswith(datadir):
            warn("file path starts with paths.datadir, should be relative",
                 DeprecationWarning)
            path = path[len(datadir):]
        if "type" in kwargs:
            raise Exception(
                "type keyword arg is not allowed anymore, use filetype")
        if "filename" in kwargs:
            raise Exception(
                "name positional arg is not allowed anymore, use path")

        self.path = path
        self.filetype = filetype
        self.mimetype = mimetype
        if node is not None:
            self.node = node

    __tablename__ = "file"
    id = integer_pk()
    path = C(Unicode(4096))
    filetype = C(Unicode(126))
    mimetype = C(String(255))
    _size = C('size', BigInteger)
    # Checksum/hash columns
    sha512 = C(String(128))  # LargeBinary could be an alternative
    sha512_created_at = C(DateTime())
    sha512_checked_at = C(DateTime())
    sha512_ok = C(Boolean())

    nodes = rel(Node,
                secondary=NodeToFile.__table__,
                backref=bref("files",
                             lazy="dynamic",
                             query_class=AppenderQueryWithLen),
                lazy="dynamic")

    _node_objects = rel(Node,
                        secondary=NodeToFile.__table__,
                        backref=bref("file_objects", viewonly=True),
                        viewonly=True)

    def unlink(self):
        if self.exists:
            os.unlink(self.abspath)
        else:
            logg.warn(
                "tried to unlink missing physical file %s at %s, ignored",
                self.id, self.path)

    def __repr__(self):
        return "File #{} ({}:{}|{}) at {}".format(self.id, self.path,
                                                  self.filetype, self.mimetype,
                                                  hex(id(self)))

    def __unicode__(self):
        return u"# {} {} {} in {}".format(self.id, self.filetype,
                                          self.mimetype, self.path)

    @property
    def size(self):
        """Return size of file in bytes"""
        if self._size is None:
            return get_filesize(self.path)
        return self._size

    @property
    def size_humanized(self):
        """Return string with the size in human-friendly format, e.g. '7.9 kB'"""
        return humanize.naturalsize(self.size)

    def calculate_sha512(self):
        """Calculate the hash from the file on disk."""
        if not self.exists:
            return None
        return sha512_from_file(self.abspath)

    def update_sha512(self):
        """Overwrite the stored checksum value with the current checksum of the file on disk.
        Use with caution, should not be necessary under usual circumstances!"""
        if not self.exists:
            return None
        logg.info('Updating sha512 for file ID: %s.' % self.id)
        self.sha512 = self.calculate_sha512()
        self.sha512_ok = True
        self.sha512_created_at = self.sha512_checked_at = datetime.utcnow()
        self._size = get_filesize(self.path)
        return self.sha512

    def get_or_create_sha512(self):
        """Return the stored hash. If there is none, create and store it."""
        if not self.exists:
            return None, False
        created = False
        if not self.sha512:
            created = True
            logg.info('Checksum not in DB, creating it for file ID: %s.' %
                      self.id)
            self.update_sha512()
        return self.sha512, created

    def verify_checksum(self):
        """Make sure the file exists and has the same checksum as before"""
        if not self.exists:
            #raise IOError()
            logg.warn('check_checksum: file %s does not exist at %s!' %
                      (self.id, self.abspath))
            self.sha512_ok = None
            return None
        self.sha512_checked_at = datetime.utcnow()
        sha_stored, created = self.get_or_create_sha512()
        if created:
            # checksum was just created, skip a second calculation of the hash
            return True
        else:
            sha_calculated = self.calculate_sha512()
            if sha_stored == sha_calculated and sha_calculated is not None:
                logg.debug('Matching checksums :) for file ID: %s.' % self.id)
                self.sha512_ok = True
            else:
                logg.warn('Checksum mismatch for file ID: %s.' % self.id)
                self.sha512_ok = False
        return self.sha512_ok
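
calculate_sha512() delegates to a sha512_from_file helper that is not part of this snippet. A plausible minimal implementation (an assumption, not the project's actual helper) hashes the file in chunks so it never has to fit in memory:

import hashlib

def sha512_from_file(path, chunk_size=64 * 1024):
    """Hypothetical helper: hex SHA-512 digest of a file, read chunk-wise."""
    digest = hashlib.sha512()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()
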
Example #21
class User(UserMixin, BaseModel, CommonModel, Base):
    last_login_date = Column(DateTime(timezone=True), nullable=True)
    registered_date = Column(DateTime(timezone=True),
                             nullable=False,
                             default=datetime.utcnow)

    def _get_password(self):
        return self._password

    def _set_password(self, password):
        self._password = self.set_password(password)

    password = property(_get_password, _set_password)

    def get_groups(self):
        return UserGroup.get_by_user(self)

    def last_login_date_tz(self):
        return as_timezone(self.last_login_date)

    def registered_date_tz(self):
        return as_timezone(self.registered_date)

    def nice_username(self):
        return self.user_name or self.email

    def nip_pbb(self):
        from ..tools import get_settings
        settings = get_settings()
        if self.user_name == 'admin':
            return '060000000000000000'

        if "pbb.url" in settings and settings["pbb.url"]:
            from ..pbb.models import pbbDBSession
            from ..pbb.models.pegawai import DatLogin
            row = pbbDBSession.query(DatLogin).\
                    filter_by(nm_login=self.user_name).first()
            if row:
                return row.nip
        return

    def kode(self):
        pass

    @classmethod
    def get_by_email(cls, email):
        return DBSession.query(cls).filter_by(email=email).first()

    @classmethod
    def get_by_name(cls, name):
        return DBSession.query(cls).filter_by(user_name=name).first()

    @classmethod
    def get_by_identity(cls, identity):
        if identity.find('@') > -1:
            return cls.get_by_email(identity)
        return cls.get_by_name(identity)

    @classmethod
    def get_by_token(cls, token):
        return cls.query().filter_by(security_code=token)
Example #22
# Map a dataset type to how we will store it (storage_type and each measurement/band).
STORAGE_TYPE = Table(
    'storage_type',
    _core.METADATA,
    Column('id', SmallInteger, primary_key=True, autoincrement=True),

    # A name/label for this storage type (eg. 'ls7_nbar'). Specified by users.
    Column('name', String, unique=True, nullable=False),

    # Match any datasets whose metadata is a superset of this.
    Column('dataset_metadata', postgres.JSONB, nullable=False),
    Column('definition', postgres.JSONB, nullable=False),

    # When it was added and by whom.
    Column('added',
           DateTime(timezone=True),
           server_default=func.now(),
           nullable=False),
    Column('added_by',
           String,
           server_default=func.current_user(),
           nullable=False),

    # Name must be alphanumeric + underscores.
    CheckConstraint(r"name ~* '^\w+$'", name='alphanumeric_name'),
)

STORAGE_UNIT = Table(
    'storage_unit',
    _core.METADATA,
    Column('id', Integer, primary_key=True, autoincrement=True),
Example #23
class Bid(Sessionized, Base):
    __tablename__ = "bids"

    STATUS = Enum("AUTH", "CHARGE", "REFUND", "VOID")

    # will be unique from authorize
    transaction = Column(BigInteger, primary_key=True, autoincrement=False)

    # identifying characteristics
    account_id = Column(BigInteger, index=True, nullable=False)
    pay_id = Column(BigInteger, index=True, nullable=False)
    thing_id = Column(BigInteger, index=True, nullable=False)

    # breadcrumbs
    ip = Column(Inet)
    date = Column(DateTime(timezone=True),
                  default=safunc.now(),
                  nullable=False)

    # bid information:
    bid = Column(Float, nullable=False)
    charge = Column(Float)

    status = Column(Integer, nullable=False, default=STATUS.AUTH)

    # make this a primary key as well so that we can have more than
    # one freebie per campaign
    campaign = Column(Integer, default=0, primary_key=True)

    @classmethod
    def _new(cls, trans_id, user, pay_id, thing_id, bid, campaign=0):
        bid = Bid(trans_id,
                  user,
                  pay_id,
                  thing_id,
                  getattr(request, 'ip', '0.0.0.0'),
                  bid=bid,
                  campaign=campaign)
        bid._commit()
        return bid


#    @classmethod
#    def for_transactions(cls, transids):
#        transids = filter(lambda x: x != 0, transids)
#        if transids:
#            q = cls.query()
#            q = q.filter(or_(*[cls.transaction == i for i in transids]))
#            return dict((p.transaction, p) for p in q)
#        return {}

    def set_status(self, status):
        if self.status != status:
            self.status = status
            self._commit()

    def auth(self):
        self.set_status(self.STATUS.AUTH)

    def is_auth(self):
        return (self.status == self.STATUS.AUTH)

    def void(self):
        self.set_status(self.STATUS.VOID)

    def is_void(self):
        return (self.status == self.STATUS.VOID)

    def charged(self):
        self.charge = self.bid
        self.set_status(self.STATUS.CHARGE)
        self._commit()

    def is_charged(self):
        """
        Returns True if transaction has been charged with authorize.net or is
        a freebie with "charged" status.
        """
        return (self.status == self.STATUS.CHARGE)

    def refund(self, amount):
        # needed if charged() did not set the charge attr
        current_charge = self.charge or self.bid
        self.charge = current_charge - amount
        self.set_status(self.STATUS.REFUND)
        self._commit()

    def is_refund(self):
        return (self.status == self.STATUS.REFUND)

    @property
    def charge_amount(self):
        return self.charge or self.bid
Example #24
class SongModel(Base):
    __tablename__ = "song"
    id = Column(Integer, primary_key=True, autoincrement=True)
    song_name = Column(String(length=100))
    duration_in_sec = Column(Integer)
    upload_time = Column(DateTime(timezone=True), default=func.now())
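
Note that default=func.now() here is a client-side default: SQLAlchemy embeds now() into the INSERT statements it generates, but no DEFAULT clause appears in the table's DDL. If rows inserted outside the ORM should get the timestamp too, a server-side default is the usual alternative, sketched here as a variant rather than a change to the original model:

upload_time = Column(
    DateTime(timezone=True),
    server_default=func.now(),  # DDL-level DEFAULT, applied by the database
)
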
Example #25
def load_world_bank_health_n_pop(only_metadata=False, force=False):
    """Loads the world bank health dataset, slices and a dashboard"""
    tbl_name = "wb_health_population"
    database = utils.get_example_database()
    table_exists = database.has_table_by_name(tbl_name)

    if not only_metadata and (not table_exists or force):
        data = get_example_data("countries.json.gz")
        pdf = pd.read_json(data)
        pdf.columns = [col.replace(".", "_") for col in pdf.columns]
        pdf.year = pd.to_datetime(pdf.year)
        pdf.to_sql(
            tbl_name,
            database.get_sqla_engine(),
            if_exists="replace",
            chunksize=50,
            dtype={
                "year": DateTime(),
                "country_code": String(3),
                "country_name": String(255),
                "region": String(255),
            },
            index=False,
        )

    print("Creating table [wb_health_population] reference")
    tbl = db.session.query(TBL).filter_by(table_name=tbl_name).first()
    if not tbl:
        tbl = TBL(table_name=tbl_name)
    tbl.description = utils.readfile(os.path.join(EXAMPLES_FOLDER, "countries.md"))
    tbl.main_dttm_col = "year"
    tbl.database = database
    tbl.filter_select_enabled = True

    metrics = [
        "sum__SP_POP_TOTL",
        "sum__SH_DYN_AIDS",
        "sum__SH_DYN_AIDS",
        "sum__SP_RUR_TOTL_ZS",
        "sum__SP_DYN_LE00_IN",
        "sum__SP_RUR_TOTL",
    ]
    for m in metrics:
        if not any(col.metric_name == m for col in tbl.metrics):
            aggr_func = m[:3]
            col = str(column(m[5:]).compile(db.engine))
            tbl.metrics.append(
                SqlMetric(metric_name=m, expression=f"{aggr_func}({col})")
            )

    db.session.merge(tbl)
    db.session.commit()
    tbl.fetch_metadata()

    defaults = {
        "compare_lag": "10",
        "compare_suffix": "o10Y",
        "limit": "25",
        "granularity_sqla": "year",
        "groupby": [],
        "metric": "sum__SP_POP_TOTL",
        "metrics": ["sum__SP_POP_TOTL"],
        "row_limit": config.get("ROW_LIMIT"),
        "since": "2014-01-01",
        "until": "2014-01-02",
        "time_range": "2014-01-01 : 2014-01-02",
        "where": "",
        "markup_type": "markdown",
        "country_fieldtype": "cca3",
        "secondary_metric": {
            "aggregate": "SUM",
            "column": {
                "column_name": "SP_RUR_TOTL",
                "optionName": "_col_SP_RUR_TOTL",
                "type": "DOUBLE",
            },
            "expressionType": "SIMPLE",
            "hasCustomLabel": True,
            "label": "Rural Population",
        },
        "entity": "country_code",
        "show_bubbles": True,
    }

    print("Creating slices")
    slices = [
        Slice(
            slice_name="Region Filter",
            viz_type="filter_box",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                viz_type="filter_box",
                date_filter=False,
                filter_configs=[
                    {
                        "asc": False,
                        "clearable": True,
                        "column": "region",
                        "key": "2s98dfu",
                        "metric": "sum__SP_POP_TOTL",
                        "multiple": True,
                    },
                    {
                        "asc": False,
                        "clearable": True,
                        "key": "li3j2lk",
                        "column": "country_name",
                        "metric": "sum__SP_POP_TOTL",
                        "multiple": True,
                    },
                ],
            ),
        ),
        Slice(
            slice_name="World's Population",
            viz_type="big_number",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                since="2000",
                viz_type="big_number",
                compare_lag="10",
                metric="sum__SP_POP_TOTL",
                compare_suffix="over 10Y",
            ),
        ),
        Slice(
            slice_name="Most Populated Countries",
            viz_type="table",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                viz_type="table",
                metrics=["sum__SP_POP_TOTL"],
                groupby=["country_name"],
            ),
        ),
        Slice(
            slice_name="Growth Rate",
            viz_type="line",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                viz_type="line",
                since="1960-01-01",
                metrics=["sum__SP_POP_TOTL"],
                num_period_compare="10",
                groupby=["country_name"],
            ),
        ),
        Slice(
            slice_name="% Rural",
            viz_type="world_map",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                viz_type="world_map",
                metric="sum__SP_RUR_TOTL_ZS",
                num_period_compare="10",
            ),
        ),
        Slice(
            slice_name="Life Expectancy VS Rural %",
            viz_type="bubble",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                viz_type="bubble",
                since="2011-01-01",
                until="2011-01-02",
                series="region",
                limit=0,
                entity="country_name",
                x="sum__SP_RUR_TOTL_ZS",
                y="sum__SP_DYN_LE00_IN",
                size="sum__SP_POP_TOTL",
                max_bubble_size="50",
                filters=[
                    {
                        "col": "country_code",
                        "val": [
                            "TCA",
                            "MNP",
                            "DMA",
                            "MHL",
                            "MCO",
                            "SXM",
                            "CYM",
                            "TUV",
                            "IMY",
                            "KNA",
                            "ASM",
                            "ADO",
                            "AMA",
                            "PLW",
                        ],
                        "op": "not in",
                    }
                ],
            ),
        ),
        Slice(
            slice_name="Rural Breakdown",
            viz_type="sunburst",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                viz_type="sunburst",
                groupby=["region", "country_name"],
                since="2011-01-01",
                until="2011-01-01",
            ),
        ),
        Slice(
            slice_name="World's Pop Growth",
            viz_type="area",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                since="1960-01-01",
                until="now",
                viz_type="area",
                groupby=["region"],
            ),
        ),
        Slice(
            slice_name="Box plot",
            viz_type="box_plot",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                since="1960-01-01",
                until="now",
                whisker_options="Min/max (no outliers)",
                x_ticks_layout="staggered",
                viz_type="box_plot",
                groupby=["region"],
            ),
        ),
        Slice(
            slice_name="Treemap",
            viz_type="treemap",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                since="1960-01-01",
                until="now",
                viz_type="treemap",
                metrics=["sum__SP_POP_TOTL"],
                groupby=["region", "country_code"],
            ),
        ),
        Slice(
            slice_name="Parallel Coordinates",
            viz_type="para",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                since="2011-01-01",
                until="2011-01-01",
                viz_type="para",
                limit=100,
                metrics=["sum__SP_POP_TOTL", "sum__SP_RUR_TOTL_ZS", "sum__SH_DYN_AIDS"],
                secondary_metric="sum__SP_POP_TOTL",
                series="country_name",
            ),
        ),
    ]
    misc_dash_slices.add(slices[-1].slice_name)
    for slc in slices:
        merge_slice(slc)

    print("Creating a World's Health Bank dashboard")
    dash_name = "World's Bank Data"
    slug = "world_health"
    dash = db.session.query(Dash).filter_by(slug=slug).first()

    if not dash:
        dash = Dash()
    dash.published = True
    js = textwrap.dedent(
        """\
{
    "CHART-36bfc934": {
        "children": [],
        "id": "CHART-36bfc934",
        "meta": {
            "chartId": 40,
            "height": 25,
            "sliceName": "Region Filter",
            "width": 2
        },
        "type": "CHART"
    },
    "CHART-37982887": {
        "children": [],
        "id": "CHART-37982887",
        "meta": {
            "chartId": 41,
            "height": 25,
            "sliceName": "World's Population",
            "width": 2
        },
        "type": "CHART"
    },
    "CHART-17e0f8d8": {
        "children": [],
        "id": "CHART-17e0f8d8",
        "meta": {
            "chartId": 42,
            "height": 92,
            "sliceName": "Most Populated Countries",
            "width": 3
        },
        "type": "CHART"
    },
    "CHART-2ee52f30": {
        "children": [],
        "id": "CHART-2ee52f30",
        "meta": {
            "chartId": 43,
            "height": 38,
            "sliceName": "Growth Rate",
            "width": 6
        },
        "type": "CHART"
    },
    "CHART-2d5b6871": {
        "children": [],
        "id": "CHART-2d5b6871",
        "meta": {
            "chartId": 44,
            "height": 52,
            "sliceName": "% Rural",
            "width": 7
        },
        "type": "CHART"
    },
    "CHART-0fd0d252": {
        "children": [],
        "id": "CHART-0fd0d252",
        "meta": {
            "chartId": 45,
            "height": 50,
            "sliceName": "Life Expectancy VS Rural %",
            "width": 8
        },
        "type": "CHART"
    },
    "CHART-97f4cb48": {
        "children": [],
        "id": "CHART-97f4cb48",
        "meta": {
            "chartId": 46,
            "height": 38,
            "sliceName": "Rural Breakdown",
            "width": 3
        },
        "type": "CHART"
    },
    "CHART-b5e05d6f": {
        "children": [],
        "id": "CHART-b5e05d6f",
        "meta": {
            "chartId": 47,
            "height": 50,
            "sliceName": "World's Pop Growth",
            "width": 4
        },
        "type": "CHART"
    },
    "CHART-e76e9f5f": {
        "children": [],
        "id": "CHART-e76e9f5f",
        "meta": {
            "chartId": 48,
            "height": 50,
            "sliceName": "Box plot",
            "width": 4
        },
        "type": "CHART"
    },
    "CHART-a4808bba": {
        "children": [],
        "id": "CHART-a4808bba",
        "meta": {
            "chartId": 49,
            "height": 50,
            "sliceName": "Treemap",
            "width": 8
        },
        "type": "CHART"
    },
    "COLUMN-071bbbad": {
        "children": [
            "ROW-1e064e3c",
            "ROW-afdefba9"
        ],
        "id": "COLUMN-071bbbad",
        "meta": {
            "background": "BACKGROUND_TRANSPARENT",
            "width": 9
        },
        "type": "COLUMN"
    },
    "COLUMN-fe3914b8": {
        "children": [
            "CHART-36bfc934",
            "CHART-37982887"
        ],
        "id": "COLUMN-fe3914b8",
        "meta": {
            "background": "BACKGROUND_TRANSPARENT",
            "width": 2
        },
        "type": "COLUMN"
    },
    "GRID_ID": {
        "children": [
            "ROW-46632bc2",
            "ROW-3fa26c5d",
            "ROW-812b3f13"
        ],
        "id": "GRID_ID",
        "type": "GRID"
    },
    "HEADER_ID": {
        "id": "HEADER_ID",
        "meta": {
            "text": "World's Bank Data"
        },
        "type": "HEADER"
    },
    "ROOT_ID": {
        "children": [
            "GRID_ID"
        ],
        "id": "ROOT_ID",
        "type": "ROOT"
    },
    "ROW-1e064e3c": {
        "children": [
            "COLUMN-fe3914b8",
            "CHART-2d5b6871"
        ],
        "id": "ROW-1e064e3c",
        "meta": {
            "background": "BACKGROUND_TRANSPARENT"
        },
        "type": "ROW"
    },
    "ROW-3fa26c5d": {
        "children": [
            "CHART-b5e05d6f",
            "CHART-0fd0d252"
        ],
        "id": "ROW-3fa26c5d",
        "meta": {
            "background": "BACKGROUND_TRANSPARENT"
        },
        "type": "ROW"
    },
    "ROW-46632bc2": {
        "children": [
            "COLUMN-071bbbad",
            "CHART-17e0f8d8"
        ],
        "id": "ROW-46632bc2",
        "meta": {
            "background": "BACKGROUND_TRANSPARENT"
        },
        "type": "ROW"
    },
    "ROW-812b3f13": {
        "children": [
            "CHART-a4808bba",
            "CHART-e76e9f5f"
        ],
        "id": "ROW-812b3f13",
        "meta": {
            "background": "BACKGROUND_TRANSPARENT"
        },
        "type": "ROW"
    },
    "ROW-afdefba9": {
        "children": [
            "CHART-2ee52f30",
            "CHART-97f4cb48"
        ],
        "id": "ROW-afdefba9",
        "meta": {
            "background": "BACKGROUND_TRANSPARENT"
        },
        "type": "ROW"
    },
    "DASHBOARD_VERSION_KEY": "v2"
}
    """
    )
    pos = json.loads(js)
    update_slice_ids(pos, slices)

    dash.dashboard_title = dash_name
    dash.position_json = json.dumps(pos, indent=4)
    dash.slug = slug

    dash.slices = slices[:-1]
    db.session.merge(dash)
    db.session.commit()
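
A hypothetical invocation, e.g. from a management command. Per the guard at the top of the function, only_metadata=True skips reloading the table data and only refreshes the table reference, slices and dashboard, while force=True reloads the data even if the table already exists:

load_world_bank_health_n_pop(only_metadata=True)   # metadata refresh only
load_world_bank_health_n_pop(force=True)           # reload data regardless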
Example No. 26
from sqlalchemy import Table, Column, Integer, String, DateTime, ForeignKey, LargeBinary, MetaData, Boolean, Interval

metadata = MetaData()

Jobs = Table('jobs', metadata,
    Column('jobid', Integer, primary_key=True),
    Column('torrentid', Integer, ForeignKey('torrents.torrentid')),
    Column('added', DateTime(False)),
    Column('bid', Integer),
    Column('downloaded', Integer),
    Column('size', Integer),
    Column('speed', Integer),
    Column('eta', Interval),
    Column('completed', DateTime(False)),
    Column('userid', Integer, ForeignKey('users.userid')),
    Column('downloadid', Integer, ForeignKey('downloads.downloadid')),
)

Downloads = Table('downloads', metadata,
    Column('downloadid', Integer, primary_key=True),
    Column('start_time', DateTime(False)),
    Column('ip', String),
    Column('link', String, nullable=False),
)

Torrents = Table('torrents', metadata,
    Column('torrentid', Integer, primary_key=True),
    Column('name', String),
    Column('torrent', LargeBinary),
    Column('magnet_link', String),
    Column('infohash', String, unique=True),
)
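
A short sketch of the join these foreign keys enable; since the users table referenced by jobs.userid is defined elsewhere, this only builds and prints the statement (the SQLAlchemy 1.4-style select() is an assumption here):

from sqlalchemy import select

# The join condition is inferred from the jobs.torrentid FK.
stmt = select(Jobs.c.jobid, Torrents.c.name, Jobs.c.eta).select_from(
    Jobs.join(Torrents))
print(stmt)  # roughly: SELECT jobs.jobid, torrents.name, jobs.eta FROM jobs JOIN torrents ...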
Example No. 27
class _SSOUser(Base):
    __tablename__ = 'zato_sso_user'
    __table_args__ = (
        UniqueConstraint('username', name='zato_u_usrn_uq'),
        UniqueConstraint('user_id', name='zato_user_id_uq'),
        Index('zato_u_email_idx', 'email', unique=False, mysql_length={'email':767}),
        Index('zato_u_appr_stat_idx', 'approval_status', unique=False, mysql_length={'approval_status':767}),
        Index('zato_u_dspn_idx', 'display_name_upper', unique=False),
        Index('zato_u_alln_idx', 'first_name_upper', 'middle_name_upper', 'last_name_upper', unique=False),
        Index('zato_u_lastn_idx', 'last_name_upper', unique=False),
        Index('zato_u_sigst_idx', 'sign_up_status', unique=False),
        Index('zato_u_sigctok_idx', 'sign_up_confirm_token', unique=True),
    {})

    # Not exposed publicly, used only for SQL joins
    id = Column(Integer, Sequence('zato_sso_user_id_seq'), primary_key=True)

    # Publicly visible
    user_id = Column(String(191), nullable=False)

    is_active = Column(Boolean(), nullable=False) # Currently unused and always set to True
    is_internal = Column(Boolean(), nullable=False, default=False)
    is_super_user = Column(Boolean(), nullable=False, default=False)
    is_locked = Column(Boolean(), nullable=False, default=False)
    locked_time = Column(DateTime(), nullable=True)

    # Creation metadata, e.g. what this user's remote IP was
    creation_ctx = Column(Text(), nullable=False)

    # Note that this is not an FK - this is on purpose to keep this information around
    # even if parent row is deleted.
    locked_by = Column(String(191), nullable=True)

    approval_status = Column(String(191), nullable=False)
    approval_status_mod_time = Column(DateTime(), nullable=False) # When user was approved or rejected
    approval_status_mod_by = Column(String(191), nullable=False) # Same comment as in locked_by

    # Basic information, always required
    username = Column(String(191), nullable=False)
    password = Column(Text(), nullable=False)
    password_is_set = Column(Boolean(), nullable=False)
    password_must_change = Column(Boolean(), nullable=False)
    password_last_set = Column(DateTime(), nullable=False)
    password_expiry = Column(DateTime(), nullable=False)

    # Sign-up information, possibly used in API workflows
    sign_up_status = Column(String(191), nullable=False)
    sign_up_time = Column(DateTime(), nullable=False)
    sign_up_confirm_time = Column(DateTime(), nullable=True)
    sign_up_confirm_token = Column(String(191), nullable=False)

    # Won't be always needed
    email = Column(Text(), nullable=True)

    # Various cultures don't have a notion of first or last name and display_name is the one that can be used in that case.
    display_name = Column(String(191), nullable=True)
    first_name = Column(String(191), nullable=True)
    middle_name = Column(String(191), nullable=True)
    last_name = Column(String(191), nullable=True)

    # Same as above but upper-cased for look-up / indexing purposes
    display_name_upper = Column(String(191), nullable=True)
    first_name_upper = Column(String(191), nullable=True)
    middle_name_upper = Column(String(191), nullable=True)
    last_name_upper = Column(String(191), nullable=True)

    # Rate limiting
    is_rate_limit_active = Column(Boolean(), nullable=True)
    rate_limit_type = Column(String(40), nullable=True)
    rate_limit_def = Column(Text(), nullable=True)
    rate_limit_check_parent_def = Column(Boolean(), nullable=True)

    # TOTP
    is_totp_enabled = Column(Boolean(), nullable=False, server_default=sa_false())
    totp_key = Column(Text(), nullable=True)
    totp_label = Column(Text(), nullable=True)

    # JSON data is here
    opaque1 = Column(_JSON(), nullable=True)
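
The *_upper columns exist purely so look-ups can be case-insensitive while still hitting the indexes above. A minimal sketch, assuming a configured session:

# Hypothetical lookup; `session` comes from the surrounding application.
name = 'Smith'
matches = session.query(_SSOUser).filter(
    _SSOUser.last_name_upper == name.upper()).all()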
Example No. 28
class Release(db.ModelBase):

    __tablename__ = "releases"

    @declared_attr
    def __table_args__(cls):  # noqa
        return (
            Index("release_created_idx", cls.created.desc()),
            Index("release_name_created_idx", cls.name, cls.created.desc()),
            Index("release_name_idx", cls.name),
            Index("release_pypi_hidden_idx", cls._pypi_hidden),
            Index("release_version_idx", cls.version),
        )

    __repr__ = make_repr("name", "version")

    name = Column(
        Text,
        ForeignKey("packages.name", onupdate="CASCADE"),
        primary_key=True,
    )
    version = Column(Text, primary_key=True)
    is_prerelease = orm.column_property(func.pep440_is_prerelease(version))
    author = Column(Text)
    author_email = Column(Text)
    maintainer = Column(Text)
    maintainer_email = Column(Text)
    home_page = Column(Text)
    license = Column(Text)
    summary = Column(Text)
    description = Column(Text)
    keywords = Column(Text)
    platform = Column(Text)
    download_url = Column(Text)
    _pypi_ordering = Column(Integer)
    _pypi_hidden = Column(Boolean)
    cheesecake_installability_id = Column(
        Integer,
        ForeignKey("cheesecake_main_indices.id"),
    )
    cheesecake_documentation_id = Column(
        Integer,
        ForeignKey("cheesecake_main_indices.id"),
    )
    cheesecake_code_kwalitee_id = Column(
        Integer,
        ForeignKey("cheesecake_main_indices.id"),
    )
    requires_python = Column(Text)
    description_from_readme = Column(Boolean)
    created = Column(
        DateTime(timezone=False),
        nullable=False,
        server_default=sql.func.now(),
    )

    _classifiers = orm.relationship(
        Classifier,
        backref="project_releases",
        secondary=lambda: release_classifiers,
        order_by=Classifier.classifier,
    )
    classifiers = association_proxy("_classifiers", "classifier")

    files = orm.relationship(
        "File",
        backref="release",
        cascade="all, delete-orphan",
        lazy="dynamic",
        order_by=lambda: File.filename,
    )

    dependencies = orm.relationship("Dependency")

    _requires = _dependency_relation(DependencyKind.requires)
    requires = association_proxy("_requires", "specifier")

    _provides = _dependency_relation(DependencyKind.provides)
    provides = association_proxy("_provides", "specifier")

    _obsoletes = _dependency_relation(DependencyKind.obsoletes)
    obsoletes = association_proxy("_obsoletes", "specifier")

    _requires_dist = _dependency_relation(DependencyKind.requires_dist)
    requires_dist = association_proxy("_requires_dist", "specifier")

    _provides_dist = _dependency_relation(DependencyKind.provides_dist)
    provides_dist = association_proxy("_provides_dist", "specifier")

    _obsoletes_dist = _dependency_relation(DependencyKind.obsoletes_dist)
    obsoletes_dist = association_proxy("_obsoletes_dist", "specifier")

    _requires_external = _dependency_relation(DependencyKind.requires_external)
    requires_external = association_proxy("_requires_external", "specifier")

    _project_urls = _dependency_relation(DependencyKind.project_url)
    project_urls = association_proxy("_project_urls", "specifier")

    uploader = orm.relationship(
        "User",
        secondary=lambda: JournalEntry.__table__,
        primaryjoin=lambda:
        ((JournalEntry.name == orm.foreign(Release.name)) &
         (JournalEntry.version == orm.foreign(Release.version)) &
         (JournalEntry.action == "new release")),
        secondaryjoin=lambda: (
            (User.username == orm.foreign(JournalEntry._submitted_by))),
        order_by=lambda: JournalEntry.submitted_date.desc(),
        # TODO: We have uselist=False here which raises a warning because
        # multiple items were returned. This should only be temporary because
        # we should add a nullable FK to JournalEntry so we don't need to rely
        # on ordering and implicitly selecting the first object to make this
        # happen.
        uselist=False,
        viewonly=True,
    )

    @property
    def urls(self):
        _urls = OrderedDict()

        if self.home_page:
            _urls["Homepage"] = self.home_page

        for urlspec in self.project_urls:
            name, url = urlspec.split(",", 1)
            _urls[name] = url

        if self.download_url and "Download" not in _urls:
            _urls["Download"] = self.download_url

        return _urls

    @property
    def has_meta(self):
        return any([
            self.keywords, self.author, self.author_email, self.maintainer,
            self.maintainer_email
        ])
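
The urls property merges home_page, the comma-separated project_urls entries and download_url into one ordered mapping. A small illustration of the parsing rule it applies, with hypothetical values:

# split(",", 1) keeps any commas inside the URL itself intact.
urlspec = "Documentation,https://docs.example.org/?a=1,b=2"
name, url = urlspec.split(",", 1)
assert name == "Documentation"
assert url == "https://docs.example.org/?a=1,b=2"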
Example No. 29
class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    username = Column(String(64), unique=True, index=True)
    email = Column(String(64), unique=True, index=True)
    password_hash = Column(String(128))
    confirmed = Column(Boolean, default=False)
    name = Column(String(64))
    location = Column(String(64))
    about_me = Column(Text())
    occupation = Column(String(64))
    industry = Column(String(64))
    avatar_hash = Column(String(32))
    posts = relationship('Post', backref='author', lazy='dynamic')
    register_date = Column(DateTime(), default=datetime.utcnow)
    last_seen = Column(DateTime(), default=datetime.utcnow)
    role_id = Column(Integer, ForeignKey('roles.id'))

    def __init__(self, **kwargs):
        super(User, self).__init__(**kwargs)
        if self.role is None:
            if self.email == '*****@*****.**':
                self.role = session.query(Role).filter_by(
                    name='Administrator').first()
            if self.role is None:
                self.role = session.query(Role).filter_by(default=True).first()

    def can(self, permission):
        return self.role is not None and (self.role.permission
                                          & permission) == permission

    def is_administrator(self):
        return self.can(Permission.ADMINISTER)

    # def __init__(self, **kwargs):
    #     if self.email is not None and self.avatar_hash is None:
    #         self.avatar_hash = hashlib.md5(self.email.encode('utf-8')).hexdigest()

    @property
    def password(self):
        raise AttributeError('password is not a readable attribute')

    @password.setter
    def password(self, password):
        self.password_hash = generate_password_hash(password)

    def verify_password(self, password):
        return check_password_hash(self.password_hash, password)

    def generate_confirmation_token(self, expiration=3600):
        s = Serializer('some hard to guess key', expires_in=expiration)
        return s.dumps({"confirm": self.id}).decode('utf-8')

    def confirm(self, token):
        s = Serializer('some hard to guess key')
        try:
            data = s.loads(token.encode('utf-8'))
        except Exception:
            return False
        if data.get('confirm') != self.id:
            return False
        self.confirmed = True
        session.add(self)
        session.commit()
        return True

    def gravatar(self, size=200, default='monsterid', rating='g'):
        url = 'http://www.gravatar.com/avatar'
        hash = self.avatar_hash or hashlib.md5(
            self.email.encode('utf-8')).hexdigest()
        return '{url}/{hash}?s={size}&d={default}&r={rating}'.format(
            url=url, hash=hash, size=size, default=default, rating=rating)

    @staticmethod
    def generate_fake(count=100):
        from sqlalchemy.exc import IntegrityError
        from random import seed
        import forgery_py

        seed()
        for i in range(count):
            u = User(email=forgery_py.internet.email_address(),
                     username=forgery_py.internet.user_name(),
                     password=forgery_py.lorem_ipsum.word(),
                     confirmed=True,
                     name=forgery_py.name.full_name(),
                     location=forgery_py.address.city(),
                     about_me=forgery_py.lorem_ipsum.sentence(),
                     register_date=forgery_py.date.date(True))
            session.add(u)
            try:
                session.commit()
            except IntegrityError:
                session.rollback()
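
A sketch of the confirmation round-trip, assuming a persisted user and the itsdangerous Serializer import the snippet relies on:

# Hypothetical flow; `session` and the user row come from the application.
u = session.query(User).filter_by(username='alice').first()
token = u.generate_confirmation_token(expiration=3600)
assert u.confirm(token)  # sets confirmed=True and commits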
Example No. 30
    # requires: from datetime import datetime
    def is_late(self):
        # DateTime is the SQLAlchemy column type, not a clock; compare
        # against datetime.now() and test the boolean column directly.
        return datetime.now() > self.deadline and not self.delivered
Example No. 31
class DBGrant(Base):
    """
    SAS DB Grant class
    """
    __tablename__ = "grants"
    id = Column(Integer, primary_key=True, autoincrement=True)
    state_id = Column(
        Integer,
        ForeignKey(
            "grant_states.id",
            ondelete="CASCADE",
        ),
        nullable=False,
    )
    cbsd_id = Column(Integer, ForeignKey("cbsds.id", ondelete="CASCADE"))
    channel_id = Column(Integer, ForeignKey("channels.id", ondelete="CASCADE"))
    grant_id = Column(String, nullable=False)
    grant_expire_time = Column(DateTime(timezone=True))
    transmit_expire_time = Column(DateTime(timezone=True))
    heartbeat_interval = Column(Integer)
    last_heartbeat_request_time = Column(DateTime(timezone=True))
    channel_type = Column(String)
    created_date = Column(
        DateTime(timezone=True),
        nullable=False,
        server_default=now(),
    )
    updated_date = Column(
        DateTime(timezone=True),
        server_default=now(),
        onupdate=now(),
    )

    state = relationship(
        "DBGrantState",
        back_populates="grants",
        cascade="all, delete",
        passive_deletes=True,
    )
    responses = relationship(
        "DBResponse",
        back_populates="grant",
        cascade="all, delete",
        passive_deletes=True,
    )
    cbsd = relationship(
        "DBCbsd",
        back_populates="grants",
        cascade="all, delete",
        passive_deletes=True,
    )
    channel = relationship(
        "DBChannel",
        back_populates="grants",
        cascade="all, delete",
        passive_deletes=True,
    )

    def __repr__(self):
        """
        Return string representation of DB object
        """
        class_name = self.__class__.__name__
        state_name = self.state.name
        return f"<{class_name}(id='{self.id}', " \
            f"state='{state_name}', " \
            f"cbsd_id='{self.cbsd_id}', " \
            f"grant_id='{self.grant_id}', " \
            f"grant_expire_time='{self.grant_expire_time}', " \
            f"transmit_expire_time='{self.transmit_expire_time}', " \
            f"heartbeat_interval='{self.heartbeat_interval}', " \
            f"last_heartbeat_request_time='{self.last_heartbeat_request_time}', " \
            f"channel_type='{self.channel_type}', " \
            f"created_date='{self.created_date}' " \
            f"updated_date='{self.updated_date}')>"
Example No. 32
from datetime import datetime
from time import mktime

from sqlalchemy import DateTime, Integer
from sqlalchemy.types import TypeDecorator

class IntegerDateTime(TypeDecorator):
    """
    Used for working with epoch timestamps.
 
    Converts datetimes into epoch on the way in.
    Converts epoch timestamps to datetimes on the way out.
    """
    impl = Integer()

    def process_bind_param(self, value, dialect):
        # datetime -> epoch seconds on the way in
        return int(mktime(value.timetuple())) if value is not None else None

    def process_result_value(self, value, dialect):
        # epoch seconds -> datetime on the way out
        return datetime.fromtimestamp(value) if value is not None else None

QuasselDateTime = DateTime()
QuasselDateTime = QuasselDateTime.with_variant(IntegerDateTime(), 'sqlite')

class Base(object):
    def _filter_properties(self):
        # this function decides which properties should be exposed through repr
        # todo: don't show methods
        properties = self.__dict__.keys()
        for prop in properties:
            if not prop.startswith('_'):
                yield (prop, getattr(self, prop))
        return

    def __repr__(self):
        prop_tuples = self._filter_properties()
        prop_string_tuples = ('{0}={1}'.format(*prop) for prop in prop_tuples)
        return '{0}({1})'.format(type(self).__name__,
                                 ', '.join(prop_string_tuples))
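
A sketch of attaching the variant type above to a column, so SQLite stores epoch integers while every other backend keeps its native timestamp type; the table itself is hypothetical:

from sqlalchemy import Column, Integer, MetaData, Table

messages = Table(
    'messages', MetaData(),
    Column('id', Integer, primary_key=True),
    Column('sent_at', QuasselDateTime),  # INTEGER on SQLite, TIMESTAMP elsewhere
)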
Example No. 33
from datetime import datetime

from sqlalchemy import (Column, DateTime, ForeignKey, Integer, MetaData,
                        Numeric, String, Table)

metadata = MetaData()

cookies = Table('cookies', metadata,
                Column('cookie_id', Integer(), primary_key=True),
                Column('cookie_name', String(50), index=True),
                Column('cookie_recipe_url', String(255)),
                Column('cookie_sku', String(55)),
                Column('quantity', Integer()),
                Column('unit_cost', Numeric(12, 2)))

users = Table(
    'users', metadata,
    Column('user_id', Integer(), primary_key=True),
    Column('customer_number', Integer(), autoincrement=True),
    Column('username', String(15), nullable=False, unique=True),
    Column('email_address', String(255), nullable=False),
    Column('phone', String(20), nullable=False),
    Column('password', String(25), nullable=False),
    Column('created_on', DateTime(), default=datetime.now),
    Column('updated_on',
           DateTime(),
           default=datetime.now,
           onupdate=datetime.now))

orders = Table('orders', metadata,
               Column('order_id', Integer(), primary_key=True),
               Column('user_id', ForeignKey('users.user_id')))

line_items = Table('line_items', metadata,
                   Column('line_items_id', Integer(), primary_key=True),
                   Column('order_id', ForeignKey('orders.order_id')),
                   Column('cookie_id', ForeignKey('cookies.cookie_id')),
                   Column('quantity', Integer()),
                   Column('extended_cost', Numeric(12, 2)))
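
Finally, a brief insert/select round-trip against the tables above; the in-memory engine is a stand-in, the 1.4-style insert()/select() calls are assumptions, and Decimal gives Numeric(12, 2) an exact value:

from decimal import Decimal
from sqlalchemy import create_engine, insert, select

engine = create_engine('sqlite://')
metadata.create_all(engine)

with engine.begin() as conn:
    conn.execute(insert(cookies).values(
        cookie_name='chocolate chip',
        cookie_sku='CC01',
        quantity=12,
        unit_cost=Decimal('0.50'),
    ))
    for row in conn.execute(select(cookies.c.cookie_name, cookies.c.quantity)):
        print(row)  # -> ('chocolate chip', 12)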