# -*- coding: utf-8 -*-
"""The application's model objects"""

from zope.sqlalchemy import ZopeTransactionExtension
from sqlalchemy.orm import scoped_session, sessionmaker
#from sqlalchemy import MetaData
from sqlalchemy.ext.declarative import declarative_base
from history_meta import VersionedListener

# Global session manager: DBSession() returns the Thread-local
# session object appropriate for the current web request.
maker = sessionmaker(
    autoflush=True,
    autocommit=False,
    extension=[ZopeTransactionExtension(),
               VersionedListener()])
DBSession = scoped_session(maker)

# Base class for all of our model classes: By default, the data model is
# defined with SQLAlchemy's declarative extension, but if you need more
# control, you can switch to the traditional method.
DeclarativeBase = declarative_base()

# There are two convenient ways for you to spare some typing.
# You can have a query property on all your model classes by doing this:
# DeclarativeBase.query = DBSession.query_property()
# Or you can use a session-aware mapper as it was used in TurboGears 1:
# DeclarativeBase = declarative_base(mapper=DBSession.mapper)

# Global metadata.
# The default metadata is the one from the declarative base.
metadata = DeclarativeBase.metadata
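
A minimal usage sketch for this template (the Movie model and the in-memory
SQLite engine are hypothetical additions, not part of the original file):

from sqlalchemy import create_engine, Column, Integer, Unicode
import transaction

class Movie(DeclarativeBase):
    __tablename__ = 'movies'
    uid = Column(Integer, primary_key=True)
    title = Column(Unicode(255))

engine = create_engine('sqlite://')
DeclarativeBase.metadata.create_all(engine)
DBSession.configure(bind=engine)

DBSession.add(Movie(title=u'Metropolis'))
transaction.commit()  # ZopeTransactionExtension ties the session to this commit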
Example #2
# -*- coding: utf-8 -*-
"""The application's model objects"""

from zope.sqlalchemy import ZopeTransactionExtension
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base

# Global session manager: DBSession() returns the Thread-local
# session object appropriate for the current web request.
maker = sessionmaker(autoflush=True,
                     autocommit=False,
                     extension=ZopeTransactionExtension())
DBSession = scoped_session(maker)

# Base class for all of our model classes: By default, the data model is
# defined with SQLAlchemy's declarative extension, but if you need more
# control, you can switch to the traditional method.
DeclarativeBase = declarative_base()

# There are two convenient ways for you to spare some typing.
# You can have a query property on all your model classes by doing this:
# DeclarativeBase.query = DBSession.query_property()
# Or you can use a session-aware mapper as it was used in TurboGears 1:
# DeclarativeBase = declarative_base(mapper=DBSession.mapper)

# Global metadata.
# The default metadata is the one from the declarative base.
metadata = DeclarativeBase.metadata

# If you have multiple databases with overlapping table names, you'll need a
# metadata for each database. Feel free to rename 'metadata2'.
Example #3
from sqlalchemy import (Column, Index, Integer, BigInteger, Text, Date, String,
                        DECIMAL, ForeignKey)

from sqlalchemy.ext.declarative import declarative_base

from sqlalchemy.orm import (scoped_session, sessionmaker, relationship,
                            backref)

from zope.sqlalchemy import ZopeTransactionExtension
from Enums import StatusConsolidacao

# expire_on_commit=False keeps objects usable after the transaction commits,
# avoiding DetachedInstanceError when they are accessed later.
DBSession = scoped_session(
    sessionmaker(extension=ZopeTransactionExtension(), expire_on_commit=False))
Base = declarative_base()

#Index('my_index', MyModel.name, unique=True, mysql_length=255)


# Domain classes ///////////////////////////////
class ArquivoAdquirente(Base):
    __tablename__ = 'arquivo_adquirente'
    id = Column(Integer, primary_key=True)
    nome = Column(Text)
    destinatario = Column(Text)
    cadeia = Column(Text)
    adquirente = Column(Text)
    extrato = Column(Text)
    data_arquivo = Column(Date)
    data_criacao = Column(Date)
    periodo_inicial = Column(Date)
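
The expire_on_commit=False note above is the key design choice here. A hedged
illustration (assuming DBSession has been bound to an engine, which the
excerpt does not show):

import transaction

arquivo = DBSession.query(ArquivoAdquirente).first()
transaction.commit()
# With the default expire_on_commit=True, the commit above would expire the
# instance, and this attribute access would raise DetachedInstanceError.
print(arquivo.nome)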
Example #4
def load_sqlalchemy_db():
    print("\nLoading pickled database %s" % sys.argv[2])
    db = open(sys.argv[2], 'rb')
    data = pickle.load(db)

    import transaction
    from bodhi.models import Base
    from bodhi.models import Release, Update, Build, Comment, User, Bug, CVE
    from bodhi.models import Package, Group
    from bodhi.models import UpdateType, UpdateStatus, UpdateRequest
    from sqlalchemy import create_engine
    from sqlalchemy.orm.exc import NoResultFound

    # Caches for quick lookup
    releases = {}
    packages = {}
    users = {}
    critpath = {}

    aliases = []

    engine = create_engine(bodhi.config['sqlalchemy.url'])
    Session = scoped_session(
        sessionmaker(extension=ZopeTransactionExtension()))
    Session.configure(bind=engine)
    db = Session()

    # Allow filtering of releases to load
    whitelist = []
    if '--release' in sys.argv:
        for r in sys.argv[sys.argv.index('--release') + 1].split(','):
            whitelist.append(r)
        print('whitelist = %r' % whitelist)

    # Legacy format was just a list of update dictionaries
    # Now we'll pull things out into an organized dictionary:
    # {'updates': [], 'releases': []}
    if isinstance(data, dict):
        for release in data['releases']:
            try:
                db.query(Release).filter_by(name=release['name']).one()
            except NoResultFound:
                del release['metrics']
                del release['locked']
                r = Release(**release)
                r.stable_tag = "%s-updates" % r.dist_tag
                r.testing_tag = "%s-testing" % r.stable_tag
                r.candidate_tag = "%s-candidate" % r.stable_tag
                r.pending_testing_tag = "%s-pending" % r.testing_tag
                r.pending_stable_tag = "%s-pending" % r.stable_tag
                r.override_tag = "%s-override" % r.dist_tag
                db.add(r)
        data = data['updates']

    progress = ProgressBar(widgets=[SimpleProgress(), Percentage(), Bar()])

    for u in progress(data):
        try:
            release = releases[u['release'][0]]
        except KeyError:
            try:
                release = db.query(Release).filter_by(
                    name=u['release'][0]).one()
            except NoResultFound:
                release = Release(name=u['release'][0],
                                  long_name=u['release'][1],
                                  id_prefix=u['release'][2],
                                  dist_tag=u['release'][3])
                db.add(release)
            releases[u['release'][0]] = release
            if whitelist:
                if release.name in whitelist:
                    critpath[release.name] = get_critpath_pkgs(
                        release.name.lower())
                    print('%s critpath packages for %s' %
                          (len(critpath[release.name]), release.name))
            else:
                critpath[release.name] = get_critpath_pkgs(
                    release.name.lower())
                print('%s critpath packages for %s' %
                      (len(critpath[release.name]), release.name))

        if whitelist and release.name not in whitelist:
            continue

        ## Backwards compatibility
        request = u['request']
        if u['request'] == 'move':
            u['request'] = 'stable'
        elif u['request'] == 'push':
            u['request'] = 'testing'
        elif u['request'] == 'unpush':
            u['request'] = 'obsolete'
        if u['approved'] not in (True, False):
            u['approved'] = None
        if 'update_id' in u:
            u['updateid'] = u['update_id']
            u['alias'] = u['update_id']

            if u['alias']:
                split = u['alias'].split('-')
                year, id = split[-2:]
                aliases.append((int(year), int(id)))

        if 'date_modified' not in u:
            u['date_modified'] = None

        # Port to new enum types
        if u['request']:
            if u['request'] == 'stable':
                u['request'] = UpdateRequest.stable
            elif u['request'] == 'testing':
                u['request'] = UpdateRequest.testing
            else:
                raise Exception("Unknown request: %s" % u['request'])

        if u['type'] == 'bugfix':
            u['type'] = UpdateType.bugfix
        elif u['type'] == 'newpackage':
            u['type'] = UpdateType.newpackage
        elif u['type'] == 'enhancement':
            u['type'] = UpdateType.enhancement
        elif u['type'] == 'security':
            u['type'] = UpdateType.security
        else:
            raise Exception("Unknown type: %r" % u['type'])

        if u['status'] == 'pending':
            u['status'] = UpdateStatus.pending
        elif u['status'] == 'testing':
            u['status'] = UpdateStatus.testing
        elif u['status'] == 'obsolete':
            u['status'] = UpdateStatus.obsolete
        elif u['status'] == 'stable':
            u['status'] = UpdateStatus.stable
        elif u['status'] == 'unpushed':
            u['status'] = UpdateStatus.unpushed
        else:
            raise Exception("Unknown status: %r" % u['status'])

        try:
            update = db.query(Update).filter_by(title=u['title']).one()
            continue
        except NoResultFound:
            update = Update(
                title=u['title'],
                date_submitted=u['date_submitted'],
                date_pushed=u['date_pushed'],
                date_modified=u['date_modified'],
                release=release,
                old_updateid=u['updateid'],
                alias=u['updateid'],
                pushed=u['pushed'],
                notes=u['notes'],
                karma=u['karma'],
                type=u['type'],
                status=u['status'],
                request=u['request'],
            )
            #approved=u['approved'])
            db.add(update)
            db.flush()

            try:
                user = users[u['submitter']]
            except KeyError:
                try:
                    user = db.query(User).filter_by(name=u['submitter']).one()
                except NoResultFound:
                    user = User(name=u['submitter'])
                    db.add(user)
                    db.flush()
                users[u['submitter']] = user
            user.updates.append(update)

        ## Create Package and Build objects
        for pkg, nvr in u['builds']:
            try:
                package = packages[pkg]
            except KeyError:
                try:
                    package = db.query(Package).filter_by(name=pkg).one()
                except NoResultFound:
                    package = Package(name=pkg)
                    db.add(package)
                packages[pkg] = package
            if package.name in critpath[update.release.name]:
                update.critpath = True
            try:
                build = db.query(Build).filter_by(nvr=nvr).one()
            except NoResultFound:
                build = Build(nvr=nvr, package=package)
                db.add(build)
                update.builds.append(build)

        ## Create all Bugzilla objects for this update
        for bug_num, bug_title, security, parent in u['bugs']:
            try:
                bug = db.query(Bug).filter_by(bug_id=bug_num).one()
            except NoResultFound:
                bug = Bug(bug_id=bug_num,
                          security=security,
                          parent=parent,
                          title=bug_title)
                db.add(bug)
            update.bugs.append(bug)

        ## Create all CVE objects for this update
        for cve_id in u['cves']:
            try:
                cve = db.query(CVE).filter_by(cve_id=cve_id).one()
            except NoResultFound:
                cve = CVE(cve_id=cve_id)
                db.add(cve)
            update.cves.append(cve)

        ## Create all Comments for this update
        for c in u['comments']:
            try:
                timestamp, author, text, karma, anonymous = c
            except ValueError:
                timestamp, author, text, karma = c
                anonymous = '@' in author

            comment = Comment(timestamp=timestamp,
                              text=text,
                              karma=karma,
                              anonymous=anonymous)
            db.add(comment)
            db.flush()
            update.comments.append(comment)
            if anonymous:
                name = u'anonymous'
            else:
                name = author
            group = None
            if not anonymous and ' (' in name:
                split = name.split(' (')
                name = split[0]
                group = split[1][:-1]
                assert group, name
            try:
                user = users[name]
            except KeyError:
                try:
                    user = db.query(User).filter_by(name=name).one()
                except NoResultFound:
                    user = User(name=name)
                    db.add(user)
                    db.flush()
                users[name] = user

            comment.user = user

            if group:
                try:
                    group = db.query(Group).filter_by(name=group).one()
                except NoResultFound:
                    group = Group(name=group)
                    db.add(group)
                    db.flush()
                user.groups.append(group)

        db.flush()

    # Hack to get the Bodhi2 alias generator working with bodhi1 data.
    # The new generator assumes that the alias is assigned at submission time, as opposed to push time.
    year, id = max(aliases)
    print('Highest alias = %r %r' % (year, id))
    up = db.query(Update).filter_by(alias=u'FEDORA-%s-%s' % (year, id)).one()
    print(up.title)
    up.date_submitted = up.date_pushed
    db.flush()

    transaction.commit()

    print("\nDatabase migration complete!")
    print(" * %d updates" % db.query(Update).count())
    print(" * %d builds" % db.query(Build).count())
    print(" * %d comments" % db.query(Comment).count())
    print(" * %d users" % db.query(User).count())
    print(" * %d bugs" % db.query(Bug).count())
    print(" * %d CVEs" % db.query(CVE).count())
Example #5
    String,
    Unicode,
    UnicodeText,
)
from sqlalchemy.dialects.postgresql import INET
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import (
    relationship,
    sessionmaker,
)
from zope.sqlalchemy import ZopeTransactionExtension

from logcabin.models.types import URLSegment, EmailAddress
from logcabin.lib.formats import camel_registry

Session = sessionmaker(extension=ZopeTransactionExtension())
Base = declarative_base()


class Resource(object):
    __acl__ = (
        (Allow, Authenticated, "view"),
        (Allow, "verified", "import"),
        (Allow, "admin", "admin"),
    )


class User(Base, Resource):
    __tablename__ = "users"
    __table_args__ = (
        CheckConstraint("""
from sqlalchemy.types import TypeDecorator
from sqlalchemy.types import VARCHAR
from zope.sqlalchemy import ZopeTransactionExtension

from sandglass.time.security import Administrators
from sandglass.time.security import PERMISSION
from sandglass.time.utils import get_app_namespace
from sandglass.time.utils import mixedmethod

META = MetaData()

# Check if unittest is being run before creating session
if not os.environ.get('TESTING'):
    DBSESSION = scoped_session(
        # Integrate transaction manager with SQLAlchemy
        sessionmaker(extension=ZopeTransactionExtension())
    )
else:
    # When unittests are run, use a non-scoped session
    DBSESSION = sessionmaker(extension=ZopeTransactionExtension())

# Dictionary used to map model class names to class definitions
MODEL_REGISTRY = weakref.WeakValueDictionary()

# Default ACL rules for all models.
# Rules allow full access to the admin group and deny access
# to anyone that didn't match a previous access rule.
DEFAULT_ACL = [
    (Allow, Administrators, ALL_PERMISSIONS),
    # Last rule to deny all if no rule matched before
    (Deny, Everyone, ALL_PERMISSIONS)
Example #7
def ping_connection(dbapi_connection, connection_record, connection_proxy):
    cursor = dbapi_connection.cursor()
    try:
        cursor.execute("SELECT 1")
    except:  # pragma: no cover
        # optional - dispose the whole pool
        # instead of invalidating one at a time
        connection_proxy._pool.dispose()

        # raise DisconnectionError - pool will try
        # connecting again up to three times before raising.
        raise exc.DisconnectionError()
    cursor.close()
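
# Wiring the listener up (a sketch, not part of the original snippet): the
# signature above matches SQLAlchemy's pool 'checkout' event; exc is used
# above but its import was cut from the excerpt.
from sqlalchemy import event, exc
from sqlalchemy.pool import Pool

event.listen(Pool, 'checkout', ping_connection)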


DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
VersionedDBSession = scoped_session(versioned_session(
    sessionmaker(autoflush=False, extension=ZopeTransactionExtension())))


class JSONEncodedDict(TypeDecorator):
    """Represents an immutable structure as a json-encoded string.

    Usage::

        JSONEncodedDict(255)
    """
    impl = VARCHAR

    def process_bind_param(self, value, dialect):
        if value is not None:
Example #8
# Import only version 1 API with "import *"
__all__ = ["add_engine", "get_base", "get_session", "get_engine"]

# VERSION 2 API

class AttributeContainer(object):
    def _clear(self):
        """Delete all instance attributes. For internal use only."""
        self.__dict__.clear()

engines = AttributeContainer()
bases = AttributeContainer()
sessions = AttributeContainer()

_zte = ZopeTransactionExtension()

def set_default_engine(engine):
    engines.default = engine
    bases.default.metadata.bind = engine
    sessions.default.remove()
    sessions.default.configure(bind=engine)
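
# Usage sketch (hypothetical URL; not part of the original module):
#   from sqlalchemy import create_engine
#   set_default_engine(create_engine('postgresql:///app'))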

def reset():
    """Restore the initial module state, deleting all modifications.
    
    This function is mainly for unit tests and debugging. It undoes all
    customizations and reverts to the initial module state.
    """
    engines._clear()
    bases._clear()
Example #9
def _make_session():
    return scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
Example #10
# -*- coding: utf-8 -*-
from __future__ import absolute_import

from zope.sqlalchemy import ZopeTransactionExtension
from sqlalchemy.orm import scoped_session, sessionmaker

# from sqlalchemy import MetaData
from sqlalchemy.ext.declarative import declarative_base

# Global session manager: DBSession() returns the Thread-local
# session object appropriate for the current web request.
maker = sessionmaker(
    autoflush=True, autocommit=False, extension=ZopeTransactionExtension()
)
DBSession = scoped_session(maker)

# Base class for all of our model classes: By default, the data model is
# defined with SQLAlchemy's declarative extension, but if you need more
# control, you can switch to the traditional method.
DeclarativeBase = declarative_base()

# There are two convenient ways for you to spare some typing.
# You can have a query property on all your model classes by doing this:
DeclarativeBase.query = DBSession.query_property()
DeclarativeBase.get = classmethod(lambda cls, ident: cls.query.get(ident))
# Or you can use a session-aware mapper as it was used in TurboGears 1:
# DeclarativeBase = declarative_base(mapper=DBSession.mapper)
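
With the query property and the get classmethod in place, models can be
queried without referencing DBSession directly (a sketch: the Page model, its
columns, and a bound engine are assumptions, not part of the original file):

from sqlalchemy import Column, Integer, Unicode

class Page(DeclarativeBase):
    __tablename__ = 'pages'
    uid = Column(Integer, primary_key=True)
    slug = Column(Unicode(50), unique=True)

page = Page.query.filter_by(slug=u'about').first()
same_page = Page.get(page.uid)  # the classmethod defined above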
Example #11
from pyramid.config import Configurator
from pyramid.events import BeforeRender
from sqlalchemy.orm import relationship, sessionmaker, scoped_session
from sqlalchemy.sql import func
from pyramid.session import SignedCookieSessionFactory
from sqlalchemy_mptt import mptt_sessionmaker
from zope.sqlalchemy import ZopeTransactionExtension
from sqlalchemy.ext.declarative import declarative_base

from pyramid_pages.models import FlatPageMixin, MpttPageMixin, RedirectMixin
from pyramid_pages.resources import (BasePageResource, resource_of_node,
                                     resources_of_config)

Base = declarative_base()
DBSession = scoped_session(
    mptt_sessionmaker(sessionmaker(extension=ZopeTransactionExtension())))

CONFIG_SQLALCHEMY_URL = 'sqlalchemy.url'
CONFIG_PYRAMID_PAGES_MODELS = 'pyramid_pages.models'
CONFIG_PYRAMID_PAGES_DBSESSION = 'pyramid_pages.dbsession'


class BasePage(Base, RedirectMixin):
    __tablename__ = 'base_pages'
    id = Column(Integer, primary_key=True)
    page_type = Column(String(50))

    __mapper_args__ = {
        'polymorphic_identity': 'base_page',
        'polymorphic_on': page_type,
        'with_polymorphic': '*'
Example #12
def load_references():
    # create session
    engine = create_engine(os.environ['NEX2_URI'])
    session_factory = sessionmaker(bind=engine,
                                   extension=ZopeTransactionExtension())
    db_session = scoped_session(session_factory)
    # some preparation
    pmid_to_reference_id = dict([
        (x.pmid, x.dbentity_id)
        for x in db_session.query(Referencedbentity).all()
    ])
    pmid_to_curation_id = dict([
        (x.pmid, x.curation_id)
        for x in db_session.query(Referencetriage).all()
    ])
    pmid_to_refdeleted_id = dict([
        (x.pmid, x.referencedeleted_id)
        for x in db_session.query(Referencedeleted).all()
    ])

    # get gene names to highlight
    gene_list = []
    all_loci = db_session.query(Locusdbentity).all()
    for x in all_loci:
        if len(x.systematic_name) > 12 or len(x.systematic_name) < 4:
            continue
        gene_list.append(str(x.systematic_name.upper()))
        if x.gene_name and x.gene_name != x.systematic_name:
            gene_list.append(str(x.gene_name.upper()))
    alias_to_name = {}
    for x in db_session.query(LocusAlias).all():
        if x.alias_type not in ['Uniform', 'Non-uniform']:
            continue
        if len(x.display_name) < 4:
            continue
        name = x.locus.gene_name if x.locus.gene_name else x.locus.systematic_name
        alias_to_name[x.display_name] = name
    # get new PMIDs
    log.info(str(datetime.now()))
    log.info("Getting PMID list...")
    pmid_list = get_pmid_list(TERMS, RETMAX, DAY)
    pmids = []
    for pmid in pmid_list:
        if int(pmid) in pmid_to_reference_id:
            continue
        if int(pmid) in pmid_to_curation_id:
            continue
        if int(pmid) in pmid_to_refdeleted_id:
            continue
        pmids.append(pmid)

    if len(pmids) == 0:
        log.info("No new papers")
        return
    # get data for each PMID entry
    log.info(str(datetime.now()))
    log.info("Getting Pubmed records...")
    records = get_pubmed_record(','.join(pmids))
    i = 1
    for rec in records:
        rec_file = StringIO(rec)
        record = Medline.read(rec_file)
        pmid = record.get('PMID')
        pubmed_url = 'http://www.ncbi.nlm.nih.gov/pubmed/' + str(pmid)
        doi_url = ""
        if record.get('AID'):
            # ['S0167-7012(17)30042-8 [pii]', '10.1016/j.mimet.2017.02.002 [doi]']
            doi = None
            for id in record['AID']:
                if id.endswith('[doi]'):
                    doi = id.replace(' [doi]', '')
                    break
            if doi:
                doi_url = "/".join(['http://dx.doi.org', doi])
        title = record.get('TI', '')
        authors = record.get('AU', [])
        pubdate = record.get('DP', '')  # 'PubDate': '2012 Mar 20'
        year = pubdate.split(' ')[0]
        journal = record.get('TA', '')
        volume = record.get('VI', '')
        issue = record.get('IP', '')
        pages = record.get('PG', '')
        citation = set_cite(title, authors, year, journal, volume, issue,
                            pages)
        abstract = record.get('AB', '')
        gene_names = extract_gene_names(abstract, gene_list, alias_to_name)

        # pmid = int(record.get('Id'))
        # pubmed_url = 'https://www.ncbi.nlm.nih.gov/pubmed/' + str(pmid)
        # doi_url = ""
        # if record.get('DOI'):
        #     doi = record.get('DOI')
        #     doi_url = "/".join(['http://dx.doi.org', doi])
        # title = record.get('Title', '')
        # authors = record.get('AuthorList', [])
        # pubdate = record.get('PubDate', '')  # 'PubDate': '2012 Mar 20'
        # year = pubdate.split(' ')[0]
        # journal = record.get('FullJournalName', '')
        # volume = record.get('Volume', '')
        # issue = record.get('Issue', '')
        # pages = record.get('Pages', '')
        # citation = set_cite(title, authors, year, journal, volume, issue, pages)
        # abstract = get_abstract(pmid)
        # gene_names = extract_gene_names(abstract, gene_list, alias_to_name)

        # insert formatted data to DB
        insert_reference(db_session, pmid, citation, doi_url, abstract,
                         " ".join(gene_names))
    log.info("Done!")
Example #13
from datetime import datetime, timedelta
import json
import random
import logging
import re
import hashlib
import requests
import socket
from beaker import cache
from urllib.parse import urlencode
from urllib.request import urlopen

log = logging.getLogger(__name__)

DBSession = scoped_session(
    sessionmaker(extension=ZopeTransactionExtension(keep_session=False)))
prng = random.SystemRandom()


class Base(object):
    @classmethod
    def one(cls, **kwargs):
        return DBSession.query(cls).filter_by(**kwargs).one()

    @classmethod
    def all(cls, **kwargs):
        return DBSession.query(cls).filter_by(**kwargs).all()


Base = declarative_base(cls=Base)
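
A usage sketch for the one/all helpers (the Token model, its columns, and the
Column imports are assumptions, not part of the original snippet):

from sqlalchemy import Column, Integer, Text

class Token(Base):
    __tablename__ = 'tokens'
    uid = Column(Integer, primary_key=True)
    value = Column(Text, unique=True)

token = Token.one(value='abc123')  # exactly one match or an exception
tokens = Token.all()               # every matching row as a list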
Example #14
# -*- coding: utf-8 -*-
"""The application's model objects"""

from zope.sqlalchemy import ZopeTransactionExtension
from sqlalchemy.orm import scoped_session, sessionmaker
#from sqlalchemy import MetaData
from sqlalchemy.ext.declarative import declarative_base
from itweb.lib.history_meta import VersionedListener

# Global session manager: DBSession() returns the Thread-local
# session object appropriate for the current web request.
maker = sessionmaker(autoflush=True, autocommit=False,
                     extension=[ZopeTransactionExtension(), VersionedListener()])
DBSession = scoped_session(maker)

# Base class for all of our model classes: By default, the data model is
# defined with SQLAlchemy's declarative extension, but if you need more
# control, you can switch to the traditional method.
DeclarativeBase = declarative_base()

# There are two convenient ways for you to spare some typing.
# You can have a query property on all your model classes by doing this:
# DeclarativeBase.query = DBSession.query_property()
# Or you can use a session-aware mapper as it was used in TurboGears 1:
# DeclarativeBase = declarative_base(mapper=DBSession.mapper)

# Global metadata.
# The default metadata is the one from the declarative base.
metadata = DeclarativeBase.metadata

# If you have multiple databases with overlapping table names, you'll need a
# metadata for each database. Feel free to rename 'metadata2'.
Example #15
    def setUp(self):
        Base.metadata.drop_all(self.engine)
        Base.metadata.create_all(self.engine)
        Base.metadata.bind = self.engine
        self.session_maker = sessionmaker(bind=self.engine,
                                          extension=ZopeTransactionExtension())
Example #16
# pyramidapp/models.py
import datetime
from sqlalchemy import Column, Integer, Text, DateTime
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker
from zope.sqlalchemy import ZopeTransactionExtension

DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension())) #, autocommit=True))
Base = declarative_base()

class LogModel(Base):
    __tablename__ = 'urlaccess'
    id = Column(Integer, primary_key=True)
    idsession = Column(Text)
    endpoint = Column(Text)
    datahora = Column(DateTime, default=datetime.datetime.utcnow)

    def __init__(self, idsession, endpoint):
        self.idsession = idsession
        self.endpoint = endpoint
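
A usage sketch (the in-memory engine is a hypothetical addition, not part of
the original module):

from sqlalchemy import create_engine
import transaction

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
DBSession.configure(bind=engine)

DBSession.add(LogModel('session-42', '/home'))
transaction.commit()  # the extension commits the session with the transaction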
Example #17
from sqlalchemy.orm import scoped_session
from sqlalchemy.orm import sessionmaker
from zope.sqlalchemy import ZopeTransactionExtension
from sqlalchemy.ext.declarative import declarative_base

DBSession = scoped_session(
    sessionmaker(extension=ZopeTransactionExtension(), autoflush=False))
Base = declarative_base()
Example #18
def main(argv=sys.argv):
    settings_file = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                 'loadtests.ini')
    settings = get_appsettings(settings_file)

    engine = engine_from_config(settings, 'sqlalchemy.')

    logging.basicConfig()
    logging.getLogger('sqlalchemy.engine').setLevel(logging.WARN)

    Session = sessionmaker(extension=ZopeTransactionExtension())  # noqa
    session = Session(bind=engine)

    with transaction.manager:
        for i in range(1, NB_USERS_TO_CREATE + 1):
            username = BASE_USERNAME + str(i)
            password = username
            email = username + '@foo.bar'
            lang = 'fr'

            profile = UserProfile(
                categories=['amateur'],
                geometry=DocumentGeometry(version=1,
                                          geom=None,
                                          geom_detail=None),
                locales=[DocumentLocale(lang=lang, title='')])
            user = User(username=username,
                        forum_username=username,
                        name=username,
                        email=email,
                        lang=lang,
                        password=password,
                        profile=profile)
            # make sure user account is directly validated
            user.clear_validation_nonce()
            user.email_validated = True

            session.add(user)
            session.flush()

            # also create a version for the profile
            # code from DocumentRest.create_new_version
            archive = user.profile.to_archive()
            archive_locales = user.profile.get_archive_locales()
            archive_geometry = user.profile.get_archive_geometry()
            meta_data = HistoryMetaData(comment='creation', user_id=user.id)
            versions = []
            for locale in archive_locales:
                version = DocumentVersion(
                    document_id=user.profile.document_id,
                    lang=locale.lang,
                    document_archive=archive,
                    document_locales_archive=locale,
                    document_geometry_archive=archive_geometry,
                    history_metadata=meta_data)
                versions.append(version)
            session.add(archive)
            session.add_all(archive_locales)
            session.add(meta_data)
            session.add_all(versions)
            session.flush()

    print('Created %d users with base username `%s`' %
          (NB_USERS_TO_CREATE, BASE_USERNAME))
Example #19
def main(argv=sys.argv):
    from fixtures.data import trees, geo
    from fixtures.styles_and_cultures import styles_and_cultures
    from fixtures.materials import materials
    from fixtures.eventtypes import eventtypes
    from fixtures.heritagetypes import heritagetypes
    from fixtures.periods import periods
    from fixtures.species import species
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)
    engine = engine_from_config(settings, 'sqlalchemy.')
    session_maker = sessionmaker(bind=engine,
                                 extension=ZopeTransactionExtension())
    db_session = session_maker()
    with transaction.manager:
        import_provider(
            trees,
            ConceptScheme(id=1,
                          uri='urn:x-skosprovider:trees',
                          labels=[
                              Label('Verschillende soorten bomen',
                                    u'prefLabel', u'nl'),
                              Label('Different types of trees', u'prefLabel',
                                    u'en')
                          ]), db_session)
        import_provider(
            geo,
            ConceptScheme(id=2,
                          uri='urn:x-skosprovider:geo',
                          labels=[
                              Label('Geografie', u'prefLabel', u'nl'),
                              Label('Geography', u'prefLabel', u'en')
                          ]), db_session)
        import_provider(
            styles_and_cultures,
            ConceptScheme(
                id=3,
                uri='https://id.erfgoed.net/thesauri/stijlen_en_culturen',
                labels=[
                    Label('Stijlen en Culturen', u'prefLabel', u'nl'),
                    Label('Styles and Cultures', u'prefLabel', u'en')
                ]), db_session)
        import_provider(
            materials,
            ConceptScheme(id=4,
                          uri='https://id.erfgoed.net/thesauri/materialen',
                          labels=[
                              Label('Materialen', u'prefLabel', u'nl'),
                              Label('Materials', u'prefLabel', u'en')
                          ]), db_session)
        import_provider(
            eventtypes,
            ConceptScheme(
                id=5,
                uri='https://id.erfgoed.net/thesauri/gebeurtenistypes',
                labels=[
                    Label('Gebeurtenistypes', u'prefLabel', u'nl'),
                    Label('Event types', u'prefLabel', u'en')
                ]), db_session)
        import_provider(
            heritagetypes,
            ConceptScheme(id=6,
                          uri='https://id.erfgoed.net/thesauri/erfgoedtypes',
                          labels=[
                              Label('Erfgoedtypes', u'prefLabel', u'nl'),
                              Label('Heritage types', u'prefLabel', u'en')
                          ]), db_session)
        import_provider(
            periods,
            ConceptScheme(id=7,
                          uri='https://id.erfgoed.net/thesauri/dateringen',
                          labels=[
                              Label('Dateringen', u'prefLabel', u'nl'),
                              Label('Periods', u'prefLabel', u'en')
                          ]), db_session)
        import_provider(
            species,
            ConceptScheme(id=8,
                          uri='https://id.erfgoed.net/thesauri/soorten',
                          labels=[
                              Label('Soorten', u'prefLabel', u'nl'),
                              Label('Species', u'prefLabel', u'en')
                          ]), db_session)
    print('--atramhasis-db-initialized--')
Example #20
def upload_file_obj_db_s3():
    """ Upload file metadata to database and s3 """

    # TODO: upload metadata to database
    temp_engine = create_engine(NEX2_URI)
    session_factory = sessionmaker(bind=temp_engine,
                                   extension=ZopeTransactionExtension(),
                                   expire_on_commit=False)
    db_session = scoped_session(session_factory)
    readme_file_id = None
    file_content_list = file_upload_to_obj()
    try:
        if file_content_list:
            sorted_content = sorted(file_content_list,
                                    key=itemgetter('file_extension'))
            for item in sorted_content:
                if item['readme_name']:
                    readme = db_session.query(Filedbentity).filter(
                        Filedbentity.display_name ==
                        item['readme_name']).one_or_none()

                    if readme is None:
                        logging.warning('unable to find README ' +
                                        item['readme_name'])
                    else:
                        readme_file_id = readme.dbentity_id

                # see if file_meta already exists, else create
                existing_file_meta_data = db_session.query(
                    Filedbentity).filter(Filedbentity.display_name ==
                                         item['display_name']).one_or_none()
                source_id = db_session.query(Source.source_id).filter(
                    Source.display_name == item['source']).one_or_none()[0]

                d_name = item['display_name']
                f_ext = item['file_extension']
                temp_file_path = get_file_from_path_collection(f_ext, d_name)

                if not existing_file_meta_data:
                    try:
                        data_id = db_session.query(Edam.edam_id).filter(
                            Edam.edamid ==
                            item['data_edam_id']).one_or_none()[0]

                        format_id = db_session.query(Edam.edam_id).filter(
                            Edam.edamid ==
                            item['format_edam_id']).one_or_none()[0]
                        topic_id = db_session.query(Edam.edam_id).filter(
                            Edam.edamid ==
                            item['topic_edam_id']).one_or_none()[0]
                        item["data_id"] = data_id
                        item["format_id"] = format_id
                        item["topic_id"] = topic_id
                        item["source_id"] = source_id
                        item["readme_file_id"] = readme_file_id

                    except TypeError:
                        logging.error('invalid EDAM id or source for ' +
                                      item['display_name'] + '; vals ' +
                                      item['data_edam_id'] + ', ' +
                                      item['format_edam_id'] + ', ' +
                                      item['topic_edam_id'])

                    if temp_file_path:
                        with open(temp_file_path, 'r') as remote_file:
                            upload_file_helper(CREATED_BY, remote_file, item)

                    db_session.flush()
                else:
                    existing_file_meta_data.display_name = item['display_name']
                    existing_file_meta_data.description = item['description']
                    existing_file_meta_data.status = item['status']
                    existing_file_meta_data.is_public = item['is_public']
                    existing_file_meta_data.is_in_spell = item['is_in_spell']
                    existing_file_meta_data.is_in_browser = item[
                        'is_in_browser']
                    existing_file_meta_data.source_id = source_id

                    if temp_file_path:
                        with open(temp_file_path, 'r') as remote_file:
                            #update file size
                            if not existing_file_meta_data.file_size and existing_file_meta_data.s3_url:
                                remote_file.seek(0, os.SEEK_END)
                                file_size = remote_file.tell()
                                remote_file.seek(0)
                                existing_file_meta_data.file_size = file_size

                            if item['file_date']:
                                existing_file_meta_data.file_date = item[
                                    'file_date']
                                existing_file_meta_data.year = item[
                                    'file_date'].year
                            existing_file_meta_data.readme_file_id = readme_file_id
                            remote_file.seek(0, os.SEEK_END)

                            #transaction.commit()
                            existing_file_meta_data = db_session.query(
                                Filedbentity).filter(
                                    Filedbentity.display_name ==
                                    item['display_name']).one_or_none()
                            # only upload s3 file if not defined
                            if existing_file_meta_data.s3_url is None:
                                existing_file_meta_data.upload_file_to_s3(
                                    remote_file, item['display_name'])
                            db_session.flush()

    except Exception as e:
        logging.error("Exception occurred", exc_info=True)
Example #21
    desc,
)

from sqlalchemy.ext.declarative import declarative_base

from sqlalchemy.orm import (
    relationship,
    scoped_session,
    sessionmaker,
)

from zope.sqlalchemy import ZopeTransactionExtension
import transaction

DBSession = scoped_session(
    sessionmaker(extension=ZopeTransactionExtension(keep_session=True),
                 expire_on_commit=False))
Base = declarative_base()


class TimeStampMixin(object):
    creation_datetime = Column(DateTime, server_default=func.now())
    modified_datetime = Column(DateTime, server_default=func.now())


class CreationMixin():

    id = Column(UUIDType(binary=False), primary_key=True)

    @classmethod
    def add(cls, **kwargs):
Example #22
            del g.relationship_cache
    if has_request_context() and hasattr(flask_session, '_user'):
        delattr(flask_session, '_user')


def _column_names(constraint, table):
    return '_'.join((c if isinstance(c, basestring) else c.name)
                    for c in constraint.columns)


def _unique_index(constraint, table):
    return 'uq_' if constraint.unique else ''


naming_convention = {
    'fk': 'fk_%(table_name)s_%(column_names)s_%(referred_table_name)s',
    'pk': 'pk_%(table_name)s',
    'ix': 'ix_%(unique_index)s%(table_name)s_%(column_names)s',
    'ck': 'ck_%(table_name)s_%(constraint_name)s',
    'uq': 'uq_%(table_name)s_%(column_names)s',
    'column_names': _column_names,
    'unique_index': _unique_index
}
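
# Illustration (not from the source): under this convention a foreign key from
# users.role_id to roles is autonamed 'fk_users_role_id_roles', and a unique
# index on users.email becomes 'ix_uq_users_email'.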

db = IndicoSQLAlchemy(
    session_options={'extension': ZopeTransactionExtension()})
db.Model.metadata.naming_convention = naming_convention
listen(db.Model.metadata, 'before_create', _before_create)
listen(mapper, 'mapper_configured', _mapper_configured)
listen(Session, 'after_transaction_end', _transaction_ended)
Example #23
    @classmethod
    def setUpClass(cls):
        cls.engine = create_engine(dbconfig, encoding='utf8', echo=True)
        cls.Session = scoped_session(
            sessionmaker(bind=cls.engine,
                         extension=ZopeTransactionExtension()))
        cls.dbsession = cls.Session()
Example #24
def main(global_config, **settings):
    """ This function returns a Pyramid WSGI application.
    """

    cache = dogpile.cache.make_region(
        key_mangler=dogpile.cache.util.sha1_mangle_key)
    tahrir_api.model.Person.avatar_url = make_avatar_method(cache)
    tahrir_api.model.Person.email_md5 = property(
        lambda self: hashlib.md5(self.email).hexdigest())
    tahrir_api.model.Person.email_sha1 = property(
        lambda self: hashlib.sha1(self.email).hexdigest())

    identifier = settings.get('tahrir.openid_identifier')
    tahrir_api.model.Person.openid_identifier =\
            make_openid_identifier_property(identifier)

    tahrir_api.model.Person.created_on_rel =\
            make_relative_time_property('created_on')
    tahrir_api.model.Assertion.created_on_rel =\
            make_relative_time_property('created_on')
    tahrir_api.model.Assertion.issued_on_rel =\
            make_relative_time_property('issued_on')

    session_cls = scoped_session(
        sessionmaker(
            extension=ZopeTransactionExtension(),
            bind=create_engine(settings['sqlalchemy.url']),
        ))

    def get_db(request):
        """ Database retrieval function to be added to the request for
            calling anywhere.
        """
        session = session_cls()
        return TahrirDatabase(session=session,
                              autocommit=False,
                              notification_callback=notifications.callback)

    required_keys = [
        'tahrir.pngs.uri',
        'tahrir.admin',
        'tahrir.title',
        'tahrir.base_url',
    ]

    # validate the config
    for key in required_keys:
        if key not in settings:
            raise ValueError("%s required in settings." % key)

    # Make data dir if it doesn't already exist.
    settings['tahrir.pngs.uri'] = os.path.abspath(settings['tahrir.pngs.uri'])
    if not os.path.exists(settings['tahrir.pngs.uri']):
        os.makedirs(settings['tahrir.pngs.uri'])

    # Load secret stuff from secret.ini.
    try:
        default_path = os.path.abspath("secret.ini")
        secret_path = settings.get('secret_config_path', default_path)
        # TODO: There is a better way to log this message than print.
        print "Reading secrets from %r" % secret_path
        parser = ConfigParser.ConfigParser()
        parser.read(secret_path)
        secret_config = dict(parser.items("tahrir"))
        settings.update(secret_config)
    except Exception as e:
        # TODO: There is a better way to log this message than print.
        print 'Failed to load secret.ini.  Reason: %r' % str(e)

    authn_policy = AuthTktAuthenticationPolicy(
        secret=settings['authnsecret'],
        callback=groupfinder,  # groupfinder callback checks for admin privs
        hashalg='sha512',  # because md5 is deprecated
        secure=asbool(settings['tahrir.secure_cookies']),
        http_only=asbool(settings['tahrir.httponly_cookies']),
    )
    authz_policy = ACLAuthorizationPolicy()
    session_factory = UnencryptedCookieSessionFactoryConfig(
        secret=settings['session.secret'],
        cookie_secure=asbool(settings['tahrir.secure_cookies']),
        cookie_httponly=asbool(settings['tahrir.httponly_cookies']),
    )

    # Configure our cache that we instantiated earlier.
    cache.configure_from_config(settings, 'dogpile.cache.')

    config = Configurator(settings=settings,
                          root_factory=get_root,
                          session_factory=session_factory,
                          authentication_policy=authn_policy,
                          authorization_policy=authz_policy)

    import tahrir.custom_openid
    config.include('velruse.providers.openid')
    tahrir.custom_openid.add_openid_login(
        config,
        realm=settings.get('tahrir.openid_realm'),
        identity_provider=settings.get('tahrir.openid_identifier'),
    )

    config.include('pyramid_mako')

    config.add_request_method(get_db, 'db', reify=True)

    config.add_static_view(
        'static',
        settings.get('tahrir.theme_name', 'tahrir') + ':static',
        cache_max_age=3600,
    )
    config.add_static_view(
        'pngs',
        settings['tahrir.pngs.uri'],
        cache_max_age=3600,
    )

    config.add_route('home', '/')
    config.add_route('heartbeat', '/heartbeat')

    # main admin endpoint
    config.add_route('admin', '/admin')

    # delegated admin endpoints
    config.add_route('award', '/award')
    config.add_route('invite', '/invite')
    config.add_route('add_tag', '/add_tag')

    config.add_route('qrcode', '/qrcode')
    config.add_route('badge', '/badge/{id}')
    config.add_route('badge_full', '/badge/{id}/full')
    config.add_route('badge_json', '/badge/{id}/json')
    config.add_route('badge_rss', '/badge/{id}/rss')
    config.add_route('badge_stl', '/badge/{id}/stl')
    config.add_route('builder', '/builder')
    config.add_route('about', '/about')
    config.add_route('explore', '/explore')
    config.add_route('explore_badges', '/explore/badges')
    config.add_route('explore_badges_rss', '/explore/badges/rss')
    config.add_route('leaderboard', '/leaderboard')
    config.add_route('leaderboard_json', '/leaderboard/json')
    config.add_route('rank_json', '/leaderboard/{id}/json')
    config.add_route('tags', '/tags/{tags}/{match}')
    config.add_route('user', '/user/{id}')
    config.add_route('user_edit', '/user/{id}/edit')
    config.add_route('user_json', '/user/{id}/json')
    config.add_route('user_rss', '/user/{id}/rss')
    config.add_route('user_foaf', '/user/{id}/foaf')
    config.add_route('user_team_json', '/user/{id}/team/{team_id}/json')
    config.add_route('diff', '/diff/{id_a}/{id_b}')
    config.add_route('report', '/report')
    config.add_route('report_this_month', '/report/this/month')
    config.add_route('report_year', '/report/{year}')
    config.add_route('report_year_month', '/report/{year}/{month}')
    config.add_route('report_year_week', '/report/{year}/week/{weeknumber}')
    config.add_route('report_year_month_day', '/report/{year}/{month}/{day}')
    config.add_route('award_from_csv', '/award_from_csv')
    config.add_route('login', '/login')
    config.add_route('logout', '/logout')

    # Used to grab a "was awarded" html snippet asynchronously
    config.add_route('assertion_widget', '/_w/assertion/{person}/{badge}')

    config.scan()

    return config.make_wsgi_app()
Example #25
def create_and_upload_file(obj, row_num, sftp_client):
    try:
        # find on local system
        if DATA_DIR:
            remote_file_path = DATA_DIR + '/' + obj['bun_path']
        else:
            remote_file_path = obj['bun_path']
        remote_file_path = remote_file_path + obj['display_name']
        remote_file = sftp_client.open(remote_file_path)
    except IOError:
        logging.error('error opening file ' + str(row_num))
        traceback.print_exc()
        return
    try:
        temp_engine = create_engine(NEX2_URI)
        session_factory = sessionmaker(bind=temp_engine,
                                       extension=ZopeTransactionExtension(),
                                       expire_on_commit=False)
        db_session = scoped_session(session_factory)
        # get README location
        readme_file_id = None
        if len(obj['readme_name']):
            readme = db_session.query(Filedbentity).filter(
                Filedbentity.display_name == obj['readme_name']).one_or_none()
            if readme is None:
                logging.warning('unable to find README ' + obj['readme_name'])
            else:
                readme_file_id = readme.dbentity_id

        # see if already exists, if not create
        existing = db_session.query(Filedbentity).filter(
            Filedbentity.display_name == obj['display_name']).one_or_none()

        source_id = db_session.query(Source.source_id).filter(
            Source.display_name == obj['source']).one_or_none()[0]

        if not existing:
            try:
                data_id = db_session.query(Edam.edam_id).filter(
                    Edam.edamid == obj['data_edam_id']).one_or_none()[0]
                format_id = db_session.query(Edam.edam_id).filter(
                    Edam.edamid == obj['format_edam_id']).one_or_none()[0]
                topic_id = db_session.query(Edam.edam_id).filter(
                    Edam.edamid == obj['topic_edam_id']).one_or_none()[0]
            except TypeError:
                logging.error('invalid EDAM id or source in row ' +
                              str(row_num) + ' val in ' + obj['data_edam_id'] +
                              ', ' + obj['format_edam_id'] + ', ' +
                              obj['topic_edam_id'])
                return

            print("remote_file=", remote_file)

            upload_file(CREATED_BY,
                        remote_file,
                        filename=obj['display_name'],
                        file_extension=obj['file_extension'],
                        description=obj['description'].replace('"', ''),
                        display_name=obj['display_name'],
                        data_id=data_id,
                        format_id=format_id,
                        status=obj['status'],
                        topic_id=topic_id,
                        is_public=obj['is_public'],
                        is_in_spell=obj['is_in_spell'],
                        is_in_browser=obj['is_in_browser'],
                        file_date=obj['file_date'],
                        readme_file_id=readme_file_id,
                        source_id=source_id)
            db_session.flush()
        else:
            existing.display_name = obj['display_name']
            existing.description = obj['description']
            existing.status = obj['status']
            existing.is_public = obj['is_public']
            existing.is_in_spell = obj['is_in_spell']
            existing.is_in_browser = obj['is_in_browser']
            existing.source_id = source_id
            # update file size
            if not existing.file_size and existing.s3_url:
                remote_file.seek(0, os.SEEK_END)
                file_size = remote_file.tell()
                remote_file.seek(0)
                existing.file_size = file_size
            if obj['file_date']:
                existing.file_date = obj['file_date']
                existing.year = obj['file_date'].year
            existing.readme_file_id = readme_file_id
            remote_file.seek(0, os.SEEK_END)
            transaction.commit()
            existing = db_session.query(Filedbentity).filter(
                Filedbentity.display_name ==
                obj['display_name']).one_or_none()
            # only upload s3 file if not defined
            if existing.s3_url is None:
                existing.upload_file_to_s3(remote_file, obj['display_name'])
            db_session.flush()
        # add path entries
        existing = db_session.query(Filedbentity).filter(
            Filedbentity.display_name == obj['display_name']).one_or_none()
        if not existing:
            logging.error('error with ' + obj['display_name'] + ' in row ' +
                          str(row_num))
            return
        path = db_session.query(Path).filter_by(
            path=obj['new_path']).one_or_none()
        if path is None:
            logging.warning('Could not find path ' + obj['new_path'] +
                            ' in row ' + str(row_num))
            return
        existing_filepath = db_session.query(FilePath).filter(
            and_(FilePath.file_id == existing.dbentity_id,
                 FilePath.path_id == path.path_id)).one_or_none()
        if not existing_filepath:
            new_filepath = FilePath(file_id=existing.dbentity_id,
                                    path_id=path.path_id,
                                    source_id=SGD_SOURCE_ID,
                                    created_by=CREATED_BY)
            db_session.add(new_filepath)
            transaction.commit()
            db_session.flush()
        # maybe add PMIDs
        if len(obj['pmids']):
            existing = db_session.query(Filedbentity).filter(
                Filedbentity.display_name ==
                obj['display_name']).one_or_none()
            pmids = obj['pmids'].split('|')
            for x in pmids:
                x = int(x.strip())
                existing_ref_file = db_session.query(ReferenceFile).filter(
                    ReferenceFile.file_id ==
                    existing.dbentity_id).one_or_none()
                ref = db_session.query(Referencedbentity).filter(
                    Referencedbentity.pmid == x).one_or_none()
                if ref and not existing_ref_file:
                    new_ref_file = ReferenceFile(created_by=CREATED_BY,
                                                 file_id=existing.dbentity_id,
                                                 reference_id=ref.dbentity_id,
                                                 source_id=SGD_SOURCE_ID)
                    db_session.add(new_ref_file)
                transaction.commit()
                db_session.flush()
        # maybe add keywords
        if len(obj['keywords']):
            existing = db_session.query(Filedbentity).filter(
                Filedbentity.display_name ==
                obj['display_name']).one_or_none()
            keywords = obj['keywords'].split('|')
            for x in keywords:
                x = x.strip()
                keyword = db_session.query(Keyword).filter(
                    Keyword.display_name == x).one_or_none()
                existing_file_keyword = db_session.query(FileKeyword).filter(
                    and_(FileKeyword.file_id == existing.dbentity_id,
                         FileKeyword.keyword_id ==
                         keyword.keyword_id)).one_or_none()
                if not existing_file_keyword:
                    new_file_keyword = FileKeyword(
                        created_by=CREATED_BY,
                        file_id=existing.dbentity_id,
                        keyword_id=keyword.keyword_id,
                        source_id=SGD_SOURCE_ID)
                    db_session.add(new_file_keyword)
                transaction.commit()
                db_session.flush()
        remote_file.close()
        logging.info('finished ' + obj['display_name'] + ', line ' +
                     str(row_num))
    except:
        logging.error('error with ' + obj['display_name'] + ' in row ' +
                      str(row_num))
        traceback.print_exc()
        db_session.rollback()
        db_session.close()
Example #26
    @classmethod
    def setUpClass(cls):
        cls.engine = engine_from_config(settings, prefix='sqlalchemy.')
        cls.session_maker = sessionmaker(
            bind=cls.engine,
            extension=ZopeTransactionExtension()
        )
Example #27
    def setUp(self):
        self.ip_address = random_ip()
        self.session = scoped_session(
            sessionmaker(extension=ZopeTransactionExtension()))
Example #28
from pyramid.security import Allow
from pyramid.security import Everyone
from pyramid.security import Authenticated

from OSMTM.utils import TileBuilder
from OSMTM.utils import max
from OSMTM.utils import get_tiles_in_geom
from shapely.wkt import loads

from OSMTM.history_meta import VersionedMeta, VersionedListener
from OSMTM.history_meta import _history_mapper

from datetime import datetime

DBSession = scoped_session(
    sessionmaker(extension=[ZopeTransactionExtension(),
                            VersionedListener()]))
Base = declarative_base()


class RootFactory(object):
    __acl__ = [(Allow, Everyone, 'view'), (Allow, Authenticated, 'edit'),
               (Allow, Authenticated, 'job'), (Allow, 'group:admin', 'admin')]

    def __init__(self, request):
        pass


class Tile(Base):
    __metaclass__ = VersionedMeta
    __tablename__ = "tiles"
Example #29
from sqlalchemy import Column, Date, ForeignKey, Integer, Unicode, func
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import (backref, column_property, relationship,
                            scoped_session, sessionmaker)
from zope.sqlalchemy import ZopeTransactionExtension

DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))

Base = declarative_base()


class User(Base):
    """Define a User."""

    __tablename__ = "users"

    id = Column(Integer, primary_key=True)

    name = Column(Unicode, unique=True)

    birthday = Column(Date)

    address = relationship("Address", uselist=False, backref=backref("user"))

    age = column_property(
        func.strftime("%Y.%m%d", "now") -
        func.strftime("%Y.%m%d", birthday).cast(Integer))

    def __unicode__(self):
        """Give a readable representation of an instance."""
        return "%s" % self.name
Example #30
def session_maker_factory(engine):
    return scoped_session(
        sessionmaker(bind=engine,
                     autocommit=False,
                     extension=ZopeTransactionExtension()))
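
A usage sketch for the factory (hypothetical engine URL, not part of the
original snippet):

from sqlalchemy import create_engine

Session = session_maker_factory(create_engine('sqlite://'))
session = Session()  # thread-local session, committed via the transaction manager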