Code example #1
def get_mobility_session(echo=False, autocommit=False):
    """returns a session to the database"""

    engine = model.create_engine('postgresql://ana@/mobility', echo=echo).execution_options(autocommit=autocommit)
    model.Base.metadata.bind = engine
    DBSession = sessionmaker(bind=engine)
    return DBSession()
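
A minimal usage sketch for this helper; model.Trip below is a hypothetical mapped class, shown only to illustrate the call pattern:

session = get_mobility_session(echo=True)
try:
    # Any class mapped on model.Base can be queried through the returned session.
    trips = session.query(model.Trip).limit(10).all()
finally:
    session.close()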
Code example #2
File: __init__.py Project: archtool/archtool
def upgradeDatabase(url):
  ''' Upgrade the current database to the newest database structure. '''
  fname = model.fnameFromUrl(url)
  print 'Converting', fname

  # Close the engine
  model.clearEngine()
  # Export the current database
  engine = model.create_engine(url)
  export(engine = engine)
  # Create the new (upgraded) database and import the data
  data = loadCsv()
  importData(data, url+'.new')
  # Move the existing file to a backup location
  shutil.move(fname, fname+'.bak')
  # Move the new (upgraded) database to the original location
  shutil.move(fname+'.new', fname)
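
A minimal invocation sketch; the SQLite URL is borrowed from the __main__ block in example #5 and is only illustrative:

upgradeDatabase('sqlite:///archmodel.db')   # the previous file ends up as <name>.bak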
Code example #3
File: env.py Project: aljeshishe/food_
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = model.create_engine(model.SQLALCHEMY_DATABASE_URI,
                                      poolclass=pool.NullPool)
    # connectable = engine_from_config(
    #     config.get_section(config.config_ini_section),
    #     prefix='sqlalchemy.',
    #     poolclass=pool.NullPool)

    with connectable.connect() as connection:
        context.configure(connection=connection,
                          target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()
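
For reference, a standard Alembic env.py pairs this online runner with an offline counterpart and a module-level dispatch. A sketch along those lines, reusing the same URI and metadata (an assumption; this part is not shown in the project):

def run_migrations_offline():
    """Run migrations in 'offline' mode: render SQL to the script output
    without opening a database connection."""
    context.configure(url=model.SQLALCHEMY_DATABASE_URI,
                      target_metadata=target_metadata,
                      literal_binds=True)
    with context.begin_transaction():
        context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()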
Code example #4
File: __init__.py Project: archtool/archtool
def importData(data, db):
  table_renames = {'fptousecase': 'fprepresentation',
                   'fptoview':'fprepresentation',
                   'connection':'blockconnection'}
  table_ignore = ['colorpalette', 'dbaseversion']
  tables = {tab.__tablename__:tab for tab in model.Base.getTables()}
  # Switch off the foreign keys for now
  model.check_fkeys = False
  engine = model.create_engine(db, echo=True)
  old_url = model.the_url
  model.the_url = db
  model.changeEngine(engine)
  # Clean the database
  model.cleanDatabase()
  try:
    session = model.SessionFactory()
    with model.sessionScope(session) as session:
      # In PostgreSQL, ensure the foreign keys are only checked at the commit, not before.
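      # (SET CONSTRAINTS only affects foreign keys that were declared DEFERRABLE.)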
      if db.startswith('postgresql'):
        session.execute('SET CONSTRAINTS ALL DEFERRED')

      v = int(data['dbaseversion'][0]['Version'])
      if v < 6:
        upgradeToVersion6(data)
      del data['dbaseversion']    # Remove the old database version number
      # Add the current database version
      session.add(model.DbaseVersion())

      # Treat the planeable and anchor items differently: these are polymorphic tables.
      # The base tables are not added directly but through their child tables, using the ORM
      poly_items = {}  # Store the contents held by the polymorphic tables. These are needed later..
      poly_bases = {}  # Find the base for a specific table.
      for poly_table, poly_column in [('planeableitem', 'ItemType'), ('anchor', 'AnchorType')]:
        poly_items[poly_table] = {r['Id']:r for r in data[poly_table]}
        children = set([r[poly_column] for r in data[poly_table]])
        for c in children:
          poly_bases[c] = poly_table
        # Do not add the table directly, so remove it from the list.
        del data[poly_table]

      for n1, n2 in table_renames.items():
        if n1 in data:
          data[n2] = data[n1]

      for table, name in [(t, t.__tablename__) for t in model.order] + \
                  [(model.Base.metadata.tables['planeablexref'], 'planeablexref')]:
        records = data.get(name, [])
        if not records:
          continue
        # Start of a new table.
        if name in table_ignore:
          # Skip this table.
          continue
        if name not in tables:
          table = [name]
        else:
          table = tables[name]
        base_class = poly_bases.get(name, None)

        # Determine which fields are no longer used
        fields = records[0].keys()
        exclude = [f for f in fields if not hasattr(table, f)]
        for d in records:
          print 'Table:', name, 'data:', d
          # Exclude fields that have been removed from the database.
          if exclude:
            for e in exclude:
              del d[e]
          if base_class:
            # Add in the data stored in the polymorphic base table
            d.update(poly_items[base_class][d['Id']])

          # Add the record to the database
          if name not in tables:
            # This class needs raw SQL to create.
            if d:
              ins = table.insert().values(**d)
              session.execute(ins)
          else:
            el = table(**d)
            session.add(el)

    if db.startswith('postgresql'):
      # Only update the sequence for direct children from MyBase.
      # This excludes all polymorphic derivatives (e.g. Requirement) that have no sequence.
      for table in model.MyBase.__subclasses__()[0].__subclasses__():
        name = table.__tablename__
        # In PostgreSQL, the sequences are not updated automatically in this case...
        if 'Id' in table.getFields():
          # Fix the sequence number
          seqname = '%s_Id_seq'%name
          q = '''SELECT setval('"%s"', (SELECT MAX("Id") FROM %s))'''%(seqname, name)
          conn = engine.connect()
          conn.execute("commit")
          conn.execute(q)

    
  finally:
    model.the_engine = None
    model.SessionFactory = None
    model.check_fkeys = True
    model.the_url = old_url
Code example #5
File: __init__.py Project: archtool/archtool
  data = loadCsv()
  importData(data, url+'.new')
  # Move the existing file to a backup location
  shutil.move(fname, fname+'.bak')
  # Move the new (upgraded) database to the original location
  shutil.move(fname+'.new', fname)






if __name__ == '__main__':
  import os.path
  import shutil
  from glob import glob
  
  fnames = glob('*.db')
  print 'Found files:', fnames
  for fname in fnames:
    print 'Converting', fname
    fbase, ext = os.path.splitext(fname)
    fold = '%s.old'%fbase
    fnew = '%s.new'%fbase
    engine = model.create_engine('sqlite:///archmodel.db')
    export('%s.csv'%fbase, engine)
    importCsv('%s.csv'%fbase, 'sqlite:///%s'%fnew)
    shutil.move(fname, fold)
    shutil.move(fnew, fname)
  
Code example #6
File: test.py Project: archtool/archtool
 def setUp(self):
   # Do the tests on a temporary database
   self.org_engine = model.the_engine
   self.engine = model.create_engine('sqlite:///:memory:', echo=True)
   model.changeEngine(self.engine)
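
A plausible tearDown counterpart (an assumption; it is not part of the quoted test file) would restore the saved engine so later tests do not keep using the in-memory database:

 def tearDown(self):
   # Put back the engine that was active before setUp ran.
   model.changeEngine(self.org_engine)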
Code example #7
File: req_export.py Project: mmullenAxians/archtool
    requirements = [r for r in requirements if len(r.StateChanges) > 0 and r.StateChanges[0].Status == "Question"]

    if len(requirements) == 0:
        return

    print >> out, ".. list-table:: "
    print >> out, "   :widths: 10, 90"
    print >> out, "   :header-rows: 1", "\n"

    print >> out, "   * - %s" % "\n     - ".join(["Req", "Question"])

    for req in requirements:
        question = req.StateChanges[0].Description
        if question:
            question = question.replace("\n", "\n       ")
        else:
            continue

        elements = ["   * - %s" % req.Name, "     - %s" % question]

        print >> out, "\n".join(elements).encode("cp1252")


if __name__ == "__main__":
    db = "sqlite:///archmodel.db"
    model.changeEngine(model.create_engine(db))
    session = model.SessionFactory()
    out = file("requirements.rst", "w")

    exportRequirementsOverview(session, out)
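
For reference, the reStructuredText written to requirements.rst by the code above has roughly this shape; the requirement name and question text are made up:

.. list-table::
   :widths: 10, 90
   :header-rows: 1

   * - Req
     - Question
   * - REQ-001
     - Why was this requirement moved back to the Question state?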
Code example #8
import logging
from datetime import datetime
from sqlalchemy.orm import sessionmaker

from model import Post, WordSearch, PType, Base, create_engine

engine = create_engine('sqlite:///base.db', echo=True)

FORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
logging.basicConfig(format=FORMAT)

logger = logging.getLogger(__file__)
logger.setLevel(logging.DEBUG)


def createSession():
    Session = sessionmaker()
    return Session(bind=engine)


def createDB(engine=engine):
    Base.metadata.create_all(engine)
    logger.info('Create database: {}'.format(engine))


def addWordsSearch(word):
    session = createSession()
    word_db = WordSearch()
    word_db.text = word
    session.add(word_db)
    session.commit()  # persist the new search term; the session is local to this function
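
A minimal way to exercise these helpers (a sketch; the search term is made up):

if __name__ == '__main__':
    createDB()                     # create all tables defined on Base
    addWordsSearch('sqlalchemy')   # store one search term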
Code example #9
File: test.py Project: johndpope/G-Terminail
"""
Test file to perform unittests on some of the modules.
This file can be extended to perform tests on all functions
and also do integration tests.
Coverage reports can be made from the tests using any
third party package such as nose etc..
"""
import unittest
from model import Email, create_engine, ROOT_DIR, Base
from datetime import datetime
from sqlalchemy.orm import sessionmaker
from process import composed, core

test_engine = create_engine(f'sqlite:///{ROOT_DIR}/sqlite_test.db')


class TestEmail(unittest.TestCase):
    def setUp(self):
        """Setup a test database and insert a dummy record."""
        DBSession = sessionmaker(bind=test_engine)
        Base.metadata.create_all(test_engine)
        session = DBSession()
        email_obj = Email(from_address="*****@*****.**",
                          to_address="*****@*****.**",
                          is_read=True,
                          is_archived=True,
                          subject="test",
                          message_body="test",
                          message_id="test",
                          label="INBOX",
                          has_attachment=False,