示例#1
0
from sqlalchemy import create_engine
from sqlalchemy import MetaData
from sqlalchemy import inspect
from sqlalchemy import Table
from sqlalchemy import Column, Integer, String
from sqlalchemy.engine.url import URL

# Connection settings; there is no 'database' key, so the server's default
# database is used.  'postgresql' is the canonical driver name -- the old
# 'postgres' alias was deprecated and removed in SQLAlchemy 1.4.
db_url = {
    'drivername': 'postgresql',
    'username': '******',
    'password': '******',
    'host': '192.168.99.100',
    'port': 5432
}
engine = create_engine(URL(**db_url))
m = MetaData()
table = Table('Test', m, Column('id', Integer, primary_key=True),
              Column('key', String, nullable=True), Column('val', String))

# Create the table, then confirm via the inspector that it now exists.
table.create(engine)
inspector = inspect(engine)
print('Test' in inspector.get_table_names())

# Drop it again and confirm it is gone.
table.drop(engine)
inspector = inspect(engine)
print('Test' in inspector.get_table_names())
import os
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.engine.url import URL
from sqlalchemy.orm import sessionmaker
from config import DATABASE, GMAP_API_KEY

# NYC Open Data (Socrata) endpoint plus its paging query parameters.
OD_BASE_URL = 'https://data.cityofnewyork.us/resource/9w7m-hzhe.json?'
OD_URL_PARAM_LIMIT = '$limit='
OD_URL_PARAM_OFFSET = '$offset='

# Google Maps geocoding endpoint; the API key is supplied by config.
GMAP_GC_BASE_URL = 'https://maps.googleapis.com/maps/api/geocode/json?address='
GMAP_API_KEY_PARAM = '&key=' + GMAP_API_KEY

# Engine built from the DATABASE dict in config (drivername/username/... keys).
engine = create_engine(URL(**DATABASE))

Base = declarative_base()
# NOTE(review): create_all() runs before any model subclasses Base in this
# snippet, so there may be nothing to create at this point -- confirm intent.
Base.metadata.create_all(engine)

# Module-level session shared by the code that imports this module.
Session = sessionmaker(bind=engine)
session = Session()
示例#3
0
from config import DBCONFIG
from service.models import metadata
from sqlalchemy.engine.url import URL
from sqlalchemy import create_engine

# Positional URL arguments are: drivername, username, password, host, port,
# database.  PORT is read with .get() so a missing key yields None (driver
# default port).  echo=True logs every emitted SQL statement.
dbengine = create_engine(URL(DBCONFIG['DRIVER'], DBCONFIG['USER'],
                             DBCONFIG['PASSWORD'], DBCONFIG['HOST'],
                             DBCONFIG.get('PORT'), DBCONFIG['DBNAME']),
                         echo=True)

# Create every table registered on the models' shared MetaData
# (existing tables are left untouched).
metadata.create_all(dbengine)
示例#4
0
    with pytest.raises(SystemExit):
        with tracker_store.session_scope() as _:
            pass

    # error message is printed
    assert (
        f"Requested PostgreSQL schema '{requested_schema}' was not found in the "
        f"database." in capsys.readouterr()[0])


@pytest.mark.parametrize(
    "url,is_postgres_url",
    [
        # Plain connection strings...
        (f"{PGDialect.name}://admin:pw@localhost:5432/rasa", True),
        (f"{SQLiteDialect.name}:///", False),
        # ...and URL objects must be classified the same way.
        (URL(PGDialect.name), True),
        (URL(SQLiteDialect.name), False),
    ],
)
def test_is_postgres_url(url: Union[Text, URL], is_postgres_url: bool):
    """is_postgresql_url() accepts both string and URL inputs."""
    assert rasa.core.tracker_store.is_postgresql_url(url) == is_postgres_url


def set_or_delete_postgresql_schema_env_var(monkeypatch: MonkeyPatch,
                                            value: Optional[Text]) -> None:
    """Set `POSTGRESQL_SCHEMA` environment variable using `MonkeyPatch`.

    Args:
        monkeypatch: Instance of `MonkeyPatch` to use for patching.
        value: Value of the `POSTGRESQL_SCHEMA` environment variable to set.
    """
示例#5
0
from .utils import log, logerr

# Postgres connection pieces, all sourced from the environment; any variable
# that is unset comes through as None.
pg_vars = {
    'drivername': os.getenv('PG_DRIVER'),
    'username': os.getenv('PG_USER'),
    'password': os.getenv('PG_PASSWORD'),
    'host': os.getenv('PG_HOST'),
    'port': os.getenv('PG_PORT'),
    'database': os.getenv('PG_DATABASE')
}

# Passed through to the DBAPI as sslmode (e.g. 'require' / 'disable').
pg_ssl = os.getenv('PG_SSL')

# Setup SQL Alchemy postgres connection.
engine = create_engine(URL(**pg_vars), connect_args={'sslmode': pg_ssl})
conn = engine.connect()


# Grab a page from C.io messages API with optional next param for pagination.
def get_page(next_page=None):
    params = {
        'metric': os.getenv('CIO_API_METRIC'),
        'type': os.getenv('CIO_API_TYPE'),
        'limit': os.getenv('CIO_API_LIMIT'),
        'start': next_page
    }
    user = os.getenv('CIO_API_USER')
    password = os.getenv('CIO_API_PASSWORD')
    uri = os.getenv('CIO_API_URI')
    r = requests.get(uri, params=params, auth=(user, password))
示例#6
0
import json
from sqlalchemy import create_engine
from sqlalchemy.engine.url import URL


# Production MySQL connection; the 'query' entry appends '?charset=utf8'
# to the generated URL.
prodb = {
  'database': 'prod',
  'username': '******',
  'password': '******',
  'host': '1.1.1.1',
  'port': '3306',
  'drivername': 'mysql+pymysql',
  'query': {'charset': 'utf8'}
}

engine_prod = create_engine(URL(**prodb))

# Local test database, same driver/charset, spelled as a plain URL string.
engine_local = create_engine('mysql+pymysql://user:pass@localhost:3306/test?charset=utf8')



def local():
    excel = pd.ExcelFile("生产集群节点.xlsx")
    sheet_names = excel.sheet_names

    for sheet_name in sheet_names:

        if 'df' not in locals():
            df = pd.read_excel("生产集群节点.xlsx", sheet_name)
            df['vc_cluster'] = sheet_name
        else:
示例#7
0
mapper: Map Table to class
from sqlalchemy import (
    create_engine,
    Table,
    MetaData,
    Column,
    Integer,
    String,
    ForeignKey)

from sqlalchemy.orm import (
    mapper,
    relationship,
    sessionmaker)

# classical mapping: map "table" to "class"
# (an in-memory SQLite database, so nothing persists past the process)
db_url = 'sqlite://'
engine = create_engine(db_url)

meta = MetaData(bind=engine)

# Plain Table objects: schema only, no behaviour attached yet.
user = Table('User', meta,
             Column('id', Integer, primary_key=True),
             Column('name', String),
             Column('fullname', String),
             Column('password', String))

addr = Table('Address', meta,
             Column('id', Integer, primary_key=True),
             Column('email', String),
             Column('user_id', Integer, ForeignKey('User.id')))

# map table to class
class User(object):
    def __init__(self, name, fullname, password):
        self.name = name
        self.fullname = fullname
        self.password = password

class Address(object):
    def __init__(self, email):
        self.email = email

# Classical mapper() calls wire each plain class to its Table; the
# relationship provides User.addresses and (via backref) Address.user.
mapper(User, user, properties={
       'addresses': relationship(Address, backref='user')})
mapper(Address, addr)

# create table
meta.create_all()

# create session
Session = sessionmaker()
Session.configure(bind=engine)
session = Session()

# Insert one user with one address, then read the user back.
u = User(name='Hello', fullname='HelloWorld', password='******')
a = Address(email='*****@*****.**')
u.addresses.append(a)
try:
    session.add(u)
    session.commit()

    # query result
    u = session.query(User).filter(User.name == 'Hello').first()
    print(u.name, u.fullname, u.password)

finally:
    session.close()
output:

$ python map_table_class.py
Hello HelloWorld ker
Get table dynamically
from sqlalchemy import (
    create_engine,
    MetaData,
    Table,
    inspect,
    Column,
    String,
    Integer)

from sqlalchemy.orm import (
    mapper,
    scoped_session,
    sessionmaker)

db_url = "sqlite://"
engine = create_engine(db_url)
metadata = MetaData(engine)

# Shared base class for the dynamically generated mapped classes.
class TableTemp(object):
    def __init__(self, name):
        self.name = name

def get_table(name):
    """Return a mapped class for table `name`, creating the table on first use."""
    if name in metadata.tables:
        table = metadata.tables[name]
    else:
        table = Table(name, metadata,
                Column('id', Integer, primary_key=True),
                Column('name', String))
        table.create(engine)

    # NOTE(review): a fresh class is built and mapped on *every* call, even
    # for a table seen before -- confirm the repeated mapping is intended.
    cls = type(name.title(), (TableTemp,), {})
    mapper(cls, table)
    return cls

# get table first time
t = get_table('Hello')

# get table second time
t = get_table('Hello')

Session = scoped_session(sessionmaker(bind=engine))
try:
    Session.add(t(name='foo'))
    Session.add(t(name='bar'))
    for _ in Session.query(t).all():
        print(_.name)
except Exception as e:
    Session.rollback()
finally:
    Session.close()
output:

$ python get_table.py
foo
bar
Object Relational join two tables
from sqlalchemy import create_engine
from sqlalchemy import Column, Integer, String, ForeignKey
from sqlalchemy.orm import relationship
from sqlalchemy.engine.url import URL
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

# One-to-many pair: a User owns many Address rows via Address.user_id;
# the backref adds Address.user on the other side.
class User(Base):
    __tablename__ = 'user'
    id    = Column(Integer, primary_key=True)
    name  = Column(String)
    addresses = relationship("Address", backref="user")

class Address(Base):
    __tablename__ = 'address'
    id = Column(Integer, primary_key=True)
    email = Column(String)
    user_id = Column(Integer, ForeignKey('user.id'))

# Server coordinates only; no 'database' key, so the default DB is used.
# NOTE(review): the 'postgres' drivername alias was removed in SQLAlchemy
# 1.4 -- modern versions require 'postgresql'.
db_url = {'drivername': 'postgres',
          'username': '******',
          'password': '******',
          'host': '192.168.99.100',
          'port': 5432}

# create engine
engine = create_engine(URL(**db_url))

# create tables
Base.metadata.create_all(bind=engine)

# create session
Session = sessionmaker()
Session.configure(bind=engine)
session = Session()

# One user with two addresses.
user = User(name='user1')
mail1 = Address(email='*****@*****.**')
mail2 = Address(email='*****@*****.**')
user.addresses.extend([mail1, mail2])

session.add(user)
session.add_all([mail1, mail2])
session.commit()

# Joined query yielding (Address, User) pairs.
query = session.query(Address, User).join(User)
for _a, _u in query.all():
    print(_u.name, _a.email)
output:

$ python sqlalchemy_join.py
user1 [email protected]
user1 [email protected]
join on relationship and group_by count
from sqlalchemy import (
    create_engine,
    Column,
    String,
    Integer,
    ForeignKey,
    func)

from sqlalchemy.orm import (
    relationship,
    sessionmaker,
    scoped_session)

from sqlalchemy.ext.declarative import declarative_base

db_url = 'sqlite://'
engine = create_engine(db_url)

Base = declarative_base()

# Two-sided relationship declared with back_populates on both classes.
class Parent(Base):
    __tablename__ = 'parent'
    id       = Column(Integer, primary_key=True)
    name     = Column(String)
    children = relationship('Child', back_populates='parent')

class Child(Base):
    __tablename__ = 'child'
    id        = Column(Integer, primary_key=True)
    name      = Column(String)
    parent_id = Column(Integer, ForeignKey('parent.id'))
    parent    = relationship('Parent', back_populates='children')

Base.metadata.create_all(bind=engine)
Session = scoped_session(sessionmaker(bind=engine))

# Alice gets three children, Bob one.
p1 = Parent(name="Alice")
p2 = Parent(name="Bob")

c1 = Child(name="foo")
c2 = Child(name="bar")
c3 = Child(name="ker")
c4 = Child(name="cat")

p1.children.extend([c1, c2, c3])
p2.children.append(c4)

try:
    Session.add(p1)
    Session.add(p2)
    Session.commit()

    # count number of children
    q = Session.query(Parent, func.count(Child.id))\
               .join(Child)\
               .group_by(Parent.id)

    # print result
    for _p, _c in q.all():
        print('parent: {}, num_child: {}'.format(_p.name, _c))
finally:
    # scoped_session.remove() closes the session and discards the registry.
    Session.remove()
output:

$ python join_group_by.py
parent: Alice, num_child: 3
parent: Bob, num_child: 1
Create tables with dynamic columns (ORM)
from sqlalchemy import create_engine
from sqlalchemy import Column, Integer, String
from sqlalchemy import inspect
from sqlalchemy.engine.url import URL
from sqlalchemy.ext.declarative import declarative_base

# NOTE(review): the 'postgres' drivername alias was removed in SQLAlchemy
# 1.4 -- modern versions require 'postgresql'.
db_url = {'drivername': 'postgres',
          'username': '******',
          'password': '******',
          'host': '192.168.99.100',
          'port': 5432}

engine = create_engine(URL(**db_url))
Base = declarative_base()

def create_table(name, cols):
    """Create table `name` from the column dict unless it already exists."""
    # Load the live database schema into Base.metadata so the existence
    # check below reflects reality, not just what this process created.
    Base.metadata.reflect(engine)
    if name in Base.metadata.tables: return

    # Build a declarative class on the fly; cols must include __tablename__.
    table = type(name, (Base,), cols)
    table.__table__.create(bind=engine)

create_table('Table1', {
             '__tablename__': 'Table1',
             'id': Column(Integer, primary_key=True),
             'name': Column(String)})

create_table('Table2', {
             '__tablename__': 'Table2',
             'id': Column(Integer, primary_key=True),
             'key': Column(String),
             'val': Column(String)})

# List every table now visible in the database.
inspector = inspect(engine)
for _t in inspector.get_table_names():
    print(_t)
output:

$ python sqlalchemy_dynamic_orm.py
Table1
Table2
Close database connection
from sqlalchemy import (
    create_engine,
    event,
    Column,
    Integer)

from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base

engine = create_engine('sqlite://')
base = declarative_base()

# Fires when engine.dispose() runs, proving the hook at the bottom executes.
@event.listens_for(engine, 'engine_disposed')
def receive_engine_disposed(engine):
    print("engine dispose")

class Table(base):
    # The table name contains a space; SQLAlchemy quotes it when emitting DDL.
    __tablename__ = 'example table'
    id = Column(Integer, primary_key=True)

base.metadata.create_all(bind=engine)
session = sessionmaker(bind=engine)()

# Closing the session is distinct from disposing the engine's connections;
# the nested try blocks make sure both always happen.
try:
    try:
        row = Table()
        session.add(row)
    except Exception as e:
        session.rollback()
        raise
    finally:
        session.close()
finally:
    engine.dispose()
output:

$ python db_dispose.py
engine dispose
Warning
Be careful. Close session does not mean close database connection. SQLAlchemy session generally represents the transactions, not connections.

Cannot use the object after close the session
from __future__ import print_function

from sqlalchemy import (
    create_engine,
    Column,
    String,
    Integer)

from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base


url = 'sqlite://'
engine = create_engine(url)
base = declarative_base()

class Table(base):
    __tablename__ = 'table'
    id  = Column(Integer, primary_key=True)
    key = Column(String)
    val = Column(String)

base.metadata.create_all(bind=engine)
session = sessionmaker(bind=engine)()

try:
    t = Table(key="key", val="val")
    try:
        print(t.key, t.val)
        session.add(t)
        session.commit()
    except Exception as e:
        print(e)
        session.rollback()
    finally:
        session.close()

    # After close() the instance is detached with expired attributes, so the
    # attribute access below needs a session and raises instead.
    print(t.key, t.val) # exception raise from here
except Exception as e:
    print("Cannot use the object after close the session")
finally:
    engine.dispose()
output:

$ python sql.py
key val
Cannot use the object after close the session
Logo



This project tries to provide many snippets of Python code that make life easier.

Useful Links
pysheeet website
pysheeet @ GitHub
Issue Tracker
pysheeet as a PDF
ads via Carbon
Bring your team together with Slack, the collaboration hub for work.
ads via Carbon
Table of Contents
SQLAlchemy
Set a database URL
Sqlalchemy Support DBAPI - PEP249
Transaction and Connect Object
Metadata - Generating Database Schema
Inspect - Get Database Information
Reflection - Loading Table from Existing Database
Get Table from MetaData
Create all Tables Store in “MetaData”
Create Specific Table
Create table with same columns
Drop a Table
Some Table Object Operation
SQL Expression Language
insert() - Create an “INSERT” Statement
select() - Create a “SELECT” Statement
join() - Joined Two Tables via “JOIN” Statement
Delete Rows from Table
Check Table Existing
Create multiple tables at once
Create tables with dynamic columns (Table)
Object Relational add data
Object Relational update data
Object Relational delete row
Object Relational relationship
Object Relational self association
Object Relational basic query
mapper: Map Table to class
Get table dynamically
Object Relational join two tables
join on relationship and group_by count
Create tables with dynamic columns (ORM)
Close database connection
Cannot use the object after close the session
Quick search
©2016-2019, crazyguitar. | Page source
Fork me on GitHub
示例#8
0
def main():
    """Create the schema and open a session against the configured database."""
    engine = create_engine(URL(**DATABASE), echo=False)
    Base.metadata.create_all(engine)
    Session = sessionmaker(bind=engine)
    session = Session()
示例#9
0
import logging
import sys
import psycopg2 as pg
import string
from datetime import datetime, timezone
import databaseconfig as config
from sqlalchemy import create_engine, MetaData, Table, select
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.engine.url import URL
from sqlalchemy.sql.expression import bindparam
from sqlalchemy.pool import NullPool
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy_utils import get_mapper


# Reflect the existing schema and automap ORM classes from it.  NullPool
# opens a fresh DBAPI connection per checkout (no connection pooling).
engine = create_engine(URL(**config.postgres), poolclass=NullPool, echo=False)
metadata = MetaData()
metadata.reflect(engine)
Base = automap_base(metadata=metadata)
Base.prepare(engine, reflect=True)
# autocommit=True: legacy autocommit mode for the produced sessions.
Session= sessionmaker(bind=engine, autocommit=True)

_logger = logging.getLogger(__name__)
logformat = "[%(asctime)s] %(levelname)s:%(name)s:%(message)s"
# Level 20 == logging.INFO.
logging.basicConfig(level=20, stream=sys.stdout,
                    format=logformat, datefmt="%Y-%m-%d %H:%M:%S")

products_api = "http://apiconsulta.medicamentos.gob.sv/public/productos"
def process_products(products, session): 
    products_table = metadata.tables['pharmaceuticals']
    for prod in products:
示例#10
0
def connect_database():
    """Build and return a SQLAlchemy engine from the DATABASE dict in settings."""
    database_url = URL(**settings.DATABASE)
    return create_engine(database_url)
示例#11
0
# Third party lib imports

from flask import Flask

from sqlalchemy.engine.url import URL

# The 'flask.ext.*' import hooks were deprecated in Flask 0.11 and removed
# in Flask 1.0; extensions must be imported under their own package name.
from flask_sqlalchemy import SQLAlchemy

# Local imports
from naas import settings

app = Flask(__name__)

# Build the connection URL from the settings dict (drivername/username/...).
app.config['SQLALCHEMY_DATABASE_URI'] = URL(**settings.DATABASE)

db = SQLAlchemy(app)

# Imported for side effects: registers the view routes on `app`.
import naas.views
示例#12
0
def db_connect():
    """Return a new engine configured from settings.DATABASE."""
    connection_url = URL(**settings.DATABASE)
    return create_engine(connection_url)
示例#13
0
    def crawl(self):
        """Consume 'new' links from the shared DB queue: fetch each page,
        enqueue its out-links (depth-limited and filtered), record pages that
        look like products, and mark each link visited or errored.

        Returns 0 on success, 1 if the database is unreachable.  In cluster
        mode (self.job_num set) it retries a few times when the queue is
        momentarily empty before giving up.
        """
        import math
        import time
        import urllib.parse
        import urllib.request
        from crawler_db import (Status, Link, Product)
        from crawler_web import Webpage

        import sqlalchemy
        from sqlalchemy import (create_engine, exc)
        # NOTE(review): this rebinds `exc` to sqlalchemy.orm.exc, shadowing
        # the `exc` imported on the previous line; sqlalchemy.exc is still
        # reachable through the `sqlalchemy` module import above.
        from sqlalchemy.orm import (Session, exc)

        # connect to database
        try:
            engine = create_engine(URL("mysql+mysqlconnector",
                                   username=self.dbuser,
                                   password=self.dbpass,
                                   host=self.dbhost,
                                   port=self.dbport))
            engine.execute("USE " + self.dbschema)  # select database
        except Exception as e:
            print("ERROR: Can't connect to database (%s)" % e)
            return 1

        session = Session(engine)
        start_time = time.time()

        num_retries = 3
        if self.job_num:
            attempts = num_retries  # running in cluster mode
            # retry if new links are not available momentarily,
            # otherwise process has finished
        else:
            attempts = 1  # running in standalone mode
            # no retry

        while attempts:
            # process same site first
            # (with_for_update row-locks the claimed link so concurrent
            # crawler jobs do not grab the same one)
            link = session.query(Link).filter(Link.depth > 1, Link.status == Status.new).with_for_update().first()
            if not link:
                link = session.query(Link).filter_by(status=Status.new).with_for_update().first()

            while link:
                attempts = num_retries  # restart attempts
                this_url = link.url
                link_depth = link.depth + 1
                self.num_processed += 1
                self.host = urllib.parse.urlparse(this_url)[1]

                status = Status.visited
                try:
                    page = Webpage(this_url)
                    if not page.fetch():
                        status = Status.error
                    else:
                        if not self.depth or (link_depth <= self.depth):
                            for link_url in [self._pre_visit_url_condense(l) for l in page.out_urls]:
                                # apply pre-visit filters.
                                do_not_follow = [f for f in self.pre_visit_filters if not f(link_url)]

                                # if no filters failed, process URL
                                if [] == do_not_follow:
                                    new_link = Link(link_url, depth=link_depth)
                                    session.begin_nested()  # establish a savepoint
                                    session.add(new_link)
                                    try:
                                        session.flush()
                                    except sqlalchemy.exc.IntegrityError:  # rollback duplicated entry
                                        session.rollback()
                                        continue
                                    except exc.FlushError:  # rollback duplicated entry
                                        session.rollback()
                                        continue
                                    except:
                                        session.rollback()
                                        raise
                                    session.commit()

                        # apply product filters.
                        is_product = [f for f in self.product_filters if not f(page)]

                        # if no filters failed, process product
                        if [] == is_product:
                            product = Product(this_url, title=page.title, name=page.product_name)
                            session.begin_nested()  # establish a savepoint
                            session.add(product)
                            try:
                                session.flush()
                            except:
                                session.rollback()
                                raise
                            session.commit()

                except Exception as e:
                    print("ERROR: Can't process url '%s' (%s)" % (this_url, e))
                    status = Status.error

                link.status = status
                session.commit()

                # process same site first
                link = session.query(Link).filter(Link.depth > 1, Link.status == Status.new).with_for_update().first()
                if not link:
                    link = session.query(Link).filter_by(status=Status.new).with_for_update().first()

            # sleep if running in cluster mode
            if self.job_num:
                time.sleep(5)
            attempts -= 1

        end_time = time.time()
        time_diff = end_time - start_time

        rate = 0
        if time_diff:
            rate = int(math.ceil(float(self.num_processed) / time_diff))

        print("\tProcessed:    %d" % self.num_processed)
        print("\tStats:        %d/s after %0.2fs" % (rate, time_diff))

        session.close()
        engine.dispose()
        return 0
示例#14
0
def main(argv):
    """Entry point: parse options, (re)initialize the crawl database, run the
    crawl standalone or fan it out over a dispy cluster, and optionally dump
    the collected products to a CSV file.  Returns 0 on success, 1 on error.
    """

    # parse command line options
    opts, args = parse_options(argv)
    if not (opts or args):
        return 1

    urlfile = opts.urlfile
    csvfile = opts.csvfile
    dbuser = opts.dbuser
    dbpass = opts.dbpass
    dbhost = opts.dbhost
    dbport = opts.dbport
    dbschema = opts.dbschema
    depth = opts.depth
    resume = opts.resume
    cluster_jobs = opts.cluster_jobs

    # connect to database
    try:
        engine = create_engine(URL("mysql+mysqlconnector",
                               username=dbuser,
                               password=dbpass,
                               host=dbhost,
                               port=dbport))
        # create database schema
        # NOTE(review): dbschema is interpolated into raw SQL; it comes from
        # the command line, so keep this out of untrusted hands.
        engine.execute("CREATE DATABASE IF NOT EXISTS " + dbschema)
        engine.execute("USE " + dbschema)
    except Exception as e:
        print("ERROR: Can't connect to database (%s)" % e)
        return 1

    # if resume previous crawl, do not clean database
    if not resume:
        # read file with URL list
        if urlfile:
            try:
                with open(urlfile, 'r') as file:
                    url_list = [line.strip() for line in file.readlines()]
                if not url_list:
                    print("No data in file %s" % urlfile)
                    return 1
                print("Read file %s" % urlfile)
            except IOError as error:
                print("I/O error({0}): {1}".format(error.errno, error.strerror))
                return 1
            except:  # handle other exceptions such as attribute errors
                print("Unexpected error:", sys.exc_info()[0])
                return 1
        else:
            # URL in command line argument
            url_list = [args[0]]

        # clean database
        Base.metadata.drop_all(engine)
        Base.metadata.create_all(engine)

        session = Session(engine)

        # insert URL list into database
        links = []
        for url in url_list:
            links.append(Link(url))
        session.add_all(links)
        session.commit()

        session.close()

    # run in cluster mode
    if cluster_jobs > 0:
        # 'compute' needs definition of class Crawler
        cluster = dispy.JobCluster(compute, depends=[Crawler])
        jobs = []
        for i in range(1, cluster_jobs + 1):
            crawler = Crawler(i, dbuser, dbpass, dbhost, dbport, dbschema, depth)  # create object of Crawler
            job = cluster.submit(crawler)  # it is sent to a node for executing 'compute'
            job.id = crawler  # store this object for later use
            jobs.append(job)

        # waits until all jobs finish
        for job in jobs:
            job()  # wait for job to finish
            print('Job %s:\n%s\n%s\n%s' % (job.id.job_num, job.stdout, job.stderr, job.exception))
    else:  # run in standalone mode
        crawler = Crawler(0, dbuser, dbpass, dbhost, dbport, dbschema, depth)
        crawler.crawl()

    # write product list to .csv file
    if csvfile:
        if not csvfile.lower().endswith('.csv'):
            csvfile += '.csv'
        try:
            with open(csvfile, 'w', newline='') as outfile:
                writer = csv.writer(outfile, quoting=csv.QUOTE_ALL)

                session = Session(engine)
                # paginate results
                rows_per_page = 50
                page_number = 0
                dbquery = session.query(Product)

                products = dbquery.limit(rows_per_page).offset(page_number * rows_per_page).all()
                while products:
                    for product in products:
                        writer.writerow([product.url, product.title, product.name])

                    page_number += 1
                    products = dbquery.limit(rows_per_page).offset(page_number * rows_per_page).all()

            print("Write file %s" % csvfile)
        except IOError as error:
            print("I/O error({0}): {1}".format(error.errno, error.strerror))
            return 1
        except:  # handle other exceptions such as attribute errors
            print("Unexpected error:", sys.exc_info()[0])
            return 1

        session.close()

    engine.dispose()
    return 0
示例#15
0
# Resolve the SQLite file path from config; fall back to a local default and
# persist that choice back into the config store.
base_path = config.get("base", "path")

if not base_path:
    base_path = './base.db'
    config.set("base", "path", "./base.db")
    config.commit()

Model = declarative_base()

DATABASE = {
    'drivername': 'sqlite',
    'database': base_path
}

# check_same_thread=False lets the SQLite connection be used from threads
# other than the one that created it.
dbengine = create_engine(URL(**DATABASE), connect_args={'check_same_thread': False})
Session = sessionmaker(bind=dbengine)
LOGGER.log(logging.INFO, msg="Creating session with database %s" % base_path)
session = Session()


class Bot(Model):
    """ORM row describing one configured bot (table 'bots')."""
    __tablename__ = 'bots'
    # `name` is the primary key; `token` must be unique across bots.
    name = Column(String, primary_key=True)
    platform = Column(String)
    token = Column(String, unique=True)
    start_message = Column(String)
    default_response = Column(String)
    start_callback = Column(String)
    message_callback = Column(String)
    callback_auth = Column(Boolean)
示例#16
0
def db_connect():
    """Create an engine from ProxyFetcher's DATABASE settings."""
    url = URL(**ProxyFetcher.settings.DATABASE)
    return create_engine(url)
示例#17
0
from settings import dictionary


def to_dictionary(instance):
    """Serialize a mapped instance to a plain dict keyed by column name.

    datetime values become 'YYYY-MM-DD HH:MM:SS' strings; plain date values
    become ISO 'YYYY-MM-DD' strings; everything else passes through as-is.
    """
    result = {}
    for col in instance.__table__.columns:
        value = getattr(instance, col.name)
        # datetime is a subclass of date, so it must be checked first.
        if isinstance(value, datetime):
            value = value.isoformat(' ')
        elif isinstance(value, date):
            value = value.isoformat()
        result[col.name] = value
    return result

mysql = {}
# Thread-local engine strategy with a small, aggressively recycled pool;
# convert_unicode is a legacy option forcing unicode round-tripping.
mysql['engine'] = create_engine(
    URL(**dictionary['MYSQL']),
    convert_unicode=True,
    echo=False,
    pool_recycle=15,
    pool_size=25,
    pool_timeout=15,
    strategy='threadlocal',
)
# Base bound to the engine with thread-local metadata; every model gains the
# to_dictionary serializer defined above as a method.
mysql['base'] = declarative_base(
    bind=mysql['engine'], metadata=ThreadLocalMetaData()
)
mysql['base'].to_dictionary = to_dictionary
mysql['session'] = scoped_session(sessionmaker(
    autocommit=False,
    autoflush=False,
    bind=mysql['engine'],
示例#18
0
from sqlalchemy import Column, Integer, String, ForeignKey, create_engine, DateTime, Float
from sqlalchemy.engine.url import URL
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from consts import MYSQL

# Assemble the MySQL (PyMySQL driver) connection URL from the consts.MYSQL dict.
DB_URL = URL(drivername='mysql+pymysql',
             username=MYSQL['user'],
             password=MYSQL['password'],
             host=MYSQL['host'],
             port=MYSQL['port'],
             database=MYSQL['data_base'])
Base = declarative_base()


class NmapScan(Base):
    """
    The table which holds the metadata about the scan
    """
    __tablename__ = 'nmap_scan'
    id = Column(Integer, primary_key=True, autoincrement=True)
    ip = Column(String(24))
    start_time = Column(DateTime)
    elapsed = Column(Float)
    status = Column(String(32))
    # One scan has many PortScan rows; PortScan.npm_scan points back here.
    ports = relationship("PortScan", back_populates="npm_scan")


class PortScan(Base):
    """
    The table which holds all the data about the ports that were scanned.
示例#19
0
https://www.pythonsheets.com/notes/python-sqlalchemy.html


SQLAlchemy
Set a database URL
from sqlalchemy.engine.url import URL

# URL renders a dict of parts as 'driver://user:pass@host:port/db'.
postgres_db = {'drivername': 'postgres',
               'username': '******',
               'password': '******',
               'host': '192.168.99.100',
               'port': 5432}
print(URL(**postgres_db))

# Minimal form: just a driver name plus a database path.
sqlite_db = {'drivername': 'sqlite', 'database': 'db.sqlite'}
print(URL(**sqlite_db))
output:

$ python sqlalchemy_url.py
postgres://postgres:[email protected]:5432
sqlite:///db.sqlite
Sqlalchemy Support DBAPI - PEP249
from sqlalchemy import create_engine

db_uri = "sqlite:///db.sqlite"
engine = create_engine(db_uri)

# DBAPI - PEP249
# create table
engine.execute('CREATE TABLE "EX1" ('
               'id INTEGER NOT NULL,'
示例#20
0
File: db.py  Project: loicdtx/idrop-db
from contextlib import contextmanager

from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.engine.url import URL
from sqlalchemy.event import listen
from sqlalchemy.sql import select, func

from idb.globals import DB_CONFIG

# Make a dict of URLs and dict of engines
# (one URL and one Engine per environment key in DB_CONFIG, e.g. 'main')
urls = {k:URL(**v) for k,v in DB_CONFIG.items()}
engines = {k:create_engine(v) for k,v in urls.items()}

Base = declarative_base()


def load_spatialite(dbapi_conn, connection_record):
    """SQLite 'connect' hook: load the SpatiaLite extension into the raw
    DBAPI connection (extension path is Debian/Ubuntu specific)."""
    dbapi_conn.enable_load_extension(True)
    dbapi_conn.load_extension('/usr/lib/x86_64-linux-gnu/mod_spatialite.so')


def init_db(env='main', engines=engines):
    """Initialize the database for environment `env`; SQLite engines get the
    SpatiaLite extension attached and spatial metadata tables created."""
    import idb.models
    engine = engines[env]
    if engine.url.drivername.startswith('sqlite'):
        listen(engine, 'connect', load_spatialite)
        conn = engine.connect()
        conn.execute(select([func.InitSpatialMetaData()]))
        conn.close()
示例#21
0
"""
Hold Objects for SQL Tables and Manage Database Session
"""
from contextlib import contextmanager
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.engine.url import URL
from sqlalchemy.orm import sessionmaker
import sys

sys.path.insert(0, './')
import settings

# Engine and session factory built once at import time from settings.DATABASE.
ENGINE = sa.create_engine(URL(**settings.DATABASE))
SESSION = sessionmaker(bind=ENGINE)


@contextmanager
def session_scope():
    """Provide a transactional scope: commit on success, roll back on error.

    Database errors are deliberately swallowed (only reported to stdout),
    matching the original best-effort behaviour; the session is always closed.
    """
    session = SESSION()
    try:
        yield session
        session.commit()
        print("sql command succeeded")
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit are
        # no longer silently swallowed; DB errors still roll back quietly.
        session.rollback()
        print("sql command failed")
    finally:
        session.close()
示例#22
0
if __name__ == "__main__":
    from model_state import Base, State
    from sys import argv
    import sqlalchemy
    from sqlalchemy.engine.url import URL
    from sqlalchemy import create_engine
    from sqlalchemy.orm import Session

    # Credentials and database name come from the command line:
    # argv[1]=user, argv[2]=password, argv[3]=database.
    mysql = {
        'drivername': 'mysql+mysqldb',
        'host': 'localhost',
        'port': '3306',
        'username': argv[1],
        'password': argv[2],
        'database': argv[3],
    }

    url = URL(**mysql)

    # pool_pre_ping tests each connection before use, avoiding stale handles.
    engine = create_engine(url, pool_pre_ping=True)
    Base.metadata.create_all(engine)

    session = Session(engine)

    # States whose name contains 'a', ordered by id.
    query = session.query(State).filter(State.name.like('%a%'))\
                                .order_by(State.id)
    for r in query.all():
        print("{}: {}".format(r.id, r.name))

    session.close()
示例#23
0
 def get_connection_string(self):
     """Render this store's SQLite (pysqlite) connection URL as a string."""
     url = URL('sqlite+pysqlite', database=self.dbpath)
     return str(url)
示例#24
0
from sqlalchemy import *
from sqlalchemy.engine.url import URL
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.schema import ForeignKeyConstraint
from sqlalchemy.dialects.postgresql import ARRAY
from sqlalchemy.orm import sessionmaker, Session

# Connection settings for the local PostgreSQL instance.
postgres_db = {
    # "postgresql" is the canonical dialect name; the "postgres" alias
    # was removed in SQLAlchemy 1.4 and fails there.
    "drivername": "postgresql",
    "username": "******",
    "password": "******",
    "host": "localhost",
    "port": 5432,
}
engine = create_engine(URL(**postgres_db))
metadata = MetaData()

# Declarative base bound to the engine so tables can be created/reflected
# without passing the engine explicitly.
Base = declarative_base(bind=engine, metadata=metadata)


class Games(Base):
    """ORM mapping for the ``games`` table (one row per game)."""

    __tablename__ = "games"

    # Surrogate primary key for the game record.
    game_id = Column(BIGINT, primary_key=True)

    def __repr__(self):
        return str(self.game_id)


class GamePlayerMetadata(Base):
    """ORM mapping for the ``game_player_metadata`` table."""
    # NOTE(review): no columns are visible here -- the definition looks
    # truncated; confirm the full column list against the original source.
    __tablename__ = "game_player_metadata"
示例#25
0
def get_engine() -> Engine:
    """Build a SQLAlchemy engine from the module-level DATABASE settings."""
    url = URL(**DATABASE)
    return create_engine(url)
示例#26
0
def make_engine():
    """Open and return a fresh database connection.

    NOTE: despite the name, this returns a Connection, not an Engine.
    NullPool disables pooling, so each engine hands out raw connections.
    """
    url = URL(**DATABASE_CONNECTION)
    engine = create_engine(url, poolclass=NullPool)
    return engine.connect()
示例#27
0
def db_connect():
    """
    Performs database connection using database settings from settings.py.
    Returns sqlalchemy engine instance
    """
    url = URL(**frapy.settings.DATABASE)
    return create_engine(url)
示例#28
0
 def SQLALCHEMY_DATABASE_URI(self):
     """Assemble a SQLAlchemy URL from this config's connection attributes."""
     # self.engine is the drivername (first positional URL component).
     parts = {
         'username': self.username,
         'password': self.password,
         'host': self.host,
         'database': self.database,
     }
     return URL(self.engine, **parts)
    def __init__(
        self,
        credentials,
        table_name,
        key_columns,
        fixed_length_key=True,
        suppress_store_backend_id=False,
        manually_initialize_store_backend_id: str = "",
        store_name=None,
    ):
        """Connect to the database and ensure the backing key/value table exists.

        credentials: dict of connection URL fields; must contain
            'drivername' and may contain 'schema' (both are popped, so the
            caller's dict is mutated).
        table_name: name of the table holding key/value rows.
        key_columns: column names forming the fixed-length key; 'value' is
            reserved for the payload column and may not appear here.

        Raises DataContextError when sqlalchemy is unavailable,
        InvalidConfigError for bad configuration, and StoreBackendError when
        an existing table has the wrong schema or creation fails.
        """
        super().__init__(
            fixed_length_key=fixed_length_key,
            suppress_store_backend_id=suppress_store_backend_id,
            manually_initialize_store_backend_id=
            manually_initialize_store_backend_id,
            store_name=store_name,
        )
        # sqlalchemy is an optional dependency; presumably the module-level
        # import leaves it falsy on ImportError -- TODO confirm.
        if not sqlalchemy:
            raise ge_exceptions.DataContextError(
                "ModuleNotFoundError: No module named 'sqlalchemy'")

        if not self.fixed_length_key:
            raise ge_exceptions.InvalidConfigError(
                "DatabaseStoreBackend requires use of a fixed-length-key")

        # 'schema' is not a URL component, so pull it out before building
        # the connection URL from the remaining credential fields.
        drivername = credentials.pop("drivername")
        schema = credentials.pop("schema", None)
        options = URL(drivername, **credentials)
        self.engine = create_engine(options)

        meta = MetaData(schema=schema)
        self.key_columns = key_columns
        # Dynamically construct a SQLAlchemy table with the name and column names we'll use
        cols = []
        for column in key_columns:
            if column == "value":
                raise ge_exceptions.InvalidConfigError(
                    "'value' cannot be used as a key_element name")
            cols.append(Column(column, String, primary_key=True))
        cols.append(Column("value", String))
        try:
            # Reflect the existing table; autoload raises NoSuchTableError
            # when the table does not exist yet.
            table = Table(table_name,
                          meta,
                          autoload=True,
                          autoload_with=self.engine)
            # We do a "light" check: if the columns' names match, we will proceed, otherwise, create the table
            if {str(col.name).lower()
                    for col in table.columns
                } != (set(key_columns) | {"value"}):
                raise ge_exceptions.StoreBackendError(
                    f"Unable to use table {table_name}: it exists, but does not have the expected schema."
                )
        except NoSuchTableError:
            # Table absent: create it (and its schema, if one was requested).
            table = Table(table_name, meta, *cols)
            try:
                if schema:
                    self.engine.execute(
                        f"CREATE SCHEMA IF NOT EXISTS {schema};")
                meta.create_all(self.engine)
            except SQLAlchemyError as e:
                raise ge_exceptions.StoreBackendError(
                    f"Unable to connect to table {table_name} because of an error. It is possible your table needs to be migrated to a new schema.  SqlAlchemyError: {str(e)}"
                )
        self._table = table
        # Initialize with store_backend_id
        self._store_backend_id = None
        self._store_backend_id = self.store_backend_id
示例#30
0
def get_url():
    """
    Return the URL to be used with engine creation based on configuration
    """
    profile = Profile.get_used()
    return URL(**dict(profile))