Esempio n. 1
0

instance_name = "ocp01"

# Equivalent raw SQL: select uuid from instances where display_name = <instance_name>

engine = db.create_engine('mysql://*****:*****@10.255.26.100:3306/nova')
connection = engine.connect()
metadata = db.MetaData()

# Reflect the nova "instances" table and resolve the UUID of the instance
# whose display_name matches instance_name.
nova_instances = db.Table('instances', metadata, autoload=True, autoload_with=engine)
# BUG FIX: use .scalar() so we compare a single uuid value below, not a
# list of result rows as the original fetchall() produced.
instances_uuid = connection.execute(
    db.select([nova_instances.columns.uuid])
    .where(nova_instances.columns.display_name == instance_name)
).scalar()

print(instances_uuid)

# select numa_topology from instance_extra where instance_uuid = <uuid>
nova_instance_extra = db.Table('instance_extra', metadata, autoload=True, autoload_with=engine)
# BUG FIX: the WHERE clause must filter on instance_extra.instance_uuid --
# the original filtered on the instances table, which has no such column.
instance_info = connection.execute(
    db.select([nova_instance_extra.columns.numa_topology])
    .where(nova_instance_extra.columns.instance_uuid == instances_uuid)
).scalar()

# numa_topology is stored as serialized nova-object JSON text; parse it
# before indexing (the original indexed a result list with a string key).
import json
instance_cell = json.loads(instance_info)["nova_object.data"]

# Collect the pinned CPU ids from every NUMA cell of the instance.
instance_cpu = []
for numa in instance_cell["cells"]:
    cpuset = list(numa['nova_object.data']["cpuset"])
    instance_cpu += cpuset
    print(instance_cpu)
Esempio n. 2
0
def get_metadef_tags_table(meta):
    """Return the reflected ``metadef_tags`` table bound to *meta*."""
    table_name = 'metadef_tags'
    return sqlalchemy.Table(table_name, meta, autoload=True)
Esempio n. 3
0
        try:
            study = db_session.query(Study).filter_by(name=key).one()
        except orm.exc.NoResultFound:
            raise KeyError
        study.__parent__ = self
        return study


# Association table linking each study to the forms (schemata) configured
# for it; both foreign keys cascade on delete and together form the
# composite primary key.
study_schema_table = sa.Table(
    'study_schema',
    StudiesModel.metadata,
    sa.Column(
        'study_id',
        sa.Integer(),
        sa.ForeignKey(
            'study.id',
            name='fk_study_schema_study_id',
            ondelete='CASCADE'),
        primary_key=True),
    sa.Column(
        'schema_id',
        sa.Integer(),
        sa.ForeignKey(
            datastore.Schema.id,
            name='fk_study_schema_schema_id',
            ondelete='CASCADE'),
        primary_key=True),
)

# Configured forms for the cycle
cycle_schema_table = sa.Table(
    'cycle_schema', StudiesModel.metadata,
    sa.Column('cycle_id',
              sa.Integer(),
              sa.ForeignKey('cycle.id',
                            name='fk_cycle_schema_cycle_id',
                            ondelete='CASCADE'),
Esempio n. 4
0
from data.database.all_for_session import SqlAlchemyBase
import sqlalchemy

# Association (many-to-many) table linking users to olympiads.
user_to_olimpiad = sqlalchemy.Table(
    'user_to_olimpiad',
    SqlAlchemyBase.metadata,
    sqlalchemy.Column('user', sqlalchemy.Integer,
                      sqlalchemy.ForeignKey('users.id')),
    sqlalchemy.Column('olimpiad', sqlalchemy.Integer,
                      sqlalchemy.ForeignKey('olimpiads.id')),
)
Esempio n. 5
0
def get_metadef_namespace_resource_types_table(meta):
    """Return the reflected ``metadef_namespace_resource_types`` table."""
    return sqlalchemy.Table(
        'metadef_namespace_resource_types', meta, autoload=True)
Esempio n. 6
0
import sqlalchemy as db

# Metadata registry for the event-log storage schema.
SqlEventLogStorageMetadata = db.MetaData()

# One row per logged event: auto-incrementing id, the owning run, the
# serialized event payload (required), its type, and when it happened.
SqlEventLogStorageTable = db.Table(
    'event_logs',
    SqlEventLogStorageMetadata,
    db.Column('id', db.Integer, primary_key=True, autoincrement=True),
    db.Column('run_id', db.String(255)),
    db.Column('event', db.Text, nullable=False),
    db.Column('dagster_event_type', db.Text),
    db.Column('timestamp', db.types.TIMESTAMP),
)
Esempio n. 7
0
import sqlalchemy as sa

metadata = sa.MetaData()

# Users: account credentials plus superuser/disabled flags.
users = sa.Table(
    "users",
    metadata,
    sa.Column("id", sa.Integer, nullable=False),
    sa.Column("login", sa.String(256), nullable=False),
    sa.Column("passwd", sa.String(256), nullable=False),
    sa.Column("is_superuser", sa.Boolean, nullable=False,
              server_default="FALSE"),
    sa.Column("disabled", sa.Boolean, nullable=False,
              server_default="FALSE"),
    # indices
    sa.PrimaryKeyConstraint("id", name="user_pkey"),
    sa.UniqueConstraint("login", name="user_login_key"),
)


# Permissions
permissions = sa.Table(
    "permissions",
    metadata,
    sa.Column("id", sa.Integer, nullable=False),
    sa.Column("user_id", sa.Integer, nullable=False),
    sa.Column("perm_name", sa.String(64), nullable=False),
    # indices
    sa.PrimaryKeyConstraint("id", name="permission_pkey"),
    sa.ForeignKeyConstraint(
Esempio n. 8
0
import datetime

import sqlalchemy as sql

metadata = sql.MetaData()

# Per-run build summary: overall result flag and the build date.
summary = sql.Table(
    'summary', metadata,
    sql.Column('id', sql.Integer, primary_key=True),
    sql.Column('result', sql.Boolean),
    sql.Column('build_date', sql.String))

# One row per repository/image built within a summary run.
summary_item = sql.Table(
    'summary_item', metadata,
    sql.Column('id', sql.Integer, primary_key=True),
    sql.Column('repo_name', sql.String),
    sql.Column('exception', sql.String),
    sql.Column('commit_id', sql.String),
    sql.Column('image_id', sql.String),
    sql.Column('source_desc', sql.String),
    sql.Column('tag', sql.String),
    sql.Column('summary_id', None, sql.ForeignKey('summary.id')))


class SummaryV2(object):
    def __init__(self, engine, summary_id, errorlogs=None):
        self.summary_id = summary_id
        self._engine = engine
        self.errorlogs = errorlogs

    def handle_build_result(self, exc, repo, version, img_id, build_result):
        c = self._engine.connect()
        if exc and self.errorlogs:
            if isinstance(build_result, list):
                build_result = '\n'.join(build_result)
            elif not isinstance(build_result, str):
                build_result = str(build_result)
Esempio n. 9
0
    __tablename__ = 'macroarea'

    # Macroarea name, restricted to the known MACROAREA values (sorted for a
    # stable enum definition) and used as the natural primary key.
    name = sa.Column(sa.Enum(*sorted(MACROAREA)), primary_key=True)

    def __repr__(self):
        # e.g. <Macroarea 'Eurasia'> -- class name taken dynamically.
        return '<%s %r>' % (self.__class__.__name__, self.name)

    # Many-to-many link to Languoid via the languoid_macroarea table,
    # ordered by languoid id; mirrored by Languoid.macroareas.
    languoids = sa.orm.relationship('Languoid',
                                    secondary='languoid_macroarea',
                                    order_by='Languoid.id',
                                    back_populates='macroareas')


# Composite-keyed association table joining languoids to macroareas.
languoid_macroarea = sa.Table(
    'languoid_macroarea',
    _backend.Model.metadata,
    sa.Column('languoid_id', sa.ForeignKey('languoid.id'), primary_key=True),
    sa.Column('macroarea_name', sa.ForeignKey('macroarea.name'),
              primary_key=True))


class Country(_backend.Model):

    __tablename__ = 'country'

    # Two-letter country code (looks like ISO 3166-1 alpha-2 -- TODO confirm);
    # the length is enforced both by the column type and a CHECK constraint.
    id = sa.Column(sa.String(2),
                   sa.CheckConstraint('length(id) = 2'),
                   primary_key=True)
    # Human-readable country name; must be non-empty and unique.
    name = sa.Column(sa.Text,
                     sa.CheckConstraint("name != ''"),
                     nullable=False,
                     unique=True)
Esempio n. 10
0
import sqlalchemy
import pymysql
from pprint import pprint

# Reflect the sakila "actor" table and print a small sample of rows.
engine = sqlalchemy.create_engine(
    'mysql+pymysql://root:add_pwd@localhost/sakila')
connection = engine.connect()
metadata = sqlalchemy.MetaData()
actor = sqlalchemy.Table(
    'actor', metadata, autoload=True, autoload_with=engine)

# SELECT * FROM actor
query = sqlalchemy.select([actor])
result_proxy = connection.execute(query)

# Fetch only the first five rows rather than the whole table.
result_set = result_proxy.fetchmany(5)
pprint(result_set)
Esempio n. 11
0
import sqlalchemy as sa

from .base import metadata

# Game boards: surrogate key plus a short, unique display name.
board = sa.Table(
    'board',
    metadata,
    sa.Column('row_id', sa.Integer, primary_key=True),
    sa.Column('name', sa.String(length=32), unique=True),
)
Esempio n. 12
0
import os
import databases
import sqlalchemy

# Fall back to a local sqlite file when DATABASE_URL is not set.
DATABASE_URL = os.getenv('DATABASE_URL') or "sqlite:////db.db"
database = databases.Database(DATABASE_URL)
metadata = sqlalchemy.MetaData()

# Shortened-URL mapping: the original url keyed by a unique short_code.
urls = sqlalchemy.Table(
    "urls",
    metadata,
    sqlalchemy.Column("id", sqlalchemy.Integer, primary_key=True),
    sqlalchemy.Column("url", sqlalchemy.String),
    sqlalchemy.Column("short_code", sqlalchemy.String, unique=True),
)

# Create the schema up front if it does not exist yet.
engine = sqlalchemy.create_engine(
    DATABASE_URL,
    #connect_args={"check_same_thread": False}
)
metadata.create_all(engine)
Esempio n. 13
0
    # Hourly temperature readings in Fahrenheit, one column per hour of the
    # day (0500 through 2300) -- presumably local time; confirm against the
    # data source.
    TEMP_F_0500 = Column(Float)
    TEMP_F_0600 = Column(Float)
    TEMP_F_0700 = Column(Float)
    TEMP_F_0800 = Column(Float)
    TEMP_F_0900 = Column(Float)
    TEMP_F_1000 = Column(Float)
    TEMP_F_1100 = Column(Float)
    TEMP_F_1200 = Column(Float)
    TEMP_F_1300 = Column(Float)
    TEMP_F_1400 = Column(Float)
    TEMP_F_1500 = Column(Float)
    TEMP_F_1600 = Column(Float)
    TEMP_F_1700 = Column(Float)
    TEMP_F_1800 = Column(Float)
    TEMP_F_1900 = Column(Float)
    TEMP_F_2000 = Column(Float)
    TEMP_F_2100 = Column(Float)
    TEMP_F_2200 = Column(Float)
    TEMP_F_2300 = Column(Float)

    def __repr__(self):
        # Short debug form showing only id and name.
        return f"id={self.id}, name={self.name}"


# Create any missing ORM tables, then reflect the live schema so the raw
# cz_2010_usc table can be addressed directly.
Base.metadata.create_all(engine)
metadata = MetaData(bind=engine)
metadata.reflect()
table = sqlalchemy.Table('cz_2010_usc', metadata, autoload=True)
# Replace the table contents wholesale: clear it, then bulk-insert data_dict.
conn.execute(table.delete())
conn.execute(table.insert(), data_dict)
def downgrade(migrate_engine):
    """Revert this migration by dropping the region table's ``url`` column."""
    meta = sql.MetaData()
    meta.bind = migrate_engine

    # drop_column is provided by sqlalchemy-migrate's Table monkey-patch.
    regions = sql.Table(_REGION_TABLE_NAME, meta, autoload=True)
    regions.drop_column('url')
Esempio n. 15
0
def _table(*args, **kwargs):
    """Create a sqlalchemy Table, always forcing the InnoDB storage engine.

    The caller's kwargs are copied, never mutated in place.
    """
    options = dict(kwargs)
    options['mysql_engine'] = 'InnoDB'
    return sqlalchemy.Table(*args, **options)
def Get_Twitter_Data_As_Pandas(C_Key,
                               C_Secret,
                               A_Token,
                               A_Token_Secret,
                               Max_Tweets=100,
                               Filters=["bitcoin"],
                               Table_Name="tmp_tweets",
                               New_Table_Columns=None,
                               Query=None,
                               Tweet_Data_Parts=None,
                               Temporary=True,
                               *args,
                               **kwargs):
    """Stream tweets into a MySQL table and return them as a DataFrame.

    C_Key,
        String - Twitter Consumer Key
    C_Secret,
        String - Twitter Consumer Secret
    A_Token,
        String - Twitter Access Token
    A_Token_Secret,
        String - Twitter Access Token Secret
    Max_Tweets= 100,
        INT - Number of tweets to extract
    Filters = None,
        List(String) - What words to filter on
        Default - ["Trump"]
    Table_Name = None,
        String - The name of your new table (default is tmp_tweets)
    New_Table_Columns = "(date DATETIME, username VARCHAR(20), tweet VARCHAR(280))",
        List(String) - SQL format tuple of string pairs for column name and type e.g. ['time DATETIME', 'age INT(2)']'
    Query = None,
        String - SQL query to execute in database table
    Tweet_Data_Parts = None
        List(String/List(String)) - Parts of the tweet json (according to twitter) to extract e.g. [{"user":"******"}, text'] is default
        Time is automatically added in to database
    Temporary = True,
        Bool - Store Tweets in the Database temporarily or permanently
        Default = True
    """
    # NOTE(review): Filters=["bitcoin"] is a mutable default argument shared
    # across calls; prefer None with an in-body default.

    # NOTE(review): sqlalchemy.Text is a column *type*, not a selectable, so
    # this select() is almost certainly wrong -- and Query is never used
    # again below (a fresh stmt is built after streaming). Confirm whether
    # the Query parameter can be dropped.
    if not Query:
        Query = sqlalchemy.select([sqlalchemy.Text(Table_Name)])
    else:
        pass

    # Authenticate against the Twitter streaming API.
    auth = tweepy.OAuthHandler(consumer_key=C_Key, consumer_secret=C_Secret)
    auth.set_access_token(A_Token, A_Token_Secret)

    # Raw MySQL connection shared by the stream listeners for inserts.
    db_connection = MySQLdb.connect("127.0.0.1",
                                    "root",
                                    "redcard",
                                    "tweet_store",
                                    charset='utf8mb4')
    cursor = db_connection.cursor()

    # Print a progress line roughly every 20% of Max_Tweets.
    tweet_add_milestone = int(Max_Tweets / 5)

    # ## Define a class to listen to the twitter API
    # If we want to use twitter data and/or a database other than the default then define this custom listener:
    if Tweet_Data_Parts and New_Table_Columns:

        class Stream_Listener(tweepy.StreamListener):
            def __init__(self,
                         api=None,
                         Max_Tweets_=None,
                         Table_Name_=None,
                         New_Table_Columns_=None,
                         Tweet_Data_Parts_=None):
                super().__init__()
                self.num_tweets = 0
                self.max_tweets = Max_Tweets_
                self.table_name = Table_Name_
                self.tweet_data_parts = Tweet_Data_Parts_
                self.new_table_columns = New_Table_Columns_

                # For creating, create table if not default
                # Below line  is hide your warning
                cursor.execute("SET sql_notes = 0; ")

                # create table here....

                # NOTE(review): self.new_table_columns is documented as a
                # list, but here it is concatenated as if it were a string.
                exec_stmt = str(
                    "CREATE TABLE IF NOT EXISTS " + self.table_name + " " +
                    self.new_table_columns +
                    " CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci")

                cursor.execute(exec_stmt)
                db_connection.commit()

            def on_data(self, data):
                # Called by tweepy with one raw JSON tweet per event.
                if self.num_tweets < self.max_tweets:
                    all_data = json.loads(data)
                    data_parts = ()

                    for part in self.tweet_data_parts:
                        if isinstance(part, str):
                            if part == "created_at":
                                # NOTE(review): strptime is missing its input
                                # string (all_data[part]), and the result is
                                # added to a tuple with += -- both raise
                                # TypeError at runtime.
                                time_created = datetime.datetime.strptime(
                                    "%a %b %d %H:%M:%S %z %Y")
                                data_parts += time_created
                            else:
                                data_parts += (all_data[part], )

                        elif isinstance(part, dict):
                            # NOTE(review): dict has no .key()/.item()
                            # methods -- this raises AttributeError; it
                            # presumably meant the dict's single key/value.
                            data_parts += (all_data[part.key()][part.item()], )

                        else:
                            raise ValueError(
                                "The Listed Tweet_Data_Part was not either of type dict or str"
                            )

                    num_inserted_vars = len(data_parts)

                    # NOTE(review): str() with multiple positional arguments
                    # raises TypeError, and the generator expression for the
                    # placeholders is never expanded -- this INSERT cannot
                    # produce valid SQL as written.
                    exec_stmt = str(
                        "INSERT INTO " + self.table_name,
                        "(" + str(", ".join(self.new_table_columns)) + ")",
                        "VALUES",
                        str(("%s, " for i in range(num_inserted_vars))))

                    cursor.execute(exec_stmt, data_parts)

                    db_connection.commit()

                    if self.num_tweets % tweet_add_milestone == 0:
                        print("Successfully added tweet. Number:",
                              self.num_tweets + 1)
                    self.num_tweets += 1

                    return True

                else:
                    # Returning False tells tweepy to stop the stream.
                    print("Finished writing to table:", self.table_name)
                    return False

            def on_error(self, status):
                print("Error Code:", status)

    #If we haven't properly defined how to create and insert into the database then raise an error
    elif (not Tweet_Data_Parts) != (not New_Table_Columns):
        raise ValueError(
            "Need both New Table Columns and Tweet_Data_Parts to specify alternative tweet data collection"
        )

    #Otherwise use the default listener and database/table
    else:

        class Stream_Listener(tweepy.StreamListener):
            def __init__(self,
                         api=None,
                         Max_Tweets_=None,
                         Table_Name_=None,
                         New_Table_Columns_=None,
                         Tweet_Data_Parts_=None):
                super().__init__()
                self.num_tweets = 0
                self.max_tweets = Max_Tweets_
                self.table_name = Table_Name_
                self.tweet_data_parts = Tweet_Data_Parts_
                self.new_table_columns = New_Table_Columns_

                # For creating, create table if not default
                # Below line  is hide your warning
                cursor.execute("SET sql_notes = 0; ")
                # create table here....

                exec_stmt = str(
                    "CREATE TABLE IF NOT EXISTS " + self.table_name +
                    " (date DATETIME, username VARCHAR(20), tweet VARCHAR(280)) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci"
                )

                cursor.execute(exec_stmt)
                db_connection.commit()

            def on_data(self, data):
                # Default path: store (created_at, screen_name, text) only.
                if self.num_tweets < self.max_tweets:
                    all_data = json.loads(data)
                    tweet = all_data["text"]
                    username = all_data["user"]["screen_name"]

                    cur_time = datetime.datetime.strptime(
                        all_data["created_at"], "%a %b %d %H:%M:%S %z %Y")

                    exec_stmt = str(
                        "INSERT INTO " + self.table_name +
                        " (date, username, tweet) VALUES (%s, %s, %s)")

                    cursor.execute(exec_stmt, (cur_time, username, tweet))

                    db_connection.commit()

                    if self.num_tweets % tweet_add_milestone == 0:
                        print("Successfully added tweet. Number:",
                              self.num_tweets + 1)
                    self.num_tweets += 1

                    return True

                else:
                    print("Finished writing to table:", self.table_name)

                    return False

            def on_error(self, status):
                print("Error Code:", status)

    #Initialise the stream listener
    listener = Stream_Listener(Max_Tweets_=Max_Tweets,
                               Table_Name_=Table_Name,
                               New_Table_Columns_=New_Table_Columns,
                               Tweet_Data_Parts_=Tweet_Data_Parts)
    #Authenticate the listener
    data_stream = tweepy.Stream(auth, listener)

    #Add filters; this blocks until the listener returns False.
    data_stream.filter(track=Filters)

    # ## Read the tweets database  to Pandas
    # First create the engine to connect to the database
    engine = sqlalchemy.create_engine(
        'mysql+mysqldb://root:[email protected]/tweet_store')
    #Set up a metadata object to track table metadata
    meta_data = sqlalchemy.MetaData()
    tweet_table = sqlalchemy.Table(Table_Name,
                                   meta_data,
                                   autoload=True,
                                   autoload_with=engine)
    #Establish the database connection
    connection = engine.connect()
    #Create the query and execute it
    stmt = sqlalchemy.select([tweet_table])
    results = connection.execute(stmt).fetchall()

    df = pd.DataFrame(results)

    # Temporary mode: wipe the staging table after reading it back.
    if Temporary:
        exec_stmt = "DELETE FROM " + Table_Name
        cursor.execute(exec_stmt)
        db_connection.commit()

        connection.execute("FLUSH TABLES " + Table_Name)

        stmt = "SELECT COUNT(*) FROM " + Table_Name
        num_rows = connection.execute(stmt).scalar()

        if num_rows == 0:
            print("Table Deleted!")
    else:
        pass

    return df
import binascii
import hashlib
import os

import sqlalchemy as sa
from sqlalchemy import orm

from app.utils.database import ModelBase

# Many-to-many join table mapping users to their granted scopes; the two
# foreign keys together form the composite primary key.
user_scopes = sa.Table(
    "userscope",
    ModelBase.metadata,
    sa.Column(
        "user_id", ModelBase.id.type, sa.ForeignKey("user.id"),
        primary_key=True),
    sa.Column(
        "scope_id", ModelBase.id.type, sa.ForeignKey("scope.id"),
        primary_key=True),
)


class User(ModelBase):
    # Login identifier; unique and indexed for fast lookups.
    email = sa.Column(sa.String(255), nullable=False, index=True, unique=True)
    # Stored password -- presumably a hash produced via the hashlib/binascii
    # imports elsewhere in this module; confirm the format before relying on it.
    password = sa.Column(sa.String(255))
    first_name = sa.Column(sa.String(120))
    last_name = sa.Column(sa.String(120))
    # Soft-disable flag; accounts are active by default.
    is_active = sa.Column(sa.Boolean, nullable=False, default=True)
    last_login = sa.Column(sa.DateTime, nullable=True)
    # Scopes granted to this user via the user_scopes association table.
    scopes = orm.relationship("Scope", secondary=user_scopes)
def Stream_Twitter_Data_MYSQL(C_Key,
                              C_Secret,
                              A_Token,
                              A_Token_Secret,
                              Max_Tweets=100,
                              Filters=["Bitcoin"],
                              Table_Name="tmp_tweets",
                              New_Table_Columns=None,
                              Tweet_Data_Parts=None,
                              *args,
                              **kwargs):
    """Start MySQL, stream tweets into a table, and return the connection.

    C_Key,
        String - Twitter Consumer Key
    C_Secret,
        String - Twitter Consumer Secret
    A_Token,
        String - Twitter Access Token
    A_Token_Secret,
        String - Twitter Access Token Secret
    Max_Tweets= 100,
        INT - Number of tweets to extract
    Filters = None,
        List(String) - What words to filter on
        Default - ["Trump"]
    Table_Name = None,
        String - The name of your new table (default is tmp_tweets)
    New_Table_Columns = "(date DATETIME, username VARCHAR(20), tweet VARCHAR(280))",
        List(String) - SQL format tuple of string pairs for column name and type e.g. ['time DATETIME', 'age INT(2)']'
    Tweet_Data_Parts = None
        List(String/List(String)) - Parts of the tweet json (according to twitter) to extract e.g. [{"user":"******"}, text'] is default
        Time is automatically added in to database
    Temporary = True,
        Bool - Store Tweets in the Database temporarily or permanently
        Default = True
    """
    # NOTE(review): check_call raises CalledProcessError on a non-zero exit,
    # so exit_code is always 0 here and the else-branch is unreachable. Also
    # note the asymmetry with the stop call below, which invokes
    # "mysql.server" as a single argument -- confirm which form is intended.
    exit_code = subprocess.check_call(["mysql", "server", "start"])
    if exit_code == 0:
        pass

    else:
        raise Warning(
            "Mysql server did not start, may want to start server manually")

    # Give the server a moment to come up before connecting.
    time.sleep(5)

    # Authenticate against the Twitter streaming API.
    auth = tweepy.OAuthHandler(consumer_key=C_Key, consumer_secret=C_Secret)
    auth.set_access_token(A_Token, A_Token_Secret)

    # Raw MySQL connection used by the stream listener for inserts.
    db_connection = MySQLdb.connect("127.0.0.1",
                                    "root",
                                    "redcard",
                                    "tweet_store",
                                    charset='utf8mb4')
    cursor = db_connection.cursor()

    # Print a progress line roughly every 20% of Max_Tweets.
    tweet_add_milestone = int(Max_Tweets / 5)

    # ## Define a class to listen to the twitter API
    # If we want to use twitter data and/or a database other than the default then define this custom listener:

    class Stream_Listener(tweepy.StreamListener):
        def __init__(self,
                     api=None,
                     Max_Tweets_=None,
                     Table_Name_=None,
                     New_Table_Columns_=None,
                     Tweet_Data_Parts_=None):
            super().__init__()
            self.num_tweets = 0
            self.max_tweets = Max_Tweets_
            self.table_name = Table_Name_
            self.tweet_data_parts = Tweet_Data_Parts_
            self.new_table_columns = New_Table_Columns_

            # For creating, create table if not default
            # Below line  is hide your warning
            cursor.execute("SET sql_notes = 0; ")
            # create table here....

            exec_stmt = str(
                "CREATE TABLE IF NOT EXISTS " + self.table_name +
                " (date DATETIME, username VARCHAR(20), tweet VARCHAR(280)) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;"
            )

            cursor.execute(exec_stmt)
            db_connection.commit()

        def on_data(self, data):
            # Called by tweepy with one raw JSON tweet; stores
            # (created_at, screen_name, text) until max_tweets is reached.
            if self.num_tweets < self.max_tweets:
                all_data = json.loads(data)
                tweet = all_data["text"]
                username = all_data["user"]["screen_name"]

                cur_time = datetime.datetime.strptime(
                    all_data["created_at"], "%a %b %d %H:%M:%S %z %Y")

                exec_stmt = str("INSERT INTO " + self.table_name +
                                " (date, username, tweet) VALUES (%s, %s, %s)")

                cursor.execute(exec_stmt, (cur_time, username, tweet))

                db_connection.commit()

                if self.num_tweets % tweet_add_milestone == 0 or self.num_tweets == 0:
                    print("Successfully added tweet. Number:",
                          self.num_tweets + 1)
                self.num_tweets += 1

                return True

            else:
                # Returning False tells tweepy to stop the stream.
                print("Finished writing to table:", self.table_name)

                return False

        def on_error(self, status):
            print("Error Code:", status)

    #Initialise the stream listener
    listener = Stream_Listener(Max_Tweets_=Max_Tweets,
                               Table_Name_=Table_Name,
                               New_Table_Columns_=New_Table_Columns,
                               Tweet_Data_Parts_=Tweet_Data_Parts)
    #Authenticate the listener
    data_stream = tweepy.Stream(auth, listener)

    #Add filters; this blocks until the listener returns False.
    data_stream.filter(track=Filters)

    subprocess.check_call(["mysql.server", "stop"])

    print("Database Server Successfully written to and MySQL server stopped")

    # NOTE(review): the engine/table/connection built below are unused --
    # only the raw db_connection is returned. Confirm before removing.
    # First create the engine to connect to the database
    engine = sqlalchemy.create_engine(
        'mysql+mysqldb://root:[email protected]/tweet_store')
    #Set up a metadata object to track table metadata
    meta_data = sqlalchemy.MetaData()
    tweet_table = sqlalchemy.Table(Table_Name,
                                   meta_data,
                                   autoload=True,
                                   autoload_with=engine)
    #Establish the database connection
    connection = engine.connect()

    return db_connection
Esempio n. 19
0
from app.core.config import DATABASE_URL_STR
import sqlalchemy

engine = sqlalchemy.create_engine(DATABASE_URL_STR)
connection = engine.connect()
metadata = sqlalchemy.MetaData()

# Object records: identity, descriptive fields, size, timestamps, plus
# versioning/mime/alias metadata.
objects = sqlalchemy.Table(
    "objects",
    metadata,
    sqlalchemy.Column("id", sqlalchemy.String),
    sqlalchemy.Column("name", sqlalchemy.String),
    sqlalchemy.Column("description", sqlalchemy.String),
    sqlalchemy.Column("self_uri", sqlalchemy.String),
    sqlalchemy.Column("size", sqlalchemy.BigInteger),
    sqlalchemy.Column("created_time", sqlalchemy.DateTime),
    sqlalchemy.Column("updated_time", sqlalchemy.DateTime),
    sqlalchemy.Column("version", sqlalchemy.String),
    sqlalchemy.Column("mime_type", sqlalchemy.String),
    sqlalchemy.Column("aliases", sqlalchemy.String),
)

# One checksum row per (object, algorithm type) pair.
checksums = sqlalchemy.Table(
    "checksums",
    metadata,
    sqlalchemy.Column("object_id", sqlalchemy.String),
    sqlalchemy.Column("type", sqlalchemy.String),
    sqlalchemy.Column("checksum", sqlalchemy.String),
)
        wss += calculate_wss(df_limit)
        
    return wss


# Load the event log from the database.

# Build the database connection.
engine = db.create_engine('mssql+pyodbc://adminuser:Yxcvbnm@[email protected]/ProMi?driver=ODBC+Driver+17+for+SQL+Server')
con = engine.connect()
metadata = db.MetaData(schema='PROM')

# Fetch the compliant event log and convert it to a DataFrame.
table = db.Table('Event_Log_Compliant', metadata, autoload=True,
                 autoload_with=engine)
rows = con.execute(db.select([table])).fetchall()
df_export = pd.DataFrame(rows)
df_export.columns = rows[0].keys()


#df_export = pd.read_csv('C:/Users/DEVMEYE3/Documents/Master/BPI Challenge 2019/Exports/CLUSTER_02_02_compliant_INV_before_GR_without_SRM.txt', sep = ',', encoding = 'utf-8')


# Keep only the case id and activity name columns.
df_total = df_export.filter(
    items=['_case_concept_name_', '_event_concept_name_'])

# Calculate total within-sum-of-squares

#wss_total = calculate_wss(df_total)
Esempio n. 21
0
 def _table_from_schema(self,
                        name,
                        schema,
                        database: str | None = None) -> sqlalchemy.Table:
     """Build a sqlalchemy Table named *name* in *database* from *schema*."""
     cols = self._columns_from_schema(name, schema)
     return sqlalchemy.Table(name, self.meta, schema=database, *cols)
Esempio n. 22
0
from bs4 import BeautifulSoup
from sqlalchemy.dialects.postgresql import insert
from celery import Celery, Task
from datetime import datetime
from sqlalchemy.orm import sessionmaker, scoped_session
from requests.exceptions import RequestException

# Init SQLAlchemy: a pooled engine plus reflected handles to the two
# tables this worker reads and writes.
# NOTE(review): `sa` is not imported in this chunk -- it must be imported
# as sqlalchemy earlier in the file; verify.
db_engine = sa.create_engine(
    'postgres://postgres@pgbouncer:6432/celery-worker-demo',
    pool_recycle=3600,
    pool_size=10)
db_metadata = sa.MetaData()

table_domains = sa.Table(
    'domains', db_metadata, autoload=True, autoload_with=db_engine)
table_result = sa.Table(
    'result', db_metadata, autoload=True, autoload_with=db_engine)


def db_new_session():
    """Return a fresh thread-local session registry bound to the shared engine."""
    factory = sessionmaker(autocommit=False, autoflush=False, bind=db_engine)
    return scoped_session(factory)


# Init Celery
#  ./app/entrypoint.sh to see the starting command
Esempio n. 23
0
# revision identifiers, used by Alembic.
revision = 'whatever-rev'
down_revision = 'whatever-down-rev'

from alembic import op
import sqlalchemy as sa

import uuid

# Project ORM model used to enumerate the rows being migrated.
from yourmodel import somemodel

# Ad-hoc mapping of the standardattributes table; only the columns this
# migration touches are declared.
standardattrs = sa.Table(
    'standardattributes',
    sa.MetaData(),
    sa.Column('id', sa.BigInteger(), primary_key=True, autoincrement=True),
    sa.Column('resource_type', sa.String(length=255), nullable=False),
    sa.Column('created_at', sa.DateTime(), nullable=False),
    sa.Column('updated_at', sa.DateTime(), nullable=False))


def get_values():
    session = sa.orm.Session(bind=op.get_bind())
    values = []

    with session.begin(subtransactions=True):
        for row in session.query(somemodel.YourOrmClaas).all():
            created_at = rec.standard_attr.created_at
            updated_at = rec.standard_attr.updated_at
            std_attr = session.execute(
                # NOTE **{} is a little trick to pass pylink test
                # it doesn't impact migraion job. Ref:
                # - https://github.com/PyCQA/pylint/issues/722
Esempio n. 24
0
import datetime
import sqlalchemy
from flask_login import UserMixin
from sqlalchemy import orm
from sqlalchemy_serializer import SerializerMixin

from .db_session import SqlAlchemyBase

# Many-to-many association between jobs and categories.
association_table = sqlalchemy.Table(
    'association',
    SqlAlchemyBase.metadata,
    sqlalchemy.Column('jobs', sqlalchemy.Integer,
                      sqlalchemy.ForeignKey('jobs.id')),
    sqlalchemy.Column('category', sqlalchemy.Integer,
                      sqlalchemy.ForeignKey('category.id')),
)


class Category(SqlAlchemyBase, UserMixin, SerializerMixin):
    """Job category reference table."""

    __tablename__ = 'category'

    # Surrogate auto-increment primary key.
    id = sqlalchemy.Column(
        sqlalchemy.Integer, primary_key=True, autoincrement=True)
    name = sqlalchemy.Column(sqlalchemy.String, nullable=True)


class Jobs(SqlAlchemyBase, UserMixin, SerializerMixin):
    __tablename__ = 'jobs'

    id = sqlalchemy.Column(sqlalchemy.Integer,
                           primary_key=True,
                           autoincrement=True)
    # team_leader = sqlalchemy.Column(sqlalchemy.Integer, nullable=True)
Esempio n. 25
0
def get_metadef_properties_table(meta):
    """Return the reflected ``metadef_properties`` table bound to *meta*."""
    table_name = 'metadef_properties'
    return sqlalchemy.Table(table_name, meta, autoload=True)
Esempio n. 26
0
class TransitionDb():
    """Async row-mapper for the ``transition`` table.

    Wraps CRUD helpers around an aiopg-sa style connection (``conn``) that
    every class/instance method receives from the caller.  ``meta`` is the
    module-level MetaData this table is registered on.

    Bug fixed: the ``duration`` column was written by :meth:`insertDb` /
    :meth:`updateDb` but discarded by every query constructor — loaded
    objects always reported ``duration == 100``.  ``__init__`` now accepts
    an optional ``duration`` (default 100, so existing callers are
    unaffected) and the query helpers pass the stored value through.
    """

    transition = sa.Table(
        'transition', meta,
        sa.Column('id', sa.Integer, primary_key=True, nullable=False),
        sa.Column('name', sa.String(80), nullable=False, unique=True),
        sa.Column('strategy_name', sa.String(120), unique=False),
        sa.Column('object', sa.String(120), unique=False),
        sa.Column('duration', sa.Integer, unique=False),
        sa.Column('customize_parameter', sa.String(1024), unique=False))

    @classmethod
    async def queryDbByName(cls, conn, name):
        """Return the transition named *name*, or ``None`` if absent."""
        result = await conn.execute(
            cls.transition.select().where(cls.transition.c.name == name))
        transition = await result.first()
        if transition:
            return cls(transition.name, transition.strategy_name,
                       transition.object, transition.customize_parameter,
                       transition.id, duration=transition.duration)
        else:
            return None

    @classmethod
    async def queryDbById(cls, conn, id):
        """Return the transition with primary key *id*.

        NOTE(review): unlike :meth:`queryDbByName`, a missing row is not
        handled here — ``transition`` would be ``None`` and attribute
        access raises; kept as in the original contract.
        """
        result = await conn.execute(
            cls.transition.select().where(cls.transition.c.id == id))
        transition = await result.first()
        return cls(transition.name, transition.strategy_name,
                   transition.object, transition.customize_parameter,
                   transition.id, duration=transition.duration)

    @classmethod
    async def getAll(cls, conn):
        """Return a list with one ``TransitionDb`` per table row."""
        result = await conn.execute(cls.transition.select())
        records = await result.fetchall()
        transitions = []
        for transition in records:
            transitions.append(
                cls(transition.name, transition.strategy_name,
                    transition.object, transition.customize_parameter,
                    transition.id, duration=transition.duration))
        return transitions

    def __init__(self, name, strategyName, object, parameters, id='',
                 duration=100):
        self.name = name
        self.strategyName = strategyName
        self.object = object
        # Fall back to the historical default when the DB column is NULL.
        self.duration = 100 if duration is None else duration
        self.customizeParameter = parameters
        self.id = id

    def toJSON(self):
        """Serialize :meth:`toDict` as a JSON string."""
        return json.dumps(self.toDict())

    def toDict(self):
        """Return a plain-dict view of this transition."""
        return {
            "id": self.id,
            "name": self.name,
            "strategyName": self.strategyName,
            "object": self.object,
            "duration": self.duration,
            "customizeParameter": self.customizeParameter
        }

    async def insertDb(self, conn):
        """INSERT this object; store the generated primary key on success."""
        id = await conn.scalar(TransitionDb.transition.insert().values(
            name=self.name,
            strategy_name=self.strategyName,
            object=self.object,
            duration=self.duration,
            customize_parameter=self.customizeParameter))
        if not id:
            print("Insert Transition id:{id} failed.".format(id=id))
        else:
            self.id = id

    async def updateDb(self, conn):
        """UPDATE the row matching ``self.id`` with this object's fields."""
        result = await conn.execute(TransitionDb.transition.update().returning(
            *TransitionDb.transition.c).where(
                TransitionDb.transition.c.id == self.id).values(
                    id=self.id,
                    name=self.name,
                    strategy_name=self.strategyName,
                    object=self.object,
                    duration=self.duration,
                    customize_parameter=self.customizeParameter))
        record = await result.fetchone()
        if not record:
            print("Update Transition id:{id} failed.".format(id=self.id))

    def updateFromDict(self, dict):
        """Overwrite mutable fields from *dict* (a :meth:`toDict`-shaped map)."""
        self.name = dict['name']
        # strategy changed, the customize parameter maybe changed too;
        # update customize parameter later by querying isParametersEmpty()
        if (self.strategyName == dict['strategyName']):
            self.customizeParameter = dict['customizeParameter']
        else:
            self.customizeParameter = ''

        self.strategyName = dict['strategyName']
        self.object = dict['object']
        self.duration = dict['duration']

    def isParametersEmpty(self):
        """True when no customize parameter is set."""
        return self.customizeParameter == ''

    def setCustomizeParameter(self, parameters):
        self.customizeParameter = parameters

    def __repr__(self):
        # Fixed copy-paste label: this is a Transition, not a User.
        return '<Transition %r>' % self.name
Esempio n. 27
0
def upload(request):
    """Ingest an uploaded logistics Excel file into its MySQL staging table.

    On POST, the uploaded file's name is matched against a list of known
    Korean keywords (client + shipping/receiving/returns); the first match
    determines the target ``tb_en_*`` table and the ``read_excel`` options.
    The file itself is read from ``C:/Users/Public/Documents/`` by name.
    Responds with a redirect after POST, otherwise renders the upload form.

    Refactored from a 13-branch copy-pasted ``if/elif`` chain into a single
    table-driven loop; match order, all table names, converters and column
    lists are preserved byte-for-byte.
    """
    if request.method == 'POST':
        upload_file = request.FILES['document']
        # NOTE(review): DB credentials/host are hard-coded here; they should
        # live in settings/environment configuration.
        engine = db.create_engine(
            'mysql://*****:*****@210.179.174.148:3306/enc_bigdata?charset=utf8',
            convert_unicode=False)
        connection = engine.raw_connection()
        metadata = db.MetaData()
        print(upload_file.name)

        # (filename keyword, target table, extra read_excel kwargs,
        #  stored procedure to call after loading — or None).
        # Order matters: the first keyword found in the file name wins,
        # mirroring the original elif chain.
        specs = [
            ('스케쳐스출고', 'tb_en_skx_dlivr_stats_s_02', {
                'converters': {
                    'owr_cd': str,
                    'post_no': str,
                    'postno2': str,
                    'etc_mtter2': str
                },
                'usecols': [
                    'dlivr_decsn_oprto_cd', 'dlivr_no', 'delvr_cd',
                    'dlivr_prgrs_stts_nm', 'btch_prces_no', 'sppm_key_no',
                    'dlivr_tpe_sctin_nm', 'dlivr_stts_sctin_nm', 'order_dtm',
                    'rcrt_phsph_nm', 'dlivr_prrrg_dtm', 'dlivr_cmple_dtm',
                    'dlivr_orer', 'owr_cd', 'prlst_cd', 'dlivr_prrrg_qntt',
                    'dlivr_cmple_qntt', 'bscnn_cd', 'owr_nm',
                    'order_tpe_sctin_nm', 'delvr_prrrg_dtm', 'delvr_dtm',
                    'delvr_prrt', 'wrhs_cd', 'dt_acto_stre_dlivr_no',
                    'dlivr_wrhs_nm', 'shpmn_dirct_no', 'bscnn_nm',
                    'dlgds_offce_cd', 'dlgds_offce_nm', 'dlgds_offce_bss_addr',
                    'rcrt_phsph_dtadd', 'post_no', 'postno2', 'pinch_mtlno',
                    'pinch_email', 'bscnn_rfrnc_nm', 'assgn_group_nm', 'rmrk',
                    'etc_mtter1', 'etc_mtter2', 'mvmnt_key_no',
                    'trnso_headr_no', 'dlivr_order_crtin_dtm',
                    'dlivr_rgter_id1', 'dlivr_updt_dtm', 'dlivr_upusr_cd',
                    'dlivr_oprto_pc_cd1', 'order_cstmr_nm', 'order_cstmr_tlno',
                    'order_cstmr_mtlno', 'rcrt_phsph_tlno', 'clssc_key_no',
                    'clssc_lne_no', 'order_key_no', 'order_lne_no',
                    'prlst_group_cd', 'prlst_brcd_no', 'lt_attrb_nm1',
                    'lt_attrb_nm2', 'lt_attrb_nm3', 'lt_attrb_nm4',
                    'lt_attrb_nm5', 'un_ldng_sctin_val', 'etc_sctin_val',
                    'crsdk_qntt', 'assgn_qntt', 'pckn_qntt', 'clssc_qntt',
                    'dlivr_cncll_qntt', 'sal_unprc_amt', 'sal_amt', 'vat',
                    'goods_rmrk', 'invnr_lck_cont', 'lt_rsrvt_sctin_cont',
                    'crsdk_sctin_val', 'mvmnt_lne_no', 'sppm_lne_no',
                    'dlivr_rgstn_dtm', 'dlivr_rgter_id2', 'dlivr_prces_dtm',
                    'dlivr_oprto_pc_cd2', 'spml_prlst_no', 'ordno1', 'ordno2',
                    'rcrt_phsph_fxno', 'decsn_qntt', 'prlst_nm',
                    'dlivr_decsn_dtm', 'dlivr_prces_stts_cd',
                    'ctdl_delay_dcnt', 'bscnn_dlivr_rcipt_dtm',
                    'cmptn_dlivr_prces_delay_dcnt'
                ]
            }, None),
            ('스케쳐스입고', 'tb_en_skx_wrhsn_stats_s_02', {
                'converters': {
                    'bscnn_cd': str,
                    'cmpn_cd': str,
                    'owr_cd': str
                },
                'usecols': [
                    'wrhsn_no', 'bscnn_cd', 'bscnn_nm', 'wrhsn_type_cd',
                    'prlst_brcd_no', 'wrhsn_stts_sctin_nm', 'wrhsn_prrrg_dtm',
                    'wrhsn_cmple_dtm', 'wrhs_cd', 'lt_attrb_nm1',
                    'prrrg_wrhsn_qntt', 'rtwr_qntt', 'wrhsn_wrhs_nm',
                    'bscnn_bss_addr', 'wrhsn_dirct_no', 'bscnn_dtadd', 'rmrk',
                    'cmpn_cd', 'cmpn_nm', 'wrhsn_prlst_no', 'prlst_group_cd',
                    'prcrc_dtm', 'prlst_cd', 'owr_cd', 'prlst_nm',
                    'prlst_rmrk', 'wrhsn_prces_stts_cd', 'owr_nm',
                    'bscnn_wrhsn_rcipt_dtm', 'wrhsn_dirct_pinch_nm'
                ]
            }, None),
            # This branch is the only one that actively runs its post-load
            # stored procedure in the original code.
            ('스케쳐스반품', 'tb_en_skx_rtgds_stats_s_02', {
                'converters': {
                    'owr_cd': str,
                    'cmpn_cd': str,
                    'companycd1': str
                }
            }, 'tb_en_skx_rtgds_pro'),
            ('쏨니아출고', 'tb_en_som_dlivr_stats_s_02', {
                'converters': {
                    'bscnn_cd': str,
                    'owr_cd': str,
                    'post_no': str
                }
            }, None),
            ('쏨니아입고', 'tb_en_som_wrhsn_stats_s_02', {
                'converters': {
                    'bscnn_cd': str,
                    'cmpn_cd': str,
                    'owr_cd': str
                }
            }, None),
            ('쏨니아반품', 'tb_en_som_rtgds_stats_s_02', {
                # NOTE(review): 'owr_cdOWR_CD' looks like a typo for
                # 'owr_cd'; kept byte-identical to preserve behaviour.
                'converters': {
                    'bscnn_cd': str,
                    'cmpn_cd': str,
                    'owr_cdOWR_CD': str
                }
            }, None),
            ('공동물류출고', 'tb_en_com_dlivr_stats_s_02', {}, None),
            ('공동물류입고', 'tb_en_com_wrhsn_stats_s_02', {
                'usecols': [
                    'owr_cmpn_nm', 'in_dirct_no1', 'prcrc_no', 'wrhsn_dt1',
                    'wrhsn_tpe', 'wrhsn_orer', 'supply_men_cd', 'row_no',
                    'wrhsn_prrrg_dt', 'wrhsn_decsn_dt', 'stts', 'dtls_stts',
                    'err_mes1', 'rmrk1', 'rgter1', 'upusr1', 'rgstn_dt1',
                    'updt_dt1', 'upl_orer1', 'in_dirct_no2', 'goods_cd1',
                    'mnft_dt', 'expdate', 'wait_zne', 'wrhsn_dt2',
                    'prrrg_qntt', 'accpn_qntt', 'cau', 'goods_stts', 'in_lot',
                    'accpn_stts', 'accpn_decsn_dt', 'adjt_stts', 'rmrk2',
                    'err_mes2', 'rgter2', 'upusr2', 'rgstn_dt2', 'updt_dt2',
                    'upl_orer2', 'tpe', 'goods_cd2', 'goods_nm', 'use_whthr',
                    'brcd_1', 'brcd_2', 'brcd_3', 'hndle_cton', 'unit_cd',
                    'unit_nm', 'splr', 'set_goods_whthr', 'acqre_sctin',
                    'wrhsn_unprc', 'dlivr_unprc', 'vat_tpe', 'lrge_clss',
                    'mdum_clss', 'sml_clss', 'goods_brnd', 'goods_lne',
                    'proknd', 'gndr', 'ctgry', 'prdct_yr', 'prdct_ssn',
                    'pronmbr', 'size', 'color', 'goods_sctin', 'strg_tpe',
                    'stndrd', 'bx_per_qntt', 'pllt_per_qntt',
                    'pllt_per_bx_qntt', 'wdth', 'lngth', 'hght', 'wght',
                    'bx_cbm', 'thr_sde_sum', 'piece_per_cbm', 'invnr_mngmnt',
                    'rcptndsbr_mngmnt', 'lot_mngmnt', 'dstrbtn_prcss_whether',
                    'assgnmnt_whether', 'expirydate_mngmnt_whether',
                    'cnsldtn_dsllwnce_whether', 'unit_dlivr', 'rmrk3',
                    'rgstn_dt3', 'rgter3', 'updt_dt3', 'upusr3'
                ]
            }, None),
            ('공동물류반품', 'tb_en_com_rtgds_stats_s_02', {
                'usecols': [
                    'owr_cmpn_nm', 'in_dirct_no1', 'invce_no',
                    'rtgds_wrhsn_dt', 'rtgds_decsn_dt', 'rtgds_tpe',
                    'rtgds_plc', 'rtgds_men_nm', 'orer_no', 'curr', 'addr',
                    'rtgds_cost_sctin', 'cau', 'tlno', 'rmrk1', 'rtgds_stts',
                    'rgter1', 'rgstn_dt1', 'upusr1', 'updt_dt1',
                    'orgnl_invce_no', 'cust_use_no', 'in_dirct_no2',
                    'goods_cd1', 'rtgds_qntt', 'nrwl_qntt', 'fulty_qntt',
                    'mnft_dt', 'dstrbtn_dt', 'in_lot', 'rgter2', 'rgstn_dt2',
                    'upusr2', 'updt_dt2', 'goods_cd2', 'goods_nm', 'use_whthr',
                    'brcd_1', 'brcd_2', 'brcd_3', 'hndle_cton', 'unit_cd',
                    'unit_nm', 'splr', 'set_goods_whthr', 'acqre_sctin',
                    'wrhsn_unprc', 'dlivr_unprc', 'vat_tpe', 'lrge_clss',
                    'mdum_clss', 'sml_clss', 'goods_brnd', 'goods_lne',
                    'proknd', 'gndr', 'ctgry', 'prdct_yr', 'prdct_ssn',
                    'pronmbr', 'size', 'color', 'goods_sctin', 'strg_tpe',
                    'stndrd', 'bx_per_qntt', 'pllt_per_qntt',
                    'pllt_per_bx_qntt', 'wdth', 'lngth', 'hght', 'wght',
                    'bx_cbm', 'thr_sde_sum', 'piece_per_cbm', 'invnr_mngmnt',
                    'rcptndsbr_mngmnt', 'lot_mngmnt', 'dstrbtn_prcss_whether',
                    'assgnmnt_whthr', 'expirydate_mngmnt_whether',
                    'cnsldtn_dsllwnce_whether', 'unit_dlivr', 'rmrk',
                    'rgstn_dt3', 'rgter3', 'updt_dt3', 'upusr3'
                ]
            }, None),
            ('마이창고출고', 'tb_en_myw_dlivr_stats_s_02', {
                'converters': {'brcd': str},
                'usecols': [
                    'dlivr_dt', 'cstmr_nm', 'goods_nm', 'mdl_nm', 'brcd',
                    'qntt'
                ]
            }, None),
            ('마이창고입고', 'tb_en_myw_wrhsn_stats_s_02', {
                'converters': {'brcd': str},
                'usecols': [
                    'wrhsn_dt', 'cstmr_nm', 'goods_nm', 'mdl_nm', 'brcd',
                    'wrhsn_qntt', 'rmrk'
                ]
            }, None),
            ('마이창고반품', 'tb_en_myw_rtgds_stats_s_02', {
                'usecols': [
                    'prgrs_sttn', 'sctin', 'rqst_dt', 'pcipt_dt', 'rcll_dt',
                    'cstmr_nm', 'arrvl_plc', 'snr', 'prxstnc_invce',
                    'rtgds_invce', 'goods_nm'
                ]
            }, None),
            ('월별도급인력사용량', 'tb_en_mnb_otsrc_hmfrc_s_02', {
                'usecols': [
                    'lgst_cntr_nm', 'otsrc_cmpn_nm', 'sxdst_cd', 'otsrc_dtm',
                    'otsrc_nofpr_cnt'
                ]
            }, None),
        ]

        excel_path = 'C:/Users/Public/Documents/' + upload_file.name
        for keyword, table_name, read_kwargs, proc_name in specs:
            if keyword in upload_file.name:
                df = pd.read_excel(io=excel_path,
                                   sheet_name='Sheet1',
                                   engine='openpyxl',
                                   **read_kwargs)
                # Reflect the target table first so a missing table fails
                # loudly before any rows are appended.
                db.Table(table_name,
                         metadata,
                         autoload=True,
                         autoload_with=engine)
                df.to_sql(table_name,
                          con=engine,
                          if_exists='append',
                          index=False)
                if proc_name:
                    print('프로시저 시작')
                    cursor = connection.cursor()
                    print('프로시저 중간')
                    cursor.callproc(proc_name)
                    list(cursor.fetchall())  # drain any result set
                    connection.commit()
                    print('프로시저 끝')
                    cursor.close()
                break  # first match wins, like the original elif chain

        return redirect('/admin/excelbigdata/exceldata/')

    return render(request, 'upload.html')
Esempio n. 28
0
def get_type_table(metadata):
    """Declare the per-app ``<db_app_name>_type`` lookup table on *metadata*.

    The table name is prefixed with the Flask ``g.db_app_name`` value, so
    each app gets its own type table.
    """
    return sa.Table(
        g.db_app_name + '_type',
        metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('name', sa.String, nullable=False),
    )
Esempio n. 29
0
import sqlalchemy as sa
from . import config
import sqlalchemy.dialects.postgresql as psql

# Core table for application users.  UUID primary key defaulted from the
# project's config.uuid factory; auth_backend points at the auth_backends
# table, with backend-specific payload kept as JSON in auth_data.
User = sa.Table(
    'users',
    config.metadata,
    sa.Column('uuid', psql.UUID, primary_key=True, default=config.uuid),
    sa.Column('username', sa.String(255)),
    sa.Column('realname', sa.String(255)),
    sa.Column('email', sa.String(255)),
    sa.Column('auth_backend', sa.ForeignKey('auth_backends.uuid')),
    sa.Column('auth_data', sa.JSON),
)
Esempio n. 30
0
 def test_table(self):
     """Verify BannedDBSchemaOperations blocks Table schema operations.

     Inside the fixture, both ``drop`` and ``alter`` on a Table must raise
     the project's DBNotAllowed exception.

     NOTE(review): ``sqlalchemy.Table()`` with no name/metadata arguments
     normally raises — presumably arguments were trimmed from this snippet;
     confirm against the original test.
     """
     table = sqlalchemy.Table()
     with fixtures.BannedDBSchemaOperations(['Table']):
         self.assertRaises(exception.DBNotAllowed, table.drop)
         self.assertRaises(exception.DBNotAllowed, table.alter)