Esempio n. 1
0
def get_data():
    """Fetch CV entries from the PostGIS database.

    Returns:
        tuple: ``(geojson_string, attributes)`` where *geojson_string* is a
        serialized GeoJSON FeatureCollection (EPSG:3857) and *attributes* is
        a list of ``(cv_id, group, start, end, heading, markdown)`` tuples,
        both ordered by descending start date.
    """
    # Read database credentials from the vars config file.
    config = ConfigParser.ConfigParser()
    config.read(os.path.join(os.path.dirname(__file__),'../vars/vars.cfg'))
    username = config.get('database','username')
    password = config.get('database','password')

    engine = create_engine("postgresql://{username}:{password}@localhost/PostGIS".\
                          format(username = username,password = password),
                           client_encoding = 'utf8', echo = False,)

    # Reflect the existing schema and automap it onto ORM classes.
    metadata = MetaData()
    metadata.reflect(engine)
    db = automap_base(metadata=metadata)
    db.prepare()

    session = Session(engine)

    cv = db.classes.cv_entries
    query = session.query(cv.cv_id,
                          cv.group,
                          cv.start,
                          cv.end,
                          cv.heading,
                          cv.markdown,
                          cv.summary,
                          func.ST_AsGeoJSON(func.ST_Transform(cv.geom, 3857)))\
        .order_by(desc(cv.start))

    # Single pass over the result set, building both outputs at once.
    # (The original iterated the query twice, executing the SQL twice;
    # it also created an unused module-level automap base.)
    geojson_data = []
    attributes = []
    for cv_id, group, start, end, heading, markdown, summary, raw_geom in query:
        geom = geojson.loads(raw_geom)
        feat = geojson.Feature(geometry=geom, properties={'cv_id': cv_id,'group': group,'summary': summary})
        geojson_data.append(feat)
        attributes.append((cv_id, group, start, end, heading, markdown))

    # Declare the CRS of the transformed geometries for GeoJSON consumers.
    crs = {
        "type": "name",
        "properties": {
            "name": "EPSG:3857"
        }
    }
    cv_geojson = geojson.FeatureCollection(geojson_data, crs=crs)

    return geojson.dumps(cv_geojson), attributes
Esempio n. 2
0
    def run(self):
        """Reflect the source database and export each eligible table to
        RedShift in its own worker thread.

        Tables matching ``config.source['tables']['exclude_tables']`` are
        skipped; when ``custom_tables`` is non-empty, only listed tables
        are processed.
        """
        try:
            print("Please waiting... Analyzing source database... please wait...")
            ## AutoMap: reflect existing tables and generate mapped classes.
            self.metadata.reflect(self.engine)  # get columns from existing table
            Base = automap_base(bind=self.engine, metadata=self.metadata)
            Base.prepare(self.engine, reflect=True)
            MetaTable = Base.metadata.tables

            thread_list = []
            ## From all tables found in database
            for table in MetaTable.keys():
                # Strip a leading schema qualifier ("schema.table" -> "table").
                if '.' in table:
                    table = str(table).split('.')[1]

                # Skip explicitly excluded tables.
                if len(config.source['tables']['exclude_tables']) > 0:
                    if exactyMatchList(config.source['tables']['exclude_tables'], table):
                        continue

                ## From config.py custom_tables: when a whitelist is given,
                ## process only tables on it.
                if len(config.source['tables']['custom_tables']) > 0:
                    if not exactyMatchList(config.source['tables']['custom_tables'], table):
                        continue

                print("Preparing thread to table %s " % table)
                t = threading.Thread(target=self.export2RedShift, name='thread-' + table, args=(table,))
                thread_list.append(t)

            thread_control(thread_list, self.cfg_thread_number)
            print("Finish...")

        except Exception as e:
            # Exception already subsumes SQLAlchemyError; the original
            # (SQLAlchemyError, Exception) tuple was redundant.
            print("Error: %s" % e)
def init():
    """Automap the configured database and seed the default role and user."""
    engine = create_engine(config.db_url)
    base = automap_base()
    base.prepare(engine, reflect=True)
    db_session = Session(engine)
    add_role(base, db_session)
    add_user(base, db_session)
Esempio n. 4
0
    def __init__(self, verbose=0, *args, **kwds):  # @UnusedVariable
        """Open the Finance DB (sqlite) and automap its tables.

        Raises:
            RuntimeError: if the DB file is not readable and writable.
        """
        super(SbciFinanceDB, self).__init__(*args, **kwds)

        if not os.access(FINANCEDB_FILE, os.R_OK | os.W_OK):
            raise RuntimeError('cannot access Finance DB file ({}) for R/W!'
                               .format(FINANCEDB_FILE))

        self.Base = automap_base()
        self.engine = create_engine('sqlite:///' + FINANCEDB_FILE)
        self.Base.prepare(self.engine, reflect=True)
        self.dbsession = Session(self.engine)

        # Bind one mapped class per table.
        mapped = self.Base.classes
        self.Categories = mapped.categories
        self.Seasons = mapped.seasons
        self.Cheques = mapped.cheques
        self.Transactions = mapped.transactions
        self.Trybooking = mapped.trybooking

        # Pre-built queries, one per mapped class.
        self.categories_query = self.dbsession.query(self.Categories)
        self.seasons_query = self.dbsession.query(self.Seasons)
        self.cheques_query = self.dbsession.query(self.Cheques)
        self.transactions_query = self.dbsession.query(self.Transactions)
        self.trybooking_query = self.dbsession.query(self.Trybooking)
Esempio n. 5
0
 def __prepare__(self):
     """Automap the reflected schema on self.engine and open a session."""
     # Generate a declarative base, populate it from the database, and
     # keep a session around for later queries.
     self.base = automap_base()
     self.base.prepare(self.engine, reflect=True)
     self.session = Session(self.engine)
Esempio n. 6
0
    def test_relationship_pass_params(self):
        """The generate_relationship hook receives base, direction, attrname."""
        Base = automap_base(metadata=self.metadata)

        recorder = Mock()

        def _gen_relationship(base, direction, return_fn, attrname,
                              local_cls, referred_cls, **kw):
            # Record the interesting arguments, then delegate to the default
            # relationship generator.
            recorder(base, direction, attrname)
            return generate_relationship(base, direction, return_fn, attrname,
                                         local_cls, referred_cls, **kw)

        Base.prepare(generate_relationship=_gen_relationship)

        expected = {
            (Base, interfaces.MANYTOONE, "nodes"),
            (Base, interfaces.MANYTOMANY, "keywords_collection"),
            (Base, interfaces.MANYTOMANY, "items_collection"),
            (Base, interfaces.MANYTOONE, "users"),
            (Base, interfaces.ONETOMANY, "addresses_collection"),
        }
        seen = {tuple(call[1]) for call in recorder.mock_calls}
        assert seen.issuperset(expected)
Esempio n. 7
0
    def handle(self, *args, **options):
        """Load the content test fixture via Django, then snapshot the
        reflected schema (pickled) and all table data (JSON) for the
        schema version named in ``options['version']``."""

        engine = create_engine(get_default_db_string(), convert_unicode=True)

        metadata = MetaData()

        app_config = apps.get_app_config('content')
        # Exclude channelmetadatacache in case we are reflecting an older version of Kolibri
        table_names = [model._meta.db_table for name, model in app_config.models.items() if name != 'channelmetadatacache']
        metadata.reflect(bind=engine, only=table_names)
        Base = automap_base(metadata=metadata)
        # TODO map relationship backreferences using the django names
        Base.prepare()
        session = sessionmaker(bind=engine, autoflush=False)()

        # Load fixture data into the test database with Django
        call_command('loaddata', 'content_import_test.json', interactive=False)

        def get_dict(item):
            # Strip SQLAlchemy's internal bookkeeping so the row serializes
            # as a plain dict.
            value = {key: value for key, value in item.__dict__.items() if key != '_sa_instance_state'}
            return value

        data = {}

        # Dump every automapped table as a list of plain row dicts.
        for table_name, record in Base.classes.items():
            data[table_name] = [get_dict(r) for r in session.query(record).all()]

        # protocol=2 keeps the pickled metadata loadable from Python 2.
        with open(SCHEMA_PATH_TEMPLATE.format(name=options['version']), 'wb') as f:
            pickle.dump(metadata, f, protocol=2)

        with open(DATA_PATH_TEMPLATE.format(name=options['version']), 'w') as f:
            json.dump(data, f)
Esempio n. 8
0
def init_conn():
  """Initialise the module-level database connection.

  Reflects the 'results' and 'job' tables, overlays createdAt/updatedAt
  timestamp columns on them, and publishes the mapped classes and session
  via the module globals ``Results``, ``Job`` and ``session``.
  """
  connection_string = 'postgres://*****:*****@localhost:5432/adna'

  # The original imported Table twice (from sqlalchemy.schema and from
  # sqlalchemy — the second shadowed the first) and imported listens_for
  # without using it; both removed.
  from sqlalchemy.ext.automap import automap_base
  from sqlalchemy.orm import Session
  from sqlalchemy import create_engine, Column, DateTime, MetaData, Table
  from datetime import datetime

  engine = create_engine(connection_string)

  metadata = MetaData()
  metadata.reflect(engine, only=['results', 'job'])

  # Redefine both tables with automatic timestamp columns;
  # extend_existing=True merges these into the reflected definitions.
  for table_name in ('results', 'job'):
    Table(table_name, metadata,
          Column('createdAt', DateTime, default=datetime.now),
          Column('updatedAt', DateTime, default=datetime.now,
                 onupdate=datetime.now),
          extend_existing=True)

  Base = automap_base(metadata=metadata)

  Base.prepare()

  global Results, Job, session
  Results, Job = Base.classes.results, Base.classes.job

  session = Session(engine)
Esempio n. 9
0
def create_app(database_uri):
    """
    Creates a new flask app that exposes the database
    provided as a ReSTful Application

    :param str|unicode|sqlalchemy.engine.url.URL database_uri: The database
        URI in a manner that SQLAlchemy can understand
    :return: A flask app that exposes a database as
        a ReSTful API that can be accessed using either
        the Hal or SIREN protocol
    :rtype: Flask
    """
    app = Flask(__name__)

    # Reflect the database schema into automapped model classes.
    engine = create_engine(database_uri)
    base = automap_base()
    base.prepare(engine, reflect=True)

    # Wire up the ripozo dispatcher with both supported response formats.
    dispatcher = FlaskDispatcher(app)
    dispatcher.register_adapters(adapters.HalAdapter, adapters.SirenAdapter)
    session_handler = ScopedSessionHandler(engine)

    # One ReSTful resource per automapped model.
    # We need to pass ``append_slash=True`` due to a quirk in how flask handles routing
    dispatcher.register_resources(
        *[create_resource(model, session_handler, append_slash=True)
          for model in base.classes])
    return app
Esempio n. 10
0
def collect_bnetza_data():
    """
    Collects data of wind turbines available in the BNetzA Anlagenstammdaten.

    Returns
    -------
    plants : pandas.DataFrame
        All wind generators in BNetzA with ``seelage`` NULL, sorted by
        capacity, with columns capacity/manufacturer/type/hub/rotor.
    """
    meta.reflect(bind=conn, schema='model_draft',
                 only=['bnetza_eeg_anlagenstammdaten_wind_classification'])
    Base = automap_base(metadata=meta)
    Base.prepare()

    Bnetza = Base.classes.bnetza_eeg_anlagenstammdaten_wind_classification

    # .is_(None) renders as SQL "IS NULL" explicitly; the original used
    # "== None", which linters flag (E711) and which relies on operator
    # overloading to mean the same thing.
    query = session.query(Bnetza.installierte_leistung,
                          Bnetza.wea_manufacturer,
                          Bnetza.wea_type,
                          Bnetza.nabenhöhe,
                          Bnetza.rotordurchmesser).filter(
                              Bnetza.seelage.is_(None))
    # Rows are already (capacity, manufacturer, type, hub, rotor) tuples.
    plants = [tuple(row) for row in query.all()]
    plants.sort(key=itemgetter(0))
    columns = ['capacity', 'manufacturer', 'type', 'hub', 'rotor']
    plants = pd.DataFrame(plants, columns=columns)

    return plants
Esempio n. 11
0
def get_photos_with_taxa(session, metadata):
    """Return photos with genus, section, and species class.

    This generator returns 4-tuples ``(photo, genus, section, species)``.
    """
    Base = automap_base(metadata=metadata)
    Base.prepare()
    configure_mappers()

    Photo = Base.classes.photos
    Taxon = Base.classes.taxa
    Rank = Base.classes.ranks

    def _rank_subquery(rank_name):
        # (photo_id, taxon_name) pairs for one taxonomic rank; the original
        # repeated this query three times verbatim.
        return session.query(Photo.id, Taxon.name.label(rank_name)).\
            join(Photo.taxa_collection, Taxon.ranks).\
            filter(Rank.name == rank_name).subquery()

    stmt_genus = _rank_subquery('genus')
    stmt_section = _rank_subquery('section')
    stmt_species = _rank_subquery('species')

    # Section is optional (outer join); genus and species are required.
    q = session.query(Photo, 'genus', 'section', 'species').\
        join(stmt_genus, stmt_genus.c.id == Photo.id).\
        outerjoin(stmt_section, stmt_section.c.id == Photo.id).\
        join(stmt_species, stmt_species.c.id == Photo.id)

    return q
 def setupConnection(self):
     """(Re)connect to self.sql_url, reflecting the schema into automapped
     classes; falls back to reflecting only self.sql_table when the whole
     database cannot be reflected."""
     LOG.info('setupConnection:'+self.sql_url)
     if self._scoped_session:
         self._scoped_session.remove()
     try:
         engine = create_engine(self.sql_url, encoding='utf8', echo=False, pool_recycle=1)
     except Exception:
         # Narrowed from a bare except: still logs and aborts, but no
         # longer swallows SystemExit/KeyboardInterrupt.
         exc_type, exc_value, exc_traceback = sys.exc_info()
         lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
         LOG.error(''.join('!! ' + line for line in lines))
         return
     self._insp = reflection.Inspector.from_engine(engine)
     self.d_base = declarative_base(bind=engine)
     a_base = automap_base(bind=engine)
     try:
         a_base.metadata.reflect(views=True)
         self.name = unicode(self.sql_url)
     except Exception:
         # Full reflection failed: log the traceback, then retry restricted
         # to the single configured table. (Also narrowed from bare except.)
         LOG.info('Unable to reflect the whole DB!')
         exc_type, exc_value, exc_traceback = sys.exc_info()
         lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
         for line in lines:
             LOG.info(line)
         a_base.metadata.reflect(views=True, only=[self.sql_table])
         self.restricted = True
         self.name = unicode(self.sql_url+'+'+self.sql_table)
     a_base.prepare(a_base.metadata.bind, name_for_collection_relationship=unique_collection)
     self.a_base = a_base
     self._scoped_session = scoped_session(sessionmaker(bind=self.a_base.metadata.bind, extension=ZopeTransactionExtension(keep_session=True), autocommit=True))
Esempio n. 13
0
    def __init__(self, engine):
        """Reflect the schema — including tables without primary keys — and
        automap it onto self.base."""
        metadata = MetaData(engine)
        self._define_tables_without_primary_keys(metadata)

        base = automap_base(metadata=metadata)
        base.prepare(engine, reflect=True)
        self.base = base
Esempio n. 14
0
def get_taxa_photo_count(session, metadata):
    """Return the photo count for each (genus, section, species) combination.

    Taxa are returned as 4-tuples ``(genus, section, species, photo_count)``.
    """
    Base = automap_base(metadata=metadata)
    Base.prepare()
    configure_mappers()

    Photo = Base.classes.photos
    Taxon = Base.classes.taxa
    Rank = Base.classes.ranks

    def _rank_subquery(rank_name):
        # (photo_id, taxon_name) pairs for one taxonomic rank; the original
        # repeated this query three times verbatim.
        return session.query(Photo.id, Taxon.name.label(rank_name)).\
            join(Photo.taxa_collection, Taxon.ranks).\
            filter(Rank.name == rank_name).subquery()

    stmt_genus = _rank_subquery('genus')
    stmt_section = _rank_subquery('section')
    stmt_species = _rank_subquery('species')

    # Section is optional (outer join); count photos per taxon triple.
    q = session.query('genus', 'section', 'species',
            functions.count(Photo.id).label('photos')).\
        select_from(Photo).\
        join(stmt_genus, stmt_genus.c.id == Photo.id).\
        outerjoin(stmt_section, stmt_section.c.id == Photo.id).\
        join(stmt_species, stmt_species.c.id == Photo.id).\
        group_by('genus', 'section', 'species')

    return q
Esempio n. 15
0
    def test_naming_schemes(self):
        """Custom naming hooks control class and relationship names."""
        Base = automap_base(metadata=self.metadata)

        def classname_for_table(base, tablename, table):
            return str("cls_" + tablename)

        def name_for_scalar_relationship(base, local_cls, referred_cls,
                                         constraint):
            return "scalar_" + referred_cls.__name__

        def name_for_collection_relationship(base, local_cls, referred_cls,
                                             constraint):
            return "coll_" + referred_cls.__name__

        Base.prepare(
            classname_for_table=classname_for_table,
            name_for_scalar_relationship=name_for_scalar_relationship,
            name_for_collection_relationship=name_for_collection_relationship,
        )

        # Both class names and relationship names reflect the custom hooks.
        user = Base.classes.cls_users()
        address = Base.classes.cls_addresses()
        user.coll_cls_addresses.append(address)
        assert address.scalar_cls_users is user
Esempio n. 16
0
def main(src):
    """Automap the database at *src* and dump its mapped classes as JSON."""
    engine = create_engine(src)
    base = automap_base()
    base.prepare(engine, reflect=True)
    collected = Collector(Resolver()).collect(base.classes)
    loading.dumpfile(collected, format="json")
Esempio n. 17
0
    def test_o2m_relationship_cascade(self):
        """Cascade and passive-delete settings of the automapped one-to-many
        relationships on class ``a`` match each child table's FK setup:
        b = plain, c = delete-orphan, d = delete-orphan + passive,
        e = passive only. All keep save-update."""
        Base = automap_base(metadata=self.metadata)
        Base.prepare()

        configure_mappers()

        # b: no delete cascade, no passive deletes.
        b_rel = Base.classes.a.b_collection
        assert not b_rel.property.cascade.delete
        assert not b_rel.property.cascade.delete_orphan
        assert not b_rel.property.passive_deletes

        assert b_rel.property.cascade.save_update

        # c: delete + delete-orphan cascade, deletes issued by the ORM.
        c_rel = Base.classes.a.c_collection
        assert c_rel.property.cascade.delete
        assert c_rel.property.cascade.delete_orphan
        assert not c_rel.property.passive_deletes

        assert c_rel.property.cascade.save_update

        # d: delete + delete-orphan cascade, deletes left to the database.
        d_rel = Base.classes.a.d_collection
        assert d_rel.property.cascade.delete
        assert d_rel.property.cascade.delete_orphan
        assert d_rel.property.passive_deletes

        assert d_rel.property.cascade.save_update

        # e: no delete cascade, but deletes left to the database.
        e_rel = Base.classes.a.e_collection
        assert not e_rel.property.cascade.delete
        assert not e_rel.property.cascade.delete_orphan
        assert e_rel.property.passive_deletes

        assert e_rel.property.cascade.save_update
Esempio n. 18
0
    def _automap(self, e):
        """Automap engine *e*, pause briefly, then configure the mappers."""
        base = automap_base()
        base.prepare(e, reflect=True)

        time.sleep(.01)
        configure_mappers()
Esempio n. 19
0
def collect_ego_turbines():
    """
    Collects data of wind turbines used in the eGo database.

    Returns
    -------
    generators : list of float
        Sorted electrical capacities of onshore wind turbines (< 7600,
        start-up between 1998 and 2018) in the eGo database.
    """
    meta.reflect(bind=conn, schema='model_draft',
                 only=['ego_dp_supply_res_powerplant'])
    Base = automap_base(metadata=meta)
    Base.prepare()

    Dp = Base.classes.ego_dp_supply_res_powerplant

    query = session.query(Dp.electrical_capacity).\
        filter(and_(Dp.generation_subtype == 'wind_onshore',
                    Dp.electrical_capacity < 7600,
                    Dp.start_up_date > '1998-01-01 00:00:00',
                    Dp.start_up_date < '2018-01-01 00:00:00'))

    # Each result row is a 1-tuple. The original wrapped rows in redundant
    # parentheses (not tuples) and rebuilt the list with an index loop;
    # a single sorted() over the rows is equivalent.
    return sorted(float(row[0]) for row in query.all())
Esempio n. 20
0
    def reflect_model(self, table_name, bind_key=None):
        """Reverse-generate an ORM model class from an existing table.

        Reflects *table_name* from the engine selected by *bind_key* and
        automaps it; results are cached so each table is reflected once.

        :param table_name: name of the database table to reflect
        :param bind_key: key selecting which engine to use
        :return: ORMClass
        """
        # The lock makes the cache check plus reflection atomic, so
        # concurrent callers do not reflect the same table twice.
        with self._reflect_lock:
            if table_name in self._models:
                return self._models[table_name]

            engine = self.get_engine(bind_key)
            meta = MetaData(bind=engine)
            meta.reflect(only=[table_name])

            table = meta.tables[table_name]
            self._tables[table_name] = table

            Base = automap_base(metadata=meta)
            Base.prepare()

            model = getattr(Base.classes, table_name)
            # NOTE(review): clearing __table__.metadata presumably detaches
            # the model from this throwaway MetaData — confirm intent.
            model.__table__.metadata = None
            self._models[table_name] = model

            return model
Esempio n. 21
0
def base_app():
    """Create and configure the global Flask backend app: reflect the
    database, expose /members via Flask-Restless, and register the
    /shutdown endpoint."""
    global app
    app = Flask('backend')
    app.config.overrides = {}

    logging.info('Connecting to database...')
    engine = create_engine(backend.settings.db_connection)
    app.engine = engine
    base = automap_base()
    base.prepare(engine, reflect=True)

    session = Session(engine)
    app.base = base
    app.session = session
    app.Decl_Base = declarative_base()

    from backend.models import Members
    app.Decl_Base.metadata.create_all(app.engine)

    # Create the Flask-Restless API manager.
    app.api_manager = flask.ext.restless.APIManager(app, session=app.session)
    app.api_manager.create_api(Members, methods=['GET', 'POST', 'PATCH', 'DELETE'], collection_name='members')

    # BUG FIX: this route was originally defined *after* ``return app`` —
    # dead code, so the endpoint was never registered.
    @app.route("/shutdown", methods=["POST"])   # pragma: no cover
    def shutdown():  # pragma: no cover
        logging.info('shutting down server')
        shutdown_server()
        return "server shutting down"

    return app
Esempio n. 22
0
    def test_conditional_relationship(self):
        """prepare() accepts a generate_relationship hook that returns None."""
        Base = automap_base()

        def _suppress_relationship(*arg, **kw):
            # Returning None suppresses generation of the relationship.
            return None

        Base.prepare(
            engine=testing.db, reflect=True,
            generate_relationship=_suppress_relationship)
Esempio n. 23
0
 def __init__(self, conn):
     """Connect to the database at *conn* and automap its schema.

     Exposes the mapped classes as ``self.classes`` and an open session
     as ``self.session``.
     """
     # The original checked a Connection out of the pool into an unused
     # local ('connect') and never closed it — a pool leak; removed along
     # with the redundant 'db = conn' alias.
     self.engine = create_engine(conn)
     Base = automap_base()
     Base.prepare(self.engine, reflect=True)
     self.session = Session(self.engine)
     self.classes = Base.classes
Esempio n. 24
0
def load_class(name):
    """Return the automapped model class for *name*, caching the lookup."""
    key = name.lower()
    if key not in classes:
        # First miss: reflect the current schema and automap it.
        db.Model.metadata.reflect(db.engine)
        base = automap_base()
        base.prepare(db.engine, reflect=True)
        classes[key] = base.classes.get(key)
    return classes[key]
  def set_connection(self, db_connect_string):
    """Initialise metadata, automap base, engine, and session for the DSN.

    :param db_connect_string: SQLAlchemy connection string
    """
    self.metadata = MetaData()
    self.base = automap_base(metadata=self.metadata)
    self.engine = create_engine(db_connect_string)
    #self.engine.raw_connection().connection.text_factory = str

    # NOTE(review): tables_md() presumably populates the table metadata —
    # confirm against its definition.
    self.session = sessionmaker(bind=self.engine)() 
    self.tables_md()
Esempio n. 26
0
    def __init__(self, db_url):
        """Open *db_url* with an automapped base and verify its schema.

        Raises:
            RuntimeError: if the live schema does not match
                MCDeclarativeBase.
        """
        super(AutomappedDB, self).__init__(automap_base(), db_url)

        # Imported at call time — presumably to avoid a circular import at
        # module load; confirm before moving to the top of the file.
        from .db_check import is_sane_database

        with self.sessionmaker() as session:
            if not is_sane_database(MCDeclarativeBase, session):
                raise RuntimeError('database {0} does not match expected schema'.format(db_url))
Esempio n. 27
0
def get_photos(session, metadata):
    """Return photo records from the database."""
    base = automap_base(metadata=metadata)
    base.prepare()
    configure_mappers()
    return session.query(base.classes.photos)
Esempio n. 28
0
 def setup(self):
     if self._engine:
         RuntimeError("Only call setup once!")
     self._engine = sqlalchemy.create_engine(self._create_db_string())
     self._connection = self._engine.connect()
     self._base = automap_base()
     self._session_factory = sqlalchemy.orm.sessionmaker(bind=self._engine)
     self._base.prepare(self._engine, reflect = True)
Esempio n. 29
0
 def __init__(self, parent=None):
     """Connect to the local wienwahl MySQL database and automap its schema.

     :param parent: unused here — presumably a Qt-style parent widget;
         TODO confirm against callers.
     """
     # SECURITY NOTE(review): credentials are hard-coded in the DSN;
     # consider loading them from configuration instead.
     self.engine = create_engine('mysql+mysqlconnector://root:[email protected]/wienwahl')
     self.connection = self.engine.connect()
     Base = automap_base()
     Base.prepare(self.engine, reflect=True)
     self.session = Session(self.engine)
     self.classes = Base.classes
     # "Wahltermin" is German for election date.
     self.wahltermin = "2015-10-11"
Esempio n. 30
0
    def create_session(cls, database_url):
        """Reflect *database_url* via the sqlite driver and return a _Model
        wrapping the automapped classes and an open session."""
        engine = create_engine(database_url, module=sqlite)

        # Automap the existing schema into model classes.
        detector = automap_base()
        detector.prepare(engine, reflect=True)
        return _Model(detector, Session(engine))
Esempio n. 31
0
def ml_metadata():
    """Return all industry data for every city as JSON: {"keys": [row, ...]}."""
    engine = create_engine("sqlite:///cities_db_3.sqlite")
    # reflect an existing database into a new model
    Base = automap_base()
    # reflect the tables
    Base.prepare(engine, reflect=True)
    # Save references to table
    Industry = Base.classes.industry
    # Create our session (link) from Python to the DB
    session = Session(engine)

    sel = [
        Industry.city_state,
        Industry.houses_total,
        Industry.pop_18_to_24_total,
        Industry.renters_total,
        Industry.house_units,
        Industry.population,
        Industry.establishments_total,
        Industry.year,
        Industry.agriculture_forest_fish_hunt_ratio,
        Industry.mine_quarry_oil_gas_extraction_ratio,
        Industry.utilities_ratio,
        Industry.construction_ratio,
        Industry.manufacturing_ratio,
        Industry.wholesale_trade_ratio,
        Industry.retail_ratio,
        Industry.transport_warehousing_ratio,
        Industry.information_ratio,
        Industry.finance_insurance_ratio,
        Industry.realestate_rent_lease_ratio,
        Industry.professional_scientific_tech_services_ratio,
        Industry.mgmt_companies_enterprises_ratio,
        Industry.admin_support_waste_management_services_ratio,
        Industry.educational_services_ratio,
        Industry.health_social_assistance_ratio,
        Industry.arts_entertainment_and_recreation_ratio,
        Industry.accommodation_food_services_ratio,
        #Other_ratio col bugged
        # Industry.other_ratio,
        Industry.industries_not_classified_ratio,
        Industry.commute_time_per_person
    ]

    results2 = session.query(*sel).all()

    # Output key for each selected column, positionally aligned with ``sel``.
    # The original built each row dict by hand with 28 indexed assignments —
    # error-prone (see the garbled key below, preserved for compatibility).
    # NOTE(review): "biz_g...rowth_Y" looks like a copy/paste garble of
    # "agriculture_forest_fish_hunt_ratio"; kept byte-for-byte because
    # consumers may already depend on it — confirm before renaming.
    keys = [
        "city_state",
        "houses_total",
        "pop_18_to_24_total",
        "renters_total",
        "house_units",
        "population",
        "establishments_total",
        "year",
        "biz_gagriculture_forest_fish_hunt_ratiorowth_Y",
        "mine_quarry_oil_gas_extraction_ratio",
        "utilities_ratio",
        "construction_ratio",
        "manufacturing_ratio",
        "wholesale_trade_ratio",
        "retail_ratio",
        "transport_warehousing_ratio",
        "information_ratio",
        "finance_insurance_ratio",
        "realestate_rent_lease_ratio",
        "professional_scientific_tech_services_ratio",
        "mgmt_companies_enterprises_ratio",
        "admin_support_waste_management_services_ratio",
        "educational_services_ratio",
        "health_social_assistance_ratio",
        "arts_entertainment_and_recreation_ratio",
        "accommodation_food_services_ratio",
        #other_ratio col bugged
        # "other_ratio",
        "industries_not_classified_ratio",
        "commute_time_per_person",
    ]

    # Formatting data to mirror GeoJSON: one dict per result row.
    ml_list = [dict(zip(keys, result)) for result in results2]

    #ALLOWS ACCES TO ALL DATA BY DICT.KEY FOR MAPPING LEAFLET
    ml_dict = {"keys": ml_list}
    return jsonify(ml_dict)
Esempio n. 32
0
def insertQuote(data):
    """Insert a DP quote built from the *data* form dict and return the new
    quote_id.

    :param data: mapping of form field names (e.g. 'bsc-name') to values
    :return: quote_id of the newly inserted dp_quotes row
    """
    import sqlalchemy
    from sqlalchemy.ext.automap import automap_base
    from sqlalchemy.orm import Session
    from sqlalchemy import create_engine, insert
    from datetime import datetime
    from config import local_db

    engine = create_engine(local_db)

    Base = automap_base(bind=engine)
    Base.prepare(engine, reflect=True)

    DPquotes = Base.classes.dp_quotes
    session = Session(engine)
    # sys_timestamp + bsc_name together identify the row we just inserted.
    sys_timestamp = str(datetime.now())
    bsc_name = data.get('bsc-name', None)
    newQuote = DPquotes(
        bsc_name=bsc_name,
        rep_name=data.get('rep-name', None),
        cx_name=data.get('client-name', None),
        cx_sapid=data.get('sap-id', None),
        sys_timestamp=sys_timestamp,
        quote_date=data.get("quote-date", None),
        batch_users=data.get("batch-users", None),
        dp_edition=data.get('edition', None),
        ent_failover=data.get('ent-failover', None),
        ent_failover_count=data.get('ent-failover-count', None),
        ent_multi=data.get('ent-multi', None),
        ent_multi_count=data.get('ent-multi-count', None),
        ent_offloadmfp_count=data.get('ent-offloadmfp-count', None),
        ent_offloadmobile_count=data.get('ent-offloadmobile-count', None),
        ent_offloadweb_count=data.get('ent-offloadweb-count', None),
        ent_workflowa_count=data.get('ent-workflowa-count', None),
        ent_workflowb_count=data.get('ent-workflowb-count', None),
        input_count=data.get('input-count', None),
        maint_years=data.get('maint-years', None),
        mobile_users=data.get('mobile-users', None),
        offloadmfp=data.get('offloadmfp', None),
        offloadmobile=data.get('offloadmobile', None),
        offloadweb=data.get('offloadweb', None),
        option_barcode1d=data.get('option-barcode1d', None),
        option_barcode2d=data.get('option-barcode2d', None),
        option_acpupgrade=data.get('option-acpupgrade', None),
        option_advbates=data.get('option-advbates', None),
        option_advocr=data.get('option-advocr', None),
        batchindex=data.get('batchindex', None),
        option_bookbildbndl=data.get('option-bookbildbndl', None),
        option_bubgrader=data.get('option-bubgrader', None),
        option_connconn=data.get('option-connconn', None),
        option_convoffice=data.get('option-convoffice', None),
        option_convpdf=data.get('option-convpdf', None),
        option_copydef=data.get('option-copydef', None),
        option_distrobndl=data.get('option-distrobndl', None),
        option_dropboxin=data.get('option-dropboxin', None),
        option_emailbndl=data.get('option-emailbndl', None),
        option_fileparse=data.get('option-fileparse', None),
        option_fluxconn=data.get('option-fluxconn', None),
        option_formsproc=data.get('option-formsproc', None),
        option_hl7=data.get('option-hl7', None),
        option_hsredact=data.get('option-hsredact', None),
        option_kdkgencon=data.get('option-kdkgencon', None),
        option_laserfiche=data.get('option-laserfiche', None),
        option_lprinput=data.get('option-lprinput', None),
        option_metabndl=data.get('option-metabndl', None),
        mobile=data.get('mobile', None),
        option_ocrasiafont=data.get('option-ocrasiafont', None),
        option_odbc=data.get('option-odbc', None),
        option_onbase=data.get('option-onbase', None),
        option_pdfproc=data.get('option-pdfproc', None),
        option_pgcolroute=data.get('option-pgcolroute', None),
        option_powertools=data.get('option-powertools', None),
        option_printfile=data.get('option-printfile', None),
        release2me=data.get('release2me', None),
        option_rxshield=data.get('option-rxshield', None),
        option_sharepoint=data.get('option-sharepoint', None),
        option_winfax=data.get('option-winfax', None),
        option_workshare=data.get('option-workshare', None),
        workstation=data.get('workstation', None),
        option_worldox=data.get('option-worldox', None),
        price_level=data.get('price-level', None),
        release2me_devices=data.get('release2me-devices', None),
        workflowa=data.get('workflowa', None),
        workflowb=data.get('workflowb', None),
        workstation_users=data.get('workstation-users', None),
        sec_consult=data.get('sec-consult-hours', None),
        sec_develop=data.get('sec-develop-hours', None),
        kmbs_install=data.get('kmbs-install-hours', None),
        sec_remote=data.get('sec-remote', None))
    session.add(newQuote)
    session.commit()

    # Reuse the same session to read back the generated quote_id. (The
    # original opened a second Session without closing the first — a
    # connection leak — and issued a needless commit after the read.)
    query = session.query(DPquotes.quote_id).filter(
        DPquotes.sys_timestamp == sys_timestamp).filter(
            DPquotes.bsc_name == bsc_name).one()
    session.close()
    engine.dispose()

    return query[0]
Esempio n. 33
0
def getMSQuote(quote_id):
    """Rebuild a maintenance (MS) quote from its stored row.

    Loads the ms_quotes row for *quote_id*, scrapes the saved maintenance
    URL for the in-scope item numbers, prices each item against dp_items,
    and returns a dict with customer info, the priced config and the total.

    :param quote_id: primary identifier of the stored maintenance quote.
    :returns: dict with quote metadata, ``config`` line items and
        ``total_price``.
    :raises sqlalchemy.orm.exc.NoResultFound: if the quote does not exist.
    """
    import sqlalchemy
    from sqlalchemy.ext.automap import automap_base
    from sqlalchemy.orm import Session
    from sqlalchemy import create_engine, func, inspect, MetaData
    from splinter import Browser
    import pandas as pd
    import time
    import re
    from config import local_db

    engine = create_engine(local_db)

    Base = automap_base(bind=engine)
    Base.prepare(engine, reflect=True)

    DPitems = Base.classes.dp_items
    MSquotes = Base.classes.ms_quotes
    session = Session(engine)

    # Exactly one row is expected per quote_id; .one() raises otherwise.
    quote_row = session.query(MSquotes).filter(
        MSquotes.quote_id == quote_id).one()
    data = quote_row.__dict__

    executable_path = {'executable_path': '/usr/local/bin/chromedriver'}

    browser = Browser('chrome', **executable_path, headless=True)
    try:
        browser.visit(data['maint_url'])
        time.sleep(1)  # give the Angular page time to render

        scope = browser.find_by_tag("table").find_by_css(".ng-scope")
        edition = browser.find_by_css(".label")[0].text

        # Rows look like "<itemno> : <description> (<years>..."; capture
        # the item number and the maintenance-years digit.
        row_re = re.compile(r"(^\d+)\s*\:\s*(.+)\s\(\s*(\d)")
        scopelist = []
        for item in scope:
            itemid = row_re.match(item.text)
            if itemid is not None:
                scopelist.append([itemid[1], itemid[3]])
    finally:
        # BUG FIX: always release the browser, even when scraping fails;
        # the original leaked the chromedriver process on any exception.
        browser.quit()

    scope_df = pd.DataFrame(scopelist, columns=['itemno', 'ms'])
    # BUG FIX: the original assigned value_counts() (ordered by frequency)
    # positionally onto the itemno-sorted frame, so counts could land on the
    # wrong items; mapping by itemno keys each count to its own item.
    counts = scope_df['itemno'].value_counts()
    scope_df = scope_df.drop_duplicates().sort_values('itemno').reset_index(
        drop=True)
    scope_df['qty'] = scope_df['itemno'].map(counts)
    scope_df['ms'] = scope_df['ms'].astype('int32')
    # Keep only rows matching the quoted maintenance term.
    res_df = scope_df.loc[scope_df['ms'] == data['maint_years']].reset_index(
        drop=True)

    # Price each in-scope item. Rows are collected in a list and turned into
    # a DataFrame once: DataFrame.append was removed in pandas 2.0.
    line_items = []
    for _, line in res_df.iterrows():
        qty = int(line['qty'])
        sap_num = int(line['itemno'])

        item_row = session.query(DPitems.sap_num, DPitems.item_desc,
                                 DPitems.plist, DPitems.friendlyname,
                                 DPitems.shortid)\
            .filter(DPitems.sap_num == sap_num)\
            .all()
        line_items.append({
            'sap_num': str(item_row[0][0]),
            'item_desc': item_row[0][1],
            'price': item_row[0][2],
            'fname': item_row[0][3],
            'shortid': item_row[0][4],
            'qty': qty
        })

    df = pd.DataFrame(line_items)

    df['ext_price'] = df['price'] * df['qty']
    config = df.to_dict(orient='records')
    total_price = df['ext_price'].sum()
    result = {
        "total_price": total_price,
        "quote_id": data['quote_id'],
        "bsc_name": data['bsc_name'],
        "rep_name": data['rep_name'],
        "cx_name": data['cx_name'],
        "cx_sapid": data['cx_sapid'],
        "quote_date": data['quote_date'],
        "maint_url": data['maint_url'],
        "maint_years": data['maint_years'],
        "unlock": data['unlock'],
        "rid": data['rid'],
        "mrid": data['mrid'],
        "pcode": data['pcode'],
        "edition": edition,
        "config": config
    }

    session.commit()
    engine.dispose()

    return result
def setup_db(pois_table_name, count_table_name, source):
    """Create (if needed) the table for *source* and return an ORM session
    plus the automapped class for it.

    :param pois_table_name: name of the POI table to create/reflect.
    :param count_table_name: currently unused (kept for interface
        compatibility with existing callers).
    :param source: which table-creation helper to use (e.g. "google",
        "FSQ", "twitter", ...).
    :returns: ``(session, STable)`` for most sources, ``(session, STable,
        [])`` for "FSQ" (historical special case), or ``0`` for an
        unknown source.
    """
    Base = automap_base()
    # Connect to the database
    #db = create_engine("sqlite:///../../../databases/google_places.db")
    db = create_engine('postgresql://*****:*****@localhost/pois')
    # create object to manage table definitions
    metadata = MetaData(db)

    # source -> table-creation helper (all defined elsewhere in this module).
    # Replaces the original 16-branch if/elif chain.
    creators = {
        "google": create_google_pois_table,
        "matching_table": create_matching_table,
        "FSQ": create_fsq_pois_table,
        "fsq_matched": create_fsq_matched_pois_table,
        "google_matched": create_google_matched_pois_table,
        "twitter": create_twitter_pois_table,
        "gsv": create_gsv_pois_table,
        "scene_features": create_scene_features_table,
        "text_features": create_text_features_table,
        "similarities": create_similarities_table,
        "coco": create_object_detection_coco_table,
        "oid": create_object_detection_oid_table,
        "gf": create_gf_features_table,
        "reviews": create_reviews_table,
        "review_features": create_review_features_table,
        "spatial": create_spatial_features_table,
    }

    creator = creators.get(source)
    if creator is None:
        print("ERROR, none of the right options were given")
        return 0

    # create table if it doesn't exist - also define the table.
    creator(db, pois_table_name, metadata)

    if source == "FSQ":
        # Historical special case: FSQ callers expect a 3-tuple.
        Base.prepare(db, reflect=True)
        STable = getattr(Base.classes, pois_table_name)
        session = Session(db)
        return session, STable, []

    try:
        # reflect the tables
        Base.prepare(db, reflect=True)
        STable = getattr(Base.classes, pois_table_name)
    except Exception as err:
        # BUG FIX: the original printed the error and fell through, then
        # crashed with UnboundLocalError on STable at the return below;
        # re-raise so the real failure surfaces to the caller.
        print("err", err)
        raise

    # create a Session
    session = Session(db)
    return session, STable
Example no. 35
0
def getEditionDetails(edition):
    """Return the core item, included options and included nodes for a
    Dispatcher Phoenix *edition*.

    Looks the edition up in the price list (table ``dispatcher_phoenix``),
    then collects the nodes (``dp_nodes``) and core options (``dp_coreopts``)
    flagged as included for that edition.

    :param edition: edition display name, e.g. "Foundations", "Legal".
    :returns: dict with ``pconf`` (core item), ``inc_opts`` and
        ``inc_nodes`` (bucketed by input/process/distribute stage).
    :raises ValueError: for an unknown edition name (the original code
        died with UnboundLocalError instead).
    """
    import sqlalchemy
    from sqlalchemy.ext.automap import automap_base
    from sqlalchemy.orm import Session
    from sqlalchemy import create_engine, func, inspect, MetaData
    from config import local_db
    engine = create_engine(local_db)

    Base = automap_base(bind=engine)
    Base.prepare(engine, reflect=True)

    DP_nodes = Base.classes.dp_nodes
    DPitems = Base.classes.dispatcher_phoenix
    DP_coreopts = Base.classes.dp_coreopts
    session = Session(engine)

    conf_query = session.query(DPitems.sap_num, DPitems.item_desc,
                               DPitems.plist, DPitems.friendlyname)\
        .filter(DPitems.friendlyname == edition)\
        .all()

    pconf = {
        "item_num": conf_query[0][0],
        "desc": conf_query[0][1],
        "price": conf_query[0][2]
    }

    # edition -> (dp_nodes inclusion column, dp_coreopts inclusion column).
    # Replaces the original 11-branch if/elif chain. AccurioPro Connect
    # shares the ECM Basic node set, exactly as in the original branches.
    edition_columns = {
        'Foundations': ('ed_foundations', 'foundations'),
        'Professional': ('ed_professional', 'professional'),
        'Office': ('ed_office', 'office'),
        'Legal': ('ed_legal', 'legal'),
        'Healthcare': ('ed_healthcare', 'healthcare'),
        'Government': ('ed_government', 'government'),
        'Finance': ('ed_finance', 'finance'),
        'Education': ('ed_education', 'education'),
        'ECM': ('ed_ecm', 'ecma'),
        'ECM Basic': ('ed_ecm_basic', 'ecmb'),
        'AccurioPro Connect': ('ed_ecm_basic', 'acpconn'),
    }
    try:
        node_col, opt_col = edition_columns[edition]
    except KeyError:
        engine.dispose()
        raise ValueError("unknown edition: {!r}".format(edition))

    # dp_nodes uses boolean columns; dp_coreopts stores the string 'true'
    # (both conventions preserved from the original queries).
    node_query = session.query(DP_nodes.friendlyname, DP_nodes.node_type)\
        .filter(getattr(DP_nodes, node_col) == True)\
        .all()
    opt_query = session.query(DP_coreopts.friendlyname, DP_coreopts.shortid)\
        .filter(getattr(DP_coreopts, opt_col) == 'true').all()

    # Bucket the included nodes by their pipeline stage.
    input_nodes = [name for name, ntype in node_query if ntype == 'input']
    process_nodes = [name for name, ntype in node_query if ntype == 'process']
    distribute_nodes = [name for name, ntype in node_query
                        if ntype == 'distribute']

    result = {
        'pconf': pconf,
        'inc_opts': opt_query,
        'inc_nodes': {
            'input': input_nodes,
            'process': process_nodes,
            'distribute': distribute_nodes
        }
    }

    session.commit()
    engine.dispose()

    return result
Example no. 36
0
def getQuote(quote_id):
    """Rebuild the full Dispatcher Phoenix quote identified by *quote_id*.

    Loads the stored quote row, expands its user counts, enabled options,
    failover, maintenance and professional-services entries into priced line
    items from the dp_items price list, and returns a dict with the total,
    customer data, included nodes for the edition, and the line-item config.
    """
    import sqlalchemy
    from sqlalchemy.ext.automap import automap_base
    from sqlalchemy.orm import Session
    from sqlalchemy import create_engine, func, inspect, MetaData
    import pandas as pd

    # NOTE(review): the sibling functions do `from config import local_db`;
    # here `local_db` must come from module scope or this raises NameError
    # — confirm it is defined at module level.
    engine = create_engine(local_db)

    Base = automap_base(bind=engine)
    Base.prepare(engine, reflect=True)

    DPquotes = Base.classes.dp_quotes
    session = Session(engine)

    # Exactly one quote row is expected; .one() raises otherwise.
    query = session.query(DPquotes).filter(DPquotes.quote_id == quote_id).one()

    data = query.__dict__

    # Columns named option_<shortid> whose stored value is "on" become the
    # upper-cased option shortids to price later.
    options = []
    for item, state in data.items():
        if state == 'on':
            spl_option = item.split('_')
            if spl_option[0] == 'option':
                options.append(spl_option[1].upper())

    # Maintenance item-number suffix, e.g. "-M3" for a 3-year term.
    msufx = "-M" + str(data['maint_years'])

    countGrps = {
        "input_count": int(data['input_count']),
        "mobile_users": int(data['mobile_users']),
        "release2me_devices": int(data['release2me_devices']),
        "batch_users": int(data['batch_users']),
        "workstation_users": int(data['workstation_users'])
    }

    failoverCount = int(data["ent_failover_count"])

    # Professional-services hours; values may be None, 0, "on" or a count.
    serviceGrps = {
        "SEC-CONSULT": data['sec_consult'],
        "SEC-DEVELOP": data['sec_develop'],
        "KMBS-INSTALL": data['kmbs_install'],
        "SEC-REMOTE": data['sec_remote']
    }
    # svcs = ['sec_consult','sec_develop','kmbs_install','sec_remote']
    # serviceGrps = {}
    # for svc in svcs:
    #         if data[svc] != None:
    #                 serviceGrps.update()

    multi_count = int(data["ent_multi_count"])

    offloadGrps = {
        "OFFLOADMFP": int(data["ent_offloadmfp_count"]),
        "OFFLOADMOBILE": int(data["ent_offloadmobile_count"]),
        "OFFLOADWEB": int(data["ent_offloadweb_count"]),
        "OFFLOADWORKFLOWA": int(data["ent_workflowa_count"]),
        "OFFLOADWORKFLOWB": int(data["ent_workflowb_count"])
    }

    # Split each raw count into priced pack sizes (greedy, largest first),
    # producing shortid -> quantity entries such as "ADDINPUT-100": 2.
    def countBreaker(countGrps):
        counts = {}
        for grp in countGrps:
            if countGrps[grp] > 0:
                if grp == 'input_count':
                    input_count = countGrps[grp]
                    i_count = 0
                    x_count = 0
                    xxv_count = 0
                    l_count = 0
                    c_count = 0
                    d_count = 0

                    while input_count >= 500:
                        d_count += 1
                        input_count -= 500
                    while input_count >= 100:
                        c_count += 1
                        input_count -= 100
                    while input_count >= 50:
                        l_count += 1
                        input_count -= 50
                    while input_count >= 25:
                        xxv_count += 1
                        input_count -= 25
                    while input_count >= 10:
                        x_count += 1
                        input_count -= 10
                    while input_count >= 1:
                        i_count += 1
                        input_count -= 1

                    counts["ADDINPUT-1"] = i_count
                    counts["ADDINPUT-10"] = x_count
                    counts["ADDINPUT-25"] = xxv_count
                    counts["ADDINPUT-50"] = l_count
                    counts["ADDINPUT-100"] = c_count
                    counts["ADDINPUT-500"] = d_count

                if grp == 'mobile_users':
                    mobile_count = countGrps[grp]
                    x_count = 0
                    xxv_count = 0
                    l_count = 0
                    c_count = 0
                    ccl_count = 0
                    d_count = 0
                    m_count = 0

                    while mobile_count >= 1000:
                        m_count += 1
                        mobile_count -= 1000
                    while mobile_count >= 500:
                        d_count += 1
                        mobile_count -= 500
                    while mobile_count >= 100:
                        c_count += 1
                        mobile_count -= 100
                    # NOTE(review): this 250-pack loop sits AFTER the 100-pack
                    # loop, so the remainder is already < 100 here and
                    # MOBILE-250 can never be used — suspected ordering bug,
                    # confirm intended pack priority.
                    while mobile_count >= 250:
                        ccl_count += 1
                        mobile_count -= 250
                    while mobile_count >= 50:
                        l_count += 1
                        mobile_count -= 50
                    while mobile_count >= 25:
                        xxv_count += 1
                        mobile_count -= 25
                    while mobile_count >= 10:
                        x_count += 1
                        mobile_count -= 10
                    # Any leftover 1-9 users are rounded UP to one 10-pack
                    # (mobile has no 1-user item).
                    if mobile_count < 10 and mobile_count > 0:
                        x_count += 1
                    counts["MOBILE-10"] = x_count
                    counts["MOBILE-25"] = xxv_count
                    counts["MOBILE-50"] = l_count
                    counts["MOBILE-100"] = c_count
                    counts["MOBILE-250"] = ccl_count
                    counts["MOBILE-500"] = d_count
                    counts["MOBILE-1000"] = m_count

                if grp == "release2me_devices":
                    input_count = countGrps[grp]
                    i_count = 0
                    iii_count = 0
                    v_count = 0
                    x_count = 0
                    xxv_count = 0
                    l_count = 0
                    c_count = 0
                    d_count = 0

                    while input_count >= 500:
                        d_count += 1
                        input_count -= 500
                    while input_count >= 100:
                        c_count += 1
                        input_count -= 100
                    while input_count >= 50:
                        l_count += 1
                        input_count -= 50
                    while input_count >= 25:
                        xxv_count += 1
                        input_count -= 25
                    while input_count >= 10:
                        x_count += 1
                        input_count -= 10
                    while input_count >= 5:
                        v_count += 1
                        input_count -= 5
                    while input_count >= 3:
                        iii_count += 1
                        input_count -= 3
                    while input_count >= 1:
                        i_count += 1
                        input_count -= 1

                    counts["RELEASE2ME-1"] = i_count
                    counts["RELEASE2ME-3"] = iii_count
                    counts["RELEASE2ME-5"] = v_count
                    counts["RELEASE2ME-10"] = x_count
                    counts["RELEASE2ME-25"] = xxv_count
                    counts["RELEASE2ME-50"] = l_count
                    counts["RELEASE2ME-100"] = c_count
                    counts["RELEASE2ME-500"] = d_count

                if grp == "batch_users":
                    input_count = countGrps[grp]
                    i_count = 0
                    i_count_a = 0
                    x_count = 0

                    # NOTE(review): this counts one 10-pack per single user
                    # above 10 (decrement is 1, not 10) — suspected bug;
                    # confirm against intended BATCHIND pricing.
                    while input_count > 10:
                        x_count += 1
                        input_count -= 1
                    while input_count > 1:
                        i_count_a += 1
                        input_count -= 1
                    while input_count == 1:
                        i_count += 1
                        input_count -= 1

                    counts["BATCHIND-1"] = i_count
                    counts["BATCHIND-1-ADD"] = i_count_a
                    counts["BATCHIND-10"] = x_count

                if grp == "workstation_users":
                    input_count = countGrps[grp]
                    i_count = 0
                    i_count_a = 0
                    x_count = 0

                    # NOTE(review): same pattern as batch_users above —
                    # one 10-pack is counted per single unit; confirm.
                    while input_count > 10:
                        x_count += 1
                        input_count -= 1
                    while input_count > 1:
                        i_count_a += 1
                        input_count -= 1
                    while input_count == 1:
                        i_count += 1
                        input_count -= 1

                    counts["WORKSTN-1"] = i_count
                    counts["WORKSTN-1-ADD"] = i_count_a
                    counts["WORKSTN-10"] = x_count

        return counts

    # Look each non-zero shortid up in dp_items and append a priced line item.
    # (DPitems is bound below, before any of these helpers are called.)
    # NOTE(review): DataFrame.append was removed in pandas 2.0; this module
    # requires pandas < 2 as written.
    def basicQuery(counts, df):
        for grp in counts:
            if counts[grp] > 0:
                query = session.query(DPitems.sap_num, DPitems.item_desc, DPitems.plist, DPitems.friendlyname, DPitems.shortid)\
                                .filter(DPitems.shortid == grp)\
                                .all()
                core_dict = {
                    'sap_num': query[0][0],
                    'item_desc': query[0][1],
                    'price': query[0][2],
                    'fname': query[0][3],
                    'shortid': query[0][4],
                    'qty': (counts[grp])
                }
                df = df.append(core_dict, ignore_index=True)

        return df

    # One line item (qty 1) per enabled option shortid.
    def optionQuery(options, df):
        for option in options:
            query = session.query(DPitems.sap_num, DPitems.item_desc, DPitems.plist, DPitems.friendlyname, DPitems.shortid)\
                            .filter(DPitems.shortid == option)\
                            .all()

            core_dict = {
                'sap_num': query[0][0],
                'item_desc': query[0][1],
                'price': query[0][2],
                'fname': query[0][3],
                'shortid': query[0][4],
                'qty': 1
            }
            df = df.append(core_dict, ignore_index=True)

        return df

    # Add failover counterparts for eligible line items already in df.
    def failoverQuery(failovercount, df):
        if failovercount > 0:
            R2MFOdone = False

            # Items with no failover SKU are skipped; RELEASE2ME items map to
            # a single shared RELEASE2ME-FAILOVER entry at most once.
            for shortid in df['shortid']:
                if 'WINFAX' in shortid:
                    continue
                if 'PDFPROC' in shortid:
                    continue
                if 'LASERFICHE' in shortid:
                    continue
                if 'HL7' in shortid:
                    continue
                if 'DROPBOXIN' in shortid:
                    continue
                if 'ACPUPGRADE' in shortid:
                    continue
                if 'OFFLOAD' in shortid:
                    continue
                if 'RELEASE2ME' in shortid and R2MFOdone == True:
                    continue
                elif 'RELEASE2ME' in shortid and R2MFOdone == False:
                    query = session.query(DPitems.sap_num, DPitems.item_desc, DPitems.plist, DPitems.friendlyname, DPitems.shortid)\
                            .filter(DPitems.shortid == "RELEASE2ME-FAILOVER")\
                            .all()
                    R2MFOdone = True

                else:
                    query = session.query(DPitems.sap_num, DPitems.item_desc, DPitems.plist, DPitems.friendlyname, DPitems.shortid)\
                            .filter(DPitems.shortid == shortid+"-FAILOVER")\
                            .all()

            # NOTE(review): core_dict/append sit OUTSIDE the for loop, so only
            # the LAST queried failover item is appended (once, with the full
            # failovercount as qty) — suspected indentation bug; confirm
            # whether every eligible item should get a failover line.
            core_dict = {
                'sap_num': query[0][0],
                'item_desc': query[0][1],
                'price': query[0][2],
                'fname': query[0][3],
                'shortid': query[0][4],
                'qty': failovercount
            }
            df = df.append(core_dict, ignore_index=True)

        return df

    # Add a maintenance line item (shortid + msufx) for each existing item.
    # Rebinding df via append leaves the iterated Series untouched, so the
    # newly added maintenance rows are not themselves re-processed.
    def maintenanceQuery(df):
        for shortid in df['shortid']:
            mqty = int(df.loc[df['shortid'] == shortid]['qty'])
            query = session.query(DPitems.sap_num, DPitems.item_desc, DPitems.plist, DPitems.friendlyname, DPitems.shortid)\
                            .filter(DPitems.shortid == shortid+msufx)\
                            .all()
            core_dict = {
                'sap_num': query[0][0],
                'item_desc': query[0][1],
                'price': query[0][2],
                'fname': query[0][3],
                'shortid': query[0][4],
                'qty': mqty
            }
            df = df.append(core_dict, ignore_index=True)
        return df

    # Professional services: "on" means a single unit, otherwise the stored
    # value is the hour count. None/0 entries are skipped.
    def prosvcsQuery(serviceGrps, df):
        for service in serviceGrps:
            print(service)
            if serviceGrps[service] != 0:
                print(serviceGrps[service])
                if serviceGrps[service] == 0 or serviceGrps[service] == None:
                    continue
                elif serviceGrps[service] == "on":
                    qty = 1
                else:
                    qty = serviceGrps[service]


                query = session.query(DPitems.sap_num, DPitems.item_desc, DPitems.plist, DPitems.friendlyname, DPitems.shortid)\
                                        .filter(DPitems.shortid == service)\
                                        .all()
                core_dict = {
                    'sap_num': query[0][0],
                    'item_desc': query[0][1],
                    'price': query[0][2],
                    'fname': query[0][3],
                    'shortid': query[0][4],
                    'qty': qty
                }
                df = df.append(core_dict, ignore_index=True)
        return df

    # Second engine/automap for the item and node tables; the helpers above
    # close over DPitems, which is only bound here — before any helper runs.
    engine = create_engine(local_db)

    Base = automap_base(bind=engine)
    Base.prepare(engine, reflect=True)
    DP_nodes = Base.classes.dp_nodes
    DPitems = Base.classes.dp_items
    session = Session(engine)

    # Seed the line-item frame with the edition's core item, then layer on
    # counts, offloads, options, failover, maintenance and services.
    core_query = session.query(DPitems.sap_num, DPitems.item_desc, DPitems.plist, DPitems.friendlyname, DPitems.shortid)\
            .filter(DPitems.friendlyname == data["dp_edition"])\
            .all()
    core_dict = {
        'sap_num': core_query[0][0],
        'item_desc': core_query[0][1],
        'price': core_query[0][2],
        'fname': core_query[0][3],
        'shortid': core_query[0][4],
        'qty': 1
    }

    df = pd.DataFrame(data=core_dict, index=[0])
    df = basicQuery(countBreaker(countGrps), df)
    df = basicQuery(offloadGrps, df)
    df = optionQuery(options, df)
    df = failoverQuery(failoverCount, df)
    df = maintenanceQuery(df)
    df = prosvcsQuery(serviceGrps, df)

    quote_id = data['quote_id']
    cx_name = data['cx_name']
    cx_sapid = data['cx_sapid']
    bsc_name = data['bsc_name']
    rep_name = data['rep_name']
    quote_date = data['quote_date']
    edition = data.get('dp_edition')

    # Collect the nodes included with the quoted edition.
    # NOTE(review): an edition name outside this chain leaves node_query
    # unbound and the loop below raises UnboundLocalError — confirm inputs
    # are validated upstream.
    if edition == 'Foundations':
        node_query = session.query(DP_nodes.friendlyname, DP_nodes.node_type)\
                .filter(DP_nodes.ed_foundations == True)\
                .all()

    elif edition == 'Professional':
        node_query = session.query(DP_nodes.friendlyname, DP_nodes.node_type)\
                .filter(DP_nodes.ed_professional == True)\
                .all()

    elif edition == 'Office':
        node_query = session.query(DP_nodes.friendlyname, DP_nodes.node_type)\
                .filter(DP_nodes.ed_office == True)\
                .all()

    elif edition == 'Legal':
        node_query = session.query(DP_nodes.friendlyname, DP_nodes.node_type)\
                .filter(DP_nodes.ed_legal == True)\
                .all()

    elif edition == 'Healthcare':
        node_query = session.query(DP_nodes.friendlyname, DP_nodes.node_type)\
                .filter(DP_nodes.ed_healthcare == True)\
                .all()

    elif edition == 'Government':
        node_query = session.query(DP_nodes.friendlyname, DP_nodes.node_type)\
                .filter(DP_nodes.ed_government == True)\
                .all()

    elif edition == 'Finance':
        node_query = session.query(DP_nodes.friendlyname, DP_nodes.node_type)\
                .filter(DP_nodes.ed_finance == True)\
                .all()

    elif edition == 'Education':
        node_query = session.query(DP_nodes.friendlyname, DP_nodes.node_type)\
                .filter(DP_nodes.ed_education == True)\
                .all()

    elif edition == 'ECM':
        node_query = session.query(DP_nodes.friendlyname, DP_nodes.node_type)\
                .filter(DP_nodes.ed_ecm == True)\
                .all()

    elif edition == 'ECM Basic':
        node_query = session.query(DP_nodes.friendlyname, DP_nodes.node_type)\
                .filter(DP_nodes.ed_ecm_basic == True)\
                .all()

    # AccurioPro Connect shares the ECM Basic node set.
    elif edition == 'AccurioPro Connect':
        node_query = session.query(DP_nodes.friendlyname, DP_nodes.node_type)\
                .filter(DP_nodes.ed_ecm_basic == True)\
                .all()

    input_nodes = []
    process_nodes = []
    distribute_nodes = []

    # Bucket the included nodes by pipeline stage.
    for item in node_query:
        if item[1] == 'input':
            input_nodes.append(item[0])
        if item[1] == 'process':
            process_nodes.append(item[0])
        if item[1] == 'distribute':
            distribute_nodes.append(item[0])

    session.commit()
    engine.dispose()

    df['ext_price'] = df['price'] * df['qty']
    config = df.to_dict(orient='records')
    total_price = df['ext_price'].sum()
    result = {
        "quote_id": quote_id,
        "total_price": total_price,
        "edition": edition,
        "cx_name": cx_name,
        "cx_sapid": cx_sapid,
        "bsc_name": bsc_name,
        "rep_name": rep_name,
        "quote_date": quote_date,
        "nodes": {
            "input_nodes": input_nodes,
            "process_nodes": process_nodes,
            "distribute_nodes": distribute_nodes
        },
        "config": config
    }
    return result
Example no. 37
0
def replicate_sub(master_dsn, slave_dsn, tables=None):
    """Database replication subscriber.

    This meepo event sourcing system is based upon database primary key, so
    table should have a pk here.

    The function will subscribe to the event sourcing pk stream, retrive rows
    from master based pk and then update the slave.

    :param master_dsn: sqlalchemy dsn of the master (source) database.
    :param slave_dsn: sqlalchemy dsn of the slave (target) database.
    :param tables: optional iterable of table names to subscribe to; only
        names present in the slave schema are used. Defaults to every table
        reflected from the slave.
    """
    logger = logging.getLogger("meepo.sub.replicate_sub")

    # sqlalchemy reflection
    logger.info("reflecting master database: {}".format(master_dsn))
    master_engine = sa.create_engine(master_dsn)
    master_base = automap_base()
    master_base.prepare(engine=master_engine, reflect=True)
    MasterSession = scoped_session(sessionmaker(bind=master_engine))

    logger.info("reflecting slave database: {}".format(slave_dsn))
    slave_engine = sa.create_engine(slave_dsn)
    slave_base = automap_base()
    slave_base.prepare(engine=slave_engine, reflect=True)
    SlaveSession = scoped_session(sessionmaker(bind=slave_engine))

    def _write_by_pk(name, pk):
        """Copy row from master to slave based on pk
        """
        MasterModel = master_base.classes[name]
        obj = MasterSession.query(MasterModel).get(pk)
        if not obj:
            logger.error("pk for {} not found in master: {}".format(name, pk))
            return

        SlaveModel = slave_base.classes[name]
        # Copy only attributes that are real slave columns (skips ORM
        # internals such as _sa_instance_state).
        columns = [c.name for c in SlaveModel.__table__.columns]
        s_obj = SlaveModel(**{k: v
                              for k, v in obj.__dict__.items()
                              if k in columns})
        SlaveSession.add(s_obj)

        try:
            SlaveSession.commit()
        except SQLAlchemyError as e:
            SlaveSession.rollback()
            logger.exception(e)

        # cleanup
        MasterSession.close()
        SlaveSession.close()

    def _update_by_pk(name, pk):
        """Update row from master to slave based on pk
        """
        MasterModel = master_base.classes[name]
        obj = MasterSession.query(MasterModel).get(pk)

        SlaveModel = slave_base.classes[name]
        s_obj = SlaveSession.query(SlaveModel).get(pk)
        if not s_obj:
            # Row missing on the slave: fall back to an insert.
            return _write_by_pk(name, pk)

        columns = [c.name for c in SlaveModel.__table__.columns]
        for col in columns:
            try:
                val = getattr(obj, col)
            except AttributeError:
                # Column absent on the master model: leave slave value as-is.
                continue
            setattr(s_obj, col, val)

        try:
            SlaveSession.commit()
        except SQLAlchemyError as e:
            SlaveSession.rollback()
            logger.exception(e)

        # cleanup
        MasterSession.close()
        SlaveSession.close()

    def _delete_by_pk(name, pk):
        """Delete row from slave based on pk
        """
        Model = slave_base.classes[name]
        obj = SlaveSession.query(Model).get(pk)
        if obj:
            SlaveSession.delete(obj)
        SlaveSession.commit()

        # cleanup
        SlaveSession.close()

    def _sub(table):
        # Connect the three pk-event signals for one table; weak=False keeps
        # the closures alive after this function returns.

        def _sub_write(pk):
            logger.info("dbreplica_sub {}_write: {}".format(table, pk))
            _write_by_pk(table, pk)
        signal("%s_write" % table).connect(_sub_write, weak=False)

        def _sub_update(pk):
            logger.info("dbreplica_sub {}_update: {}".format(table, pk))
            _update_by_pk(table, pk)
        signal("%s_update" % table).connect(_sub_update, weak=False)

        def _sub_delete(pk):
            logger.info("dbreplica_sub {}_delete: {}".format(table, pk))
            _delete_by_pk(table, pk)
        signal("%s_delete" % table).connect(_sub_delete, weak=False)

    if tables:
        tables = [t for t in tables if t in slave_base.classes.keys()]
    else:
        # BUG FIX: the original iterated `tables` unconditionally, so the
        # documented default tables=None raised TypeError; default to every
        # table reflected from the slave instead.
        tables = list(slave_base.classes.keys())

    for table in tables:
        _sub(table)
Example no. 38
0
 def reflect_db_tables_to_sqlalchemy_classes(self):
     """Reflect every table in self.engine's database into automapped ORM
     classes, exposed afterwards on self.Base.classes."""
     base = automap_base()
     base.prepare(self.engine, reflect=True)
     self.Base = base
Example no. 39
0
import sqlalchemy
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
from sqlalchemy import create_engine, func

from flask import Flask, jsonify, render_template

#################################################
# Database Setup
#################################################

# Create Engine
engine = create_engine("sqlite:///cities_db_3.sqlite")

# Reflect the existing database into automap ORM classes.
Base = automap_base()
# reflect the tables
Base.prepare(engine, reflect=True)

# Save references to the reflected 'cities' and 'industry' tables.
Cities = Base.classes.cities
Industry = Base.classes.industry
# Create our session (link) from Python to the DB
# NOTE(review): a single module-level session shared by all requests —
# confirm this app is single-threaded, as sessions are not thread-safe.
session = Session(engine)

#################################################
# Flask Setup
#################################################
app = Flask(__name__)

#################################################
Esempio n. 40
0
def pinger(bot, update, args):
    """Telegram /ping command handler: manage per-chat keyword "pingers".

    A pinger row pairs a username with a trigger word in a chat; elsewhere
    the bot mentions the user when the word appears.  Admins (``ADMINS``)
    may list, add and delete entries for any user; everyone else manages
    only their own entries, capped at 10.

    NOTE(review): argument parsing is exception-driven by design — missing
    ``args`` items raise IndexError, and the bare ``raise`` on an empty
    value raises RuntimeError; both are converted into a usage message by
    the surrounding bare ``except`` blocks.
    """

    chat_id = update.message.chat_id
    username = update.message.from_user.username
    args_line = " ".join(args)  # original argument string, kept for logging

    # Reflect the schema on every call and grab the pingers table class.
    engine = create_engine(DATABASE)
    Base = automap_base()
    Base.prepare(engine, reflect=True)
    pingers = Base.classes.pingers

    if username in ADMINS:
        # --- admin branch: /ping <user> <word> | /ping all | /ping delete ... ---
        try:
            p_username = args[0]
            try:
                match = args[1].lower()
            except:
                # Only one argument given: unless it is a subcommand, treat
                # it as the admin pinging themselves with that word.
                if args[0] not in ["all", "delete"]:
                    p_username = username
                    match = args[0]
            if not p_username: raise
        except:
            usage_text = "Usage: \n`/ping username <word>`\n`/ping all`\n`/ping delete username <word>`"
            bot.send_message(chat_id=update.message.chat_id,
                             parse_mode='markdown',
                             text=usage_text)
            return
        with connector(engine) as ses:
            try:
                if p_username == "all":
                    # List every pinger registered for this chat.
                    all_matches = ses.query(pingers).filter(pingers.chat_id == chat_id).all()
                    out_text = ""
                    for match in all_matches:
                        out_text += "\n{} | {}".format(match.username, match.match)
                    bot.send_message(chat_id=update.message.chat_id,
                                     text=out_text)
                elif p_username == "delete":
                    # /ping delete [username] <word>; username defaults to caller.
                    try:
                        p_username = args[1]
                        try:
                            delete_match = args[2].lower()
                        except:
                            p_username = username
                            delete_match = args[1].lower()
                    except:
                        out_text = "Usage `/ping delete username <word>`"
                        bot.send_message(chat_id=update.message.chat_id,
                                         parse_mode='markdown',
                                         text=out_text)
                        return
                    ses.query(pingers).filter(and_(
                           pingers.chat_id == chat_id,
                           pingers.username == p_username,
                           pingers.match == delete_match)).delete()
                    bot.send_message(chat_id=update.message.chat_id,
                                     text="Deleted")
                    log_print('Delete pinger "{0}"'.format(args_line), username)
                else:
                    # Insert a new pinger row (admins have no 10-entry cap).
                    with connector(engine) as ses:
                        new_pinger = pingers(
                            username=p_username,
                            match=match,
                            chat_id=chat_id)
                        ses.add(new_pinger)
                    bot.send_message(chat_id=update.message.chat_id,
                                     text="Successfuly added")
                    log_print('Added pinger "{0}"'.format(args_line), username)
            except:
                # Catch-all so a DB/Telegram failure never crashes the handler.
                bot.send_message(chat_id=update.message.chat_id,
                                 text="There was some trouble")
                log_print('Error while add pinger "{0}"'.format(args_line), username)
    else:
        # --- non-admin branch: the caller manages only their own pingers ---
        try:
            try:
                user_match = args[0].lower()
                if not user_match: raise
            except:
                out_text = "Usage: \n`/ping <word>`\n`/ping all`\n`/ping delete <word>`"
                bot.send_message(chat_id=update.message.chat_id,
                                 parse_mode='markdown',
                                 text=out_text)
                return
            with connector(engine) as ses:
                if user_match == "all":
                    # List the caller's own pingers in this chat.
                    all_matches = ses.query(pingers).filter(and_(
                                  pingers.chat_id == chat_id,
                                  pingers.username == username)).all()
                    out_text = ""
                    for match in all_matches:
                        out_text += "\n{} | {}".format(match.username, match.match)
                    bot.send_message(chat_id=update.message.chat_id,
                                     text=out_text)
                elif user_match == "delete":
                    try:
                        delete_match = args[1].lower()
                    except:
                        out_text = "Usage `/ping delete <word>`"
                        bot.send_message(chat_id=update.message.chat_id,
                                         parse_mode='markdown',
                                         text=out_text)
                        return
                    ses.query(pingers).filter(and_(
                              pingers.chat_id == chat_id,
                              pingers.username == username,
                              pingers.match == delete_match)).delete()
                    bot.send_message(chat_id=update.message.chat_id,
                                     text="Deleted")
                    log_print('Delete pinger "{0}"'.format(args_line))

                else:
                    # Enforce the 10-pinger-per-user cap before inserting.
                    count = ses.query(pingers).filter(and_(
                                pingers.chat_id == chat_id,
                                pingers.username == username)).count()
                    if count < 10:
                        new_pinger = pingers(
                            username=username,
                            match=user_match,
                            chat_id=chat_id)
                        ses.add(new_pinger)
                        bot.send_message(chat_id=update.message.chat_id,
                                         text="Successfuly added")
                        log_print('Added pinger "{0}"'.format(args_line), username)
                    else:
                        bot.send_message(chat_id=update.message.chat_id,
                                         text="You can add only 10 matches")
                        log_print('Pinger limit is settled', username)
        except:
            bot.send_message(chat_id=update.message.chat_id,
                             text="There was some trouble")
            log_print('Error while add pinger "{0}"'.format(args_line), username)
from sqlalchemy import create_engine, func
# FIX: automap_base and Session are used below but were never imported,
# which made this module raise NameError at import time.
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session
from sqlalchemy import Column, Integer, String, Float, Date, Numeric

from flask import Flask, jsonify, render_template

import datetime as dt

#################################################
# Database1 Setup
#################################################
engine = create_engine("sqlite:///hist.sqlite")
conn = engine.connect()

# reflect an existing database into a new model
Base = automap_base()

# reflect the tables
Base.prepare(engine, reflect=True)

# Create our session (link) from Python to the DB
session = Session(engine)

#################################################
# Database2 Setup
#################################################
engine2 = create_engine("sqlite:///histcity.sqlite")
conn2 = engine2.connect()

# reflect an existing database into a new model
Base2 = automap_base()
Esempio n. 42
0
# Explicit Table definitions registered on the shared metadata.
# NOTE(review): ``metadata`` is created earlier in this module (outside this
# excerpt) and is presumably already bound/reflected — confirm, since
# automap's prepare() below is called without an engine.
subscribers = Table('subscribers', metadata,
                    Column('id', Integer, primary_key=True),
                    Column('email', String),
                    )

user_instances = Table('user_instances', metadata,
                       Column('id', Integer, primary_key=True),
                       Column('email', String),
                       Column('phone', String),
                       Column('card_number', String),
                       Column('pib', String)
                       )


# Generate ORM classes from the tables already present in the metadata.
Base = automap_base(metadata=metadata)
Base.prepare()

# Convenience aliases for the four mapped classes used below.
LPCustomer, WebsiteUser, Subscribers, UserInstance = Base.classes.loyalty_program_customers, \
                                                     Base.classes.website_users,  Base.classes.subscribers, \
                                                     Base.classes.user_instances


def generate_user_instances(session, instances):
    session.query(UserInstance).delete()

    lp_customers_cursor = session.query(
        LPCustomer.email.label('user_email'),
        LPCustomer.phone.label('user_phone'),
        LPCustomer.pib.label('user_pib'),
        LPCustomer.loyalty_card_number.label('user_card_number')
Esempio n. 43
0
    remote_esg_dbname = os.environ['remote_esg_dbname']
    remote_esg_dbuser = os.environ['remote_esg_dbuser']
    remote_esg_dbpwd = os.environ['remote_esg_dbpwd']
    API_KEY = os.environ['mapboxkey']
else:
    from config import remote_esg_host, remote_db_port, remote_esg_dbname, remote_esg_dbuser, remote_esg_dbpwd 

# FIX: "postgresql" is the canonical SQLAlchemy dialect name; the legacy
# "postgres://" alias was removed in SQLAlchemy 1.4 and raises
# NoSuchModuleError there, so use the portable spelling.
engine = create_engine(f"postgresql://{remote_esg_dbuser}:{remote_esg_dbpwd}@{remote_esg_host}:{remote_db_port}/{remote_esg_dbname}")
conn = engine.connect()

# Initialize Flask application
app = Flask(__name__)
# FIX: close the model file instead of leaking the handle from
# pickle.load(open(...)).
# NOTE(review): unpickling executes arbitrary code — model.pkl must be trusted.
with open("model.pkl", "rb") as _model_file:
    model = pickle.load(_model_file)

# Set up SQL Alchemy connection and classes
Base = automap_base() # Declare a Base using `automap_base()`
Base.prepare(engine, reflect=True) # Use the Base class to reflect the database tables
Base.classes.keys() # Touch the mapped-class registry (no-op; kept from original)
# ClientInfo = Base.classes.client_info # Assign the client_info class (table) to a variable called `ClientInfo`
session = Session(engine) # Create a session
print(Base.classes.keys())

# Develop flask routes for each page and then the routes for the database info to feed the plots in our js files

@app.route("/")
def index():
    """Serve the application homepage."""
    homepage = "index.html"
    return render_template(homepage)

@app.route("/company_search")
def company_search():
Esempio n. 44
0
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
from sqlalchemy import create_engine, MetaData
from flask import Flask, jsonify, render_template

pg_connection_string = "postgresql://*****:*****@localhost:5432/us_ski_team_db"

engine = create_engine(pg_connection_string)

session = Session(engine)

m = MetaData()
Base = automap_base(bind=engine, metadata=m)
Base.prepare(engine, reflect=True)
print(Base.classes.keys())
ski_team = Base.classes.ski_team
mountain_elevations = Base.classes.mountain_elevations

app = Flask(__name__)


@app.route("/map")
def renderMap():
    """Serve the interactive map page."""
    template_name = "map.html"
    return render_template(template_name)


@app.route("/")
def home():
    """Serve the landing page."""
    template_name = "index.html"
    return render_template(template_name)

Esempio n. 45
0
def parser(bot, update):
    """Free-text message handler: weather, google, ping and canned answers.

    Normalises the incoming text (lowercase, 'ё'→'е', strip ',' and '.'),
    reflects the bot's tables, then runs the phrase tables in order.  The
    weather and google phases return early on a match; ping and answers
    always run afterwards.
    """

    in_text = update.message.text.lower().replace('ё', 'е').replace(',', '').replace('.', '')
    engine = create_engine(DATABASE)
    Base = automap_base()
    Base.prepare(engine, reflect=True)

    # Mapped classes for each reflected table.
    answers = Base.classes.answers
    w_phrases = Base.classes.w_phrases
    google_ignore = Base.classes.google_ignore
    google = Base.classes.google
    ping_phrases = Base.classes.ping_phrases
    ping_exclude = Base.classes.ping_exclude
    pingers = Base.classes.pingers

    # ------------ Weather ----------------
    with connector(engine) as ses:
        try:
            # If the text contains a weather trigger phrase, forward
            # everything after the phrase to weather() as the city args.
            phrase = "".join(ses.query(w_phrases.match).filter(
                literal(in_text.lower()).contains(w_phrases.match)).one())
            weather(bot, update, in_text.lower()[in_text.lower().find(phrase)+len(phrase):].split())
            return
        except NoResultFound:
            pass

    # ------------ Google -----------------
    with connector(engine) as ses:
        try:
            # An ignore phrase anywhere in the text suppresses the search.
            ses.query(google_ignore.ignore).filter(
                literal(in_text).like('%' + google_ignore.ignore + '%')).one()
        except NoResultFound:
            matches = ses.query(google.match).filter(
                literal(in_text).like(google.match + '%')).all()
            matches = [i for i in matches for i in i]  # flatten 1-tuples
            if matches:
                g_in_text = in_text.replace("?", "")
                # Strip the longest matching trigger to get the query text.
                g_in_text = g_in_text.replace(
                    sorted(matches, key=len)[-1], "").strip()

                if g_in_text:
                    out_text = 'https://www.google.ru/search?q={0}'.format(
                        g_in_text.replace(" ", "+"))
                    bot.send_message(chat_id=update.message.chat_id,
                                     disable_web_page_preview=1, text=out_text)
                    log_print(
                        'Google "{0}"'.format(g_in_text.strip()),
                        update.message.from_user.username
                    )
                return

    # ------------ Ping -----------------
    with connector(engine) as ses:
        in_text_list = in_text.split()
        username = update.message.from_user.username
        chat_id = update.message.chat_id

        try:
            # Only proceed when the message contains a ping trigger phrase.
            ses.query(ping_phrases.phrase).filter(
                ping_phrases.phrase.in_(in_text_list)).one()
            usernames = ses.query(pingers.username).filter(
                and_(
                    pingers.match.in_(in_text_list),
                    or_(
                        pingers.chat_id == chat_id,
                        pingers.chat_id == "all")
                )).distinct().all()
            usernames = [i for i in usernames for i in i]  # flatten 1-tuples
            if 'EVERYONE GET IN HERE' in usernames:
                # "Ping everyone" sentinel: expand to all known users except
                # the sender, honouring the ping_exclude word list.
                try:
                    ses.query(ping_exclude.match).filter(
                        ping_exclude.match.in_(in_text_list)).one()
                    usernames = ses.query(pingers.username).filter(
                        and_(
                            pingers.username.notin_(usernames),
                            pingers.username != username,
                            or_(
                                pingers.chat_id == chat_id,
                                pingers.chat_id == "all")
                        )).distinct().all()
                    usernames = [i for i in usernames for i in i]

                except NoResultFound:
                    usernames = ses.query(pingers.username).filter(
                        and_(
                            pingers.username != 'EVERYONE GET IN HERE',
                            pingers.username != username,
                            or_(
                                pingers.chat_id == chat_id,
                                pingers.chat_id == "all")
                        )).distinct().all()
                    usernames = [i for i in usernames for i in i]

            if usernames:
                out_text = " ".join(["@" + i for i in usernames])
                bot.send_message(chat_id=update.message.chat_id, text=out_text)
                log_print('Ping "{0}"'.format(out_text), username)
        except NoResultFound:
            pass

    # ------------ Answer -----------------
    with connector(engine) as ses:
        out_text = ses.query(answers.string).filter(
            literal(in_text).contains(answers.match))
    # NOTE(review): the query is iterated AFTER the connector block exits —
    # this relies on the session/connection still being usable here; confirm
    # against connector()'s implementation.
    for message in ["".join(i) for i in out_text]:
        bot.send_message(chat_id=update.message.chat_id, text=message)
        log_print("Answer", update.message.from_user.username)
Esempio n. 46
0
from sqlalchemy import create_engine
# FIX: automap_base is used below but was never imported in this module,
# which made it raise NameError at import time.
from sqlalchemy.ext.automap import automap_base

from flask import Flask, jsonify, render_template
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)

#################################################
# Database Setup
#################################################

app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///db/bellybutton.sqlite"
db = SQLAlchemy(app)

# reflect an existing database into a new model
Base = automap_base()
# reflect the tables
Base.prepare(db.engine, reflect=True)

# Save references to each table
Samples_Metadata = Base.classes.sample_metadata
Samples = Base.classes.samples


@app.route("/")
def index():
    """Return the homepage."""
    page = "index.html"
    return render_template(page)


@app.route("/names")
Esempio n. 47
0
def weather(bot, update, args):
    """Telegram /w command: current weather and forecast via OpenWeatherMap.

    ``args`` is the city name as a word list; when empty, falls back to the
    caller's saved city in the ``locations`` table, then to the
    ``default_city`` row.  Sends a markdown message with now / near-future /
    tomorrow (morning, noon, evening) conditions.
    """

    city = " ".join(args)
    username = update.message.from_user.username

    # Reflect the DB to access the saved locations table.
    engine = create_engine(DATABASE)
    Base = automap_base()
    Base.prepare(engine, reflect=True)

    locations = Base.classes.locations

    if not city:
        # No city given: look up the user's saved city, then the default.
        with connector(engine) as ses:
            try:
                city = ses.query(locations.city).filter(
                    locations.username == username).one()
                city = "".join([i for i in city])  # result row -> plain string
            except NoResultFound:
                try:
                    city = ses.query(locations.city).filter(
                        locations.username == "default_city").one()
                    city = "".join([i for i in city])
                except NoResultFound:
                    # No default either: tell admins how to set one.
                    if username in ADMINS:
                        error_message = '''
                        You didn't set the default city
                        You can add default city by this command:
                        `/db insert into locations(username,city) \
                        values(\"default_city\",\"YOUR CITY HERE\")`'''
                        error_message = "\n".join(
                            [i.strip() for i in error_message.split('\n')])
                    else:
                        error_message = "Administrator didn't set the default city\nTry /w City"
                    bot.send_message(chat_id=update.message.chat_id,
                                     parse_mode='markdown', text=error_message)
                    return

    try:
        owm = pyowm.OWM(WEATHER_TOKEN, language='en')
    except:
        error_message = "Invalid API token"
        bot.send_message(chat_id=update.message.chat_id, text=error_message)
        log_print('Weather "{0}"'.format(error_message), username)
        return

    try:
        observation = owm.weather_at_place(city)
    except pyowm.exceptions.not_found_error.NotFoundError:
        error_message = "Wrong location"
        bot.send_message(chat_id=update.message.chat_id, text=error_message)
        log_print('"{0}"'.format(error_message), username)
        return

    fc = owm.three_hours_forecast(city)
    w = observation.get_weather()
    city = observation.get_location().get_name()  # canonical city name from API

    # (period, field, hour) -> formatted value, filled in template order.
    weathers = {}

    # Today
    today = pyowm.timeutils.next_three_hours()
    weather = fc.get_weather_at(today)
    temp = str(round(weather.get_temperature(unit='celsius')["temp"]))
    # Prefix positive temperatures with '+' ("0" and negatives stay as-is).
    if temp[0] != '-' and temp != "0":
        weathers["today", "temp", 0] = '+' + temp
    else:
        weathers["today", "temp", 0] = temp
    weathers["today", "emoji", 0] = get_emoji(weather.get_status())
    status = weather.get_detailed_status()
    weathers["today", "status", 0] = status[0].upper() + status[1:]

    # Tomorrow: 06:00 (morning), 12:00 (noon), 18:00 (evening)
    for i in range(6, 19, 6):
            weather = fc.get_weather_at(pyowm.timeutils.tomorrow(i, 0))
            temp = str(round(weather.get_temperature('celsius')["temp"]))
            if temp[0] != '-' and temp != "0":
                weathers["tomorrow", "temp", i] = '+' + temp
            else:
                weathers["tomorrow", "temp", i] = temp
            weathers["tomorrow", "emoji", i] = get_emoji(weather.get_status())
            status = weather.get_detailed_status()
            weathers["tomorrow", "status", i] = status[0].upper() + status[1:]

    # Current conditions.
    now_temp = str(round(w.get_temperature(unit='celsius')["temp"]))
    if now_temp[0] != '-':
        now_temp = '+' + now_temp
    now_status = w.get_detailed_status()
    now_status = now_status[0].upper() + now_status[1:]
    now_emoji = get_emoji(w.get_status())

    try:
        # NOTE(review): slots {4}..{15} rely on dict insertion order matching
        # the template (today first, then tomorrow 6/12/18) — holds on
        # Python 3.7+ where dicts preserve insertion order.
        message = ''.join("""
        *Now:*
        *{0}:* {1} {2} {3}

        *In near future:*
        {4} {5} {6}

        *Tomorrow:*
        *Morning:* {7} {8} {9}
        *Noon:* {10} {11} {12}
        *Evening:* {13} {14} {15}
        """.format(city,
                   now_temp,
                   now_emoji,
                   now_status,
                   *[weathers[i] for i in weathers]))
    except IndexError:
        error_message = "Something wrong with API:\n\n{}".format(weathers)
        bot.send_message(chat_id=update.message.chat_id, text=error_message)
        log_print('"{0}"'.format(error_message), username)
        return

    # Strip the template's leading indentation from every line.
    message = "\n".join([k.strip() for k in message.split('\n')])

    bot.send_message(chat_id=update.message.chat_id,
                     parse_mode="markdown", text=message)

    log_print('Weather "{0}"'.format(city), username)
def sql_join(engine):
    ### SQLite Join Function
    Base = automap_base()
    Base.prepare(engine, reflect=True)
    print(Base.classes.keys()) # No Data? -- No primary Key?
    session = Session(engine)