async def receive_edits(req, resp):
    """Persist an edited GeoJSON feature into the staging table and flag the source row.

    Reads ``geojson`` from the request body, converts its geometry to WKT,
    inserts a row into Warehouse.dbo.LocationEdits (reformatting the raw id
    into a UNIQUEIDENTIFIER), marks the matching Business row as edited, and
    responds with ``{"success": True}``.
    """
    session = Session()
    try:
        body = await req.media()
        geojson = body.get("geojson")
        props = geojson.get("properties")
        geometry = geojson.get("geometry")
        s = shape(geometry)
        # Bind parameters instead of f-string interpolation: NameBrand/Address
        # come straight from the request body and were an SQL-injection vector.
        # This also stops silently stripping apostrophes from stored values,
        # which the old code did purely to dodge quoting issues.
        exec_str = text("""
            BEGIN
                DECLARE @uuid VARCHAR(50);
                DECLARE @uuid2 UNIQUEIDENTIFIER;
                SET @uuid = :raw_id;
                SET @uuid2 = CAST(SUBSTRING(@uuid, 1, 8) + '-' +
                                  SUBSTRING(@uuid, 9, 4) + '-' +
                                  SUBSTRING(@uuid, 13, 4) + '-' +
                                  SUBSTRING(@uuid, 17, 4) + '-' +
                                  SUBSTRING(@uuid, 21, 12) AS UNIQUEIDENTIFIER);
                INSERT INTO Warehouse.dbo.[LocationEdits]
                    ("LocationID", "NameBrand", "Address", "GeoEditDate", "ShapeGeo")
                VALUES (
                    @uuid2,
                    :name_brand,
                    :address,
                    :edit_date,
                    geography::STGeomFromText(:wkt, 4326)
                )
            END
        """)
        session.execute(exec_str, {
            "raw_id": props.get("id"),
            "name_brand": props.get("NameBrand"),
            "address": props.get("Address"),
            "edit_date": datetime.now().date().isoformat(),
            "wkt": s.wkt,
        })
        session.query(Business).filter(Business.id == props.get("id")).update(
            {"edited": True})
        session.commit()
        resp.media = {"success": True}
    finally:
        # Return the connection to the pool even if the insert fails.
        session.close()
def create_latest_demand(source_table, latest_table):
    """Aggregate raw requests into per-(t, x, y) demand counts in ``latest_table``.

    Floors request times to the global state-update cycle and bins origin
    coordinates onto the lon/lat grid, then stores the group sizes as a
    ``demand`` column and indexes the table on ``t``.
    """
    query = "SELECT * FROM {}".format(source_table)
    df = pd.read_sql(query, engine, index_col="id")
    print("# of rows {}".format(len(df)))
    unit_min = GLOBAL_STATE_UPDATE_CYCLE / 60
    # Floor request times to unit_min-sized buckets.
    df["t"] = (df.request_datetime / unit_min).astype(int) * unit_min
    df['x'] = ((df['origin_lon'] - MIN_LON) / DELTA_LON).astype(int)
    df['y'] = ((df['origin_lat'] - MIN_LAT) / DELTA_LAT).astype(int)
    latest_df = df.groupby(['t', 'x', 'y']).size()
    latest_df.name = 'demand'
    drop_table = """
    DROP TABLE IF EXISTS {};
    """.format(latest_table)
    Session.execute(drop_table)
    Session.commit()
    # Bug fix: the `flavor` argument was removed from DataFrame.to_sql
    # (deprecated in pandas 0.19, removed in 0.23); passing it raises TypeError.
    latest_df.reset_index().to_sql(latest_table, engine, schema=None,
                                   if_exists='fail', index=True,
                                   index_label=None, chunksize=None, dtype=None)
    create_index = """
    CREATE INDEX index_{table} ON {table} (t);
    """.format(table=latest_table)
    Session.execute(create_index)
    Session.commit()
def create_demand_profile(df, profile_table, n_weeks):
    """Build an average weekly demand profile into ``profile_table``.

    Groups requests by (dayofweek, hour, x, y), divides the counts by
    ``n_weeks`` to get a per-week average, and indexes the result on
    (dayofweek, hour).
    """
    df["dayofweek"] = df.datetime_obj.apply(lambda x: x.weekday())
    df["hour"] = df.datetime_obj.apply(lambda x: x.hour)
    df['x'] = ((df['origin_lon'] - MIN_LON) / DELTA_LON).astype(int)
    df['y'] = ((df['origin_lat'] - MIN_LAT) / DELTA_LAT).astype(int)
    profile_df = df.groupby(['dayofweek', 'hour', 'x', 'y'
                             ]).size() / float(n_weeks)
    profile_df.name = 'demand'
    drop_table = """
    DROP TABLE IF EXISTS {};
    """.format(profile_table)
    Session.execute(drop_table)
    Session.commit()
    # Bug fix: the `flavor` argument was removed from DataFrame.to_sql
    # (deprecated in pandas 0.19, removed in 0.23); passing it raises TypeError.
    profile_df.reset_index().to_sql(profile_table, engine, schema=None,
                                    if_exists='fail', index=True,
                                    index_label=None, chunksize=None, dtype=None)
    create_index = """
    CREATE INDEX index_{table} ON {table} (dayofweek, hour);
    """.format(table=profile_table)
    Session.execute(create_index)
    Session.commit()
def create_request_backlog(input_file_path, table_name):
    """Load the request backlog CSV into ``table_name`` and index it by request time."""
    df = pd.read_csv(input_file_path, index_col='id')
    print("load {} rows".format(len(df)))
    drop_stmt = """
    DROP TABLE IF EXISTS {};
    """.format(table_name)
    Session.execute(drop_stmt)
    Session.commit()
    df.to_sql(table_name, engine, schema=None, if_exists='fail', index=True,
              index_label=None, chunksize=None, dtype=None)
    index_stmt = """
    CREATE INDEX index_request ON {} (request_datetime);
    """.format(table_name)
    Session.execute(index_stmt)
    Session.commit()
    print("complete db insert")
def transactions2csv(dest):
    """Export the transaction log to CSV at ``dest``.

    Rows whose amount (first column) exceeds ``unit`` are split into multiple
    rows of at most ``unit`` each, with the remainder written last. The first
    row of the file is the column-name header.
    """
    s = Session()
    try:
        statement = banks.get_transaction_log_statement()
        result = s.execute(statement).fetchall()
    finally:
        # Close the session even if the query fails.
        s.close()
    header = True
    # newline='' prevents the csv module from emitting blank rows on Windows.
    with open(dest, 'w', newline='') as csvfile:
        spamwriter = csv.writer(csvfile, delimiter=';', quotechar='"',
                                quoting=csv.QUOTE_NONNUMERIC)
        # Renamed loop variable: `next` shadowed the builtin of the same name.
        for record in result:
            if header:
                spamwriter.writerow(record.keys())
                header = False
            row = list(record.values())
            amount = row[0]
            # Emit `unit`-sized chunks until only the remainder is left.
            # NOTE(review): `unit` is a module-level value not visible here —
            # presumably the per-row amount cap; confirm at definition site.
            while amount > unit:
                row[0] = unit
                amount -= unit
                spamwriter.writerow(row)
            row[0] = amount
            spamwriter.writerow(row)
async def update_content(req, resp):
    """Return a GeoJSON FeatureCollection of polygons intersecting the posted bounds.

    Unions production rows (Warehouse.dbo.Location) with staged edits
    (Warehouse.dbo.LocationEdits), tagging each feature's Status accordingly,
    and writes the serialized collection to the response.
    """
    session = Session()
    try:
        body = await req.media()
        poly_bound = body.get("polybound")
        # Bind parameter instead of f-string interpolation: poly_bound comes
        # from the request body and was an SQL-injection vector.
        features = session.execute(
            text("""
            SELECT LocationID, NameBrand, StoreID,
                   Warehouse.dbo.geography2json(ShapeGeo) as geometry,
                   timestamp, Address, 'Production' as Status
            FROM Warehouse.dbo.Location
            WHERE ShapeGeo.STGeometryType()='POLYGON'
              AND ShapeGeo.STIntersects(geography::STGeomFromText(:bound, 4326))=1
            UNION
            SELECT LocationID, NameBrand, StoreID,
                   Warehouse.dbo.geography2json(ShapeGeo) as geometry,
                   GeoEditDate, Address, 'Staged' as Status
            FROM Warehouse.dbo.LocationEdits
            WHERE ShapeGeo.STGeometryType()='POLYGON'
              AND ShapeGeo.STIntersects(geography::STGeomFromText(:bound, 4326))=1
            """),
            {"bound": poly_bound},
        ).fetchall()
        collection = {
            "type": "FeatureCollection",
            "features": [{
                "type": "Feature",
                # Idiomatic item access instead of explicit dunder calls.
                "geometry": json.loads(item["geometry"]),
                "properties": {
                    "NameBrand": str(item["NameBrand"]),
                    "Timestamp": str(item["timestamp"]),
                    "Status": str(item["Status"]),
                    "Address": str(item["Address"]),
                },
            } for item in features],
        }
        resp.content = simplejson.dumps(collection)
    finally:
        session.close()
def signup_customer():
    """Register a new customer: a customers row, a login row, and a session-store user.

    Returns (json, status): 400 with an error message on empty fields or DB
    failure, 200 with a ``tag`` cookie identifying the new user on success.
    """
    print(request.data)
    req = request.get_json()
    # One validation loop replaces four copy-pasted branches; error messages
    # are unchanged ("<field> cannot be empty").
    values = {}
    for field in ("login", "name", "password", "address"):
        values[field] = req[field]
        if values[field] == '':
            return jsonify(msg="{} cannot be empty".format(field)), 400
    # Open the session only after validation so early returns don't leak it.
    session = Session()
    try:
        res = session.execute(
            "INSERT INTO customers (name, address) "
            "VALUES (:name, :address) RETURNING *",
            {"name": values["name"], "address": values["address"]})
        cid = res.fetchone()[0]
        print("new user cid = {}".format(cid))
        # NOTE(review): the password is stored in plaintext — it should be
        # hashed (e.g. werkzeug.security / passlib) before insertion.
        res = session.execute(
            "INSERT INTO login (loginName, password, cid, eid) "
            "VALUES (:login, :password, :cid, NULL)",
            {"login": values["login"], "password": values["password"],
             "cid": cid})
        user = SnackStoreUser()
        user.data['login'] = values["login"]
        user.data['name'] = values["name"]
        user.data['cart'] = []
        user.commit()
        session.commit()
    except Exception as e:
        session.rollback()
        traceback.print_exc()
        return jsonify(msg=e.args[0]), 400
    finally:
        # Previously the session was never closed on any path.
        session.close()
    ret = jsonify(msg="success")
    ret.set_cookie('tag', user.key)
    return ret, 200
def get_intermediaries_count():
    """Return the number of rows in the intermediary table as an int."""
    stmt = """
    select count(intermediary_org) from intermediary
    """
    s = Session()
    try:
        # scalar() yields the first column of the first row directly;
        # try/finally closes the session even if the query raises
        # (previously it leaked on error).
        return int(s.execute(stmt).scalar())
    finally:
        s.close()
def _get_page(statement, page_num=0, page_size=25, order=default_order):
    """Return one ordered page of results for ``statement``.

    ``order`` is a dict with ``col`` (column index) and ``dir`` ("asc"/"desc").
    """
    s = Session()
    try:
        col = column(statement.columns.keys()[order["col"]])
        order_func = getattr(col, order["dir"]) if order else None
        offset = page_num * page_size
        # Bug fix: LIMIT must be the page size. The old code used
        # offset + page_size, returning up to `offset` extra rows per page.
        pg_stmt = statement.order_by(order_func()).offset(offset).limit(page_size)
        return s.execute(pg_stmt)
    finally:
        # Previously the session leaked if execute raised.
        s.close()
def create_od_profile(df, profile_table, n_weeks):
    """Build an origin-destination profile into ``profile_table``.

    Aggregates trips by (dayofweek, hours_bin, origin cell, destination cell)
    at reduced spatial/temporal resolution, storing trip counts as ``demand``
    and mean trip times, indexed on (dayofweek, hours_bin).
    """
    hours_bin = DESTINATION_PROFILE_TEMPORAL_AGGREGATION
    n_agg = DESTINATION_PROFILE_SPATIAL_AGGREGATION
    df["dayofweek"] = df.datetime_obj.apply(lambda x: x.weekday())
    df["hours_bin"] = (df.datetime_obj.apply(lambda x: x.hour) /
                       hours_bin).astype(int)
    # Coarser grid than the demand profile: cells are n_agg times larger.
    df['origin_x'] = ((df['origin_lon'] - MIN_LON) /
                      (DELTA_LON * n_agg)).astype(int)
    df['origin_y'] = ((df['origin_lat'] - MIN_LAT) /
                      (DELTA_LAT * n_agg)).astype(int)
    df['destination_x'] = ((df['destination_lon'] - MIN_LON) /
                           (DELTA_LON * n_agg)).astype(int)
    df['destination_y'] = ((df['destination_lat'] - MIN_LAT) /
                           (DELTA_LAT * n_agg)).astype(int)
    od_df = df.groupby([
        'dayofweek', 'hours_bin', 'origin_x', 'origin_y', 'destination_x',
        'destination_y'
    ]).trip_time.agg(['count', 'mean'])
    od_df = od_df.rename(columns={
        'count': 'demand',
        'mean': 'trip_time'
    }).reset_index()
    drop_table = """
    DROP TABLE IF EXISTS {};
    """.format(profile_table)
    Session.execute(drop_table)
    Session.commit()
    # Bug fix: the `flavor` argument was removed from DataFrame.to_sql
    # (deprecated in pandas 0.19, removed in 0.23); passing it raises TypeError.
    od_df.to_sql(profile_table, engine, schema=None, if_exists='fail',
                 index=True, index_label=None, chunksize=None, dtype=None)
    create_index = """
    CREATE INDEX index_{table} ON {table} (dayofweek, hours_bin);
    """.format(table=profile_table)
    Session.execute(create_index)
    Session.commit()
def query_total_amount():
    """Return the formatted total traceable amount.

    Uses the smaller of aggregate inflow and outflow across all intermediaries.
    """
    sql = """
    select sum(tint.inflow) inflow, sum(tint.outflow) outflow
    from intermediary tint
    """
    db = Session()
    row = db.execute(sql).first()
    db.close()
    total = min(row.inflow, row.outflow)
    return format_amount(total)
def init_intermediary_table():
    """Drop, recreate, and repopulate the intermediary table."""
    db = Session()
    for statement in (drop_table_intermediary,
                      create_table_intermediary,
                      populate_table_intermediary):
        db.execute(statement)
    db.commit()
    db.close()
def checkout():
    """Turn the cookie-identified user's cart into a new customer order."""
    user = SnackStoreUser(request.cookies.get('tag'))
    session = Session()
    # Resolve the customer id for this login.
    cid_row = session.execute(
        """
        SELECT cid FROM login WHERE loginname = :login
        """, {'login': user.data['login']}).fetchone()
    cid = cid_row[0]
    print('cid = ' + str(cid))
    if cid is None:
        return jsonify(msg="you are a staff"), 400
    # Create the order shell and get its generated id back.
    order_row = session.execute(
        """
        INSERT INTO customerorders (cid, date, status)
        VALUES (:cid, current_date, 'incomplete')
        RETURNING *
        """, {'cid': cid}).fetchone()
    oid = order_row['oid']
    print('old = ' + str(oid))
    # One suborder row per cart entry.
    for entry in user.data['cart']:
        print("item sid = {}, quantity = {}".format(entry['sid'],
                                                    entry['quantity']))
        session.execute(
            """
            INSERT INTO customersuborders (oid, sid, quantity)
            VALUES (:oid, :sid, :quantity)
            """, {
                'oid': oid,
                'sid': entry['sid'],
                'quantity': entry['quantity']
            })
    user.data['cart'] = []
    user.commit()
    session.commit()
    return jsonify(msg="success"), 200
def init_balance_table():
    """Drop, recreate, and repopulate the balance table."""
    db = Session()
    for statement in (drop_table_balance,
                      create_table_balance,
                      populate_table_balance):
        db.execute(statement)
    db.commit()
    db.close()
def init_cashflow_table():
    """Drop, recreate, and repopulate the cashflow table."""
    db = Session()
    for statement in (drop_table_cashflow,
                      create_table_cashflow,
                      populate_table_cashflow):
        db.execute(statement)
    db.commit()
    db.close()
def generate(self, current_time, timestep):
    """Fetch requests in [current_time, current_time + timestep) and wrap them as Customers.

    Rolls back and re-raises on any failure; the scoped session is always
    removed afterwards.
    """
    try:
        requests = Session.execute(
            query.format(table=self.table,
                         t1=current_time,
                         t2=current_time + timestep))
        customers = [Customer(request) for request in requests]
    # Lint fix (E722): the bare `except:` is made explicit. BaseException
    # keeps the original semantics — roll back on *any* interruption
    # (including KeyboardInterrupt/SystemExit) before re-raising.
    except BaseException:
        Session.rollback()
        raise
    finally:
        Session.remove()
    return customers
def get_recommendation_data():
    """Return all product titles and overviews as JSON for the recommender."""
    # TODO Implement failure if database is empty or does not exist
    session = Session()
    # https://docs.sqlalchemy.org/en/14/orm/query.html
    # https://www.tutorialspoint.com/sqlalchemy/sqlalchemy_orm_using_query.htm
    rows = session.execute('SELECT title, overview FROM products').fetchall()
    session.close()
    data_dict = {
        'titles': [row[0] for row in rows],
        'overviews': [row[1] for row in rows],
    }
    return jsonify(data_dict), 200
def generate(self, current_time, timestep):
    """Fetch requests in [current_time, current_time + timestep) and wrap them as Customers.

    The result set is materialized before building Customer objects. Rolls
    back and re-raises on any failure; the scoped session is always removed.
    """
    try:
        # List of requests within a certain timeframe
        requests = list(
            Session.execute(
                query.format(table=self.table,
                             t1=current_time,
                             t2=current_time + timestep)))
        # List of customers associated with each request
        customers = [Customer(request) for request in requests]
    # Lint fix (E722): the bare `except:` is made explicit. BaseException
    # keeps the original semantics — roll back on *any* interruption
    # (including KeyboardInterrupt/SystemExit) before re-raising.
    except BaseException:
        Session.rollback()
        raise
    finally:
        Session.remove()
    return customers
def clear_schedule_cache():
    """Delete every row of the schedule cache stored in the database."""
    db = Session()
    db.execute("DELETE FROM schedule_cache")
    db.commit()
    db.close()
def _total_records(statement):
    """Return the number of rows produced by ``statement``."""
    db = Session()
    rows = db.execute(statement).fetchall()
    db.close()
    return len(rows)
def get_all_aliases_by_one(name, jurisdiction):
    """Fetch all alias rows matching ``name`` within ``jurisdiction``."""
    db = Session()
    stmt = get_all_aliases_by_one_statement(name, jurisdiction)
    rows = db.execute(stmt).fetchall()
    db.close()
    return rows
def get_all_simple_aliases():
    """Fetch every simple-alias row."""
    db = Session()
    rows = db.execute(get_all_simple_aliases_statement()).fetchall()
    db.close()
    return rows