def insert_data_for_bank_by_row(cls, data, client_id):
    """Bulk-insert bank rows (skipping the header row) for a client.

    Returns None on success, or an error payload/message on failure.
    """
    t0 = time.time()
    engine = db.get_engine()

    # Validate that every data row has exactly 7 columns before inserting.
    for row in data[1:]:
        if len(row) != 7:
            return {'num value in row not match column': row}

    try:
        engine.execute(cls.__table__.insert(), [{
            'date': row[0],
            'account_entry': row[1],
            'particulars': row[2],
            'vch_type': row[3],
            'vch_no': row[4],
            'debit': row[5],
            'credit': row[6],
            'client_id': str(client_id),
        } for row in data[1:]])
    except Exception as e:
        return str(e)

    print("SQLAlchemy Core: Total time for " + str(len(data)) +
          " records " + str(time.time() - t0) + " secs")
    return None

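# A hedged usage sketch (the enclosing model class is assumed; 'BankDump'
# and the sample rows are hypothetical): `data` is a parsed CSV, i.e. a
# list whose first entry is the header and whose remaining entries are
# 7-column rows matching the insert mapping above.
#
#     data = [
#         ['date', 'account_entry', 'particulars', 'vch_type',
#          'vch_no', 'debit', 'credit'],
#         ['2019-04-01', 'SBI-1234', 'Opening balance', 'Receipt',
#          '1', '0', '15000'],
#     ]
#     err = BankDump.insert_data_for_bank_by_row(data, client_id=1)
#     if err:
#         ...  # surface the validation or DB error to the caller
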
def insert_data_for_sales_by_row(cls, data, client_obj_id):
    """Bulk-insert sales rows (skipping the header row) for a client.

    Returns None on success, or the error message on failure.
    """
    t0 = time.time()
    engine = db.get_engine()
    try:
        engine.execute(cls.__table__.insert(), [{
            'Sales_id': row[0],
            'Datetimestamp': row[1],
            'Item_code': row[2],
            'Item_name': row[3],
            'Sale_quantity': row[4],
            'Unitcost_price': row[5],
            'Unitsale_price': row[6],
            'Total_sales': row[7],
            'Client_id': str(client_obj_id),
        } for row in data[1:]])
    except Exception as e:
        return str(e)
    print("SQLAlchemy Core: Total time for " + str(len(data)) +
          " records " + str(time.time() - t0) + " secs")
    return None

def insert_data_from_csv(cls):
    """Seed item master data from mst_item.csv for the default client."""
    engine = db.get_engine()
    with open(PATH + 'mst_item.csv', 'r') as f:
        next(f)  # skip the header row
        data = csv.reader(f)
        engine.execute(cls.__table__.insert(), [{
            'Item_code': row[0],
            'Item_name': row[1],
            'Category': " ",
            'Sub_category': " ",
            'Client_id': "1",
        } for row in data])

def fetch_all_by_client(cls, client_id):
    """Return a DataFrame of item columns for the given client."""
    cols = ['Item_code', 'Item_name', 'Category', 'Sub_category']
    engine = db.get_engine()
    with engine.connect() as conn:
        # row[1:-1] drops the surrogate primary key (first) and the
        # Client_id column (last), leaving the columns in `cols` order.
        d = [{c: v for c, v in zip(cols, row[1:-1])}
             for row in conn.execute(cls.__table__.select().where(
                 cls.Client_id == client_id)).fetchall()]
    # Passing columns= keeps the column set stable even when no rows match.
    return pd.DataFrame(d, columns=cols)

def insert_data_from_csv(cls):
    """Seed bank data from bank_dump.csv for the default client."""
    engine = db.get_engine()
    with open(PATH + 'bank_dump.csv', 'r') as f:
        next(f)  # skip the header row
        data = csv.reader(f)
        engine.execute(cls.__table__.insert(), [{
            'date': row[0],
            'account_entry': row[1],
            'particulars': row[2],
            'vch_type': row[3],
            'vch_no': row[4],
            'debit': row[5],
            'credit': row[6],
            'client_id': "1",
        } for row in data])

def fetch_all_by_client(cls, client_id):
    """Return a DataFrame of bank columns for the given client."""
    cols = [
        'date', 'account_entry', 'particulars', 'vch_type', 'vch_no',
        'debit', 'credit'
    ]
    engine = db.get_engine()
    with engine.connect() as conn:
        # row[1:-1] drops the surrogate primary key (first) and the
        # client_id column (last), leaving the columns in `cols` order.
        d = [{c: v for c, v in zip(cols, row[1:-1])}
             for row in conn.execute(cls.__table__.select().where(
                 cls.client_id == client_id)).fetchall()]
    # Passing columns= keeps the column set stable even when no rows match.
    return pd.DataFrame(d, columns=cols)

def fetch_all_by_client(cls, client_id):
    """Return a DataFrame of sales columns for the given client."""
    cols = [
        'Sales_id', 'Datetimestamp', 'Item_code', 'Item_name',
        'Sale_quantity', 'Unitcost_price', 'Unitsale_price', 'Total_sales'
    ]
    engine = db.get_engine()
    with engine.connect() as conn:
        # row[1:-1] drops the surrogate primary key (first) and the
        # Client_id column (last), leaving the columns in `cols` order.
        d = [{c: v for c, v in zip(cols, row[1:-1])}
             for row in conn.execute(cls.__table__.select().where(
                 cls.Client_id == client_id)).fetchall()]
    # Passing columns= keeps the column set stable even when no rows match.
    return pd.DataFrame(d, columns=cols)

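# All three fetch_all_by_client helpers assume the same physical layout:
# an integer surrogate key first, the business columns in `cols` order in
# the middle, and the client id column last, so row[1:-1] lines up with
# `cols`. A hedged illustration with a hypothetical sales row tuple:
#
#     row = (42, 'S001', '2019-04-01 10:00', 'I-10', 'Soap',
#            '2', '18.0', '25.0', '50.0', '1')
#     dict(zip(cols, row[1:-1]))
#     # -> {'Sales_id': 'S001', 'Datetimestamp': '2019-04-01 10:00', ...}
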
def insert_data_from_csv(cls):
    """Seed sales data from sales_details.csv for the default client."""
    engine = db.get_engine()
    with open(PATH + 'sales_details.csv', 'r') as f:
        next(f)  # skip the header row
        data = csv.reader(f)
        engine.execute(cls.__table__.insert(), [{
            'Sales_id': row[0],
            'Datetimestamp': row[1],
            'Item_code': row[2],
            'Item_name': row[3],
            'Sale_quantity': row[4],
            'Unitcost_price': row[5],
            'Unitsale_price': row[6],
            'Total_sales': row[7],
            'Client_id': "1",
        } for row in data])

def insert_data_for_items_by_row(cls, data, client_obj_id):
    """Bulk-insert item rows (skipping the header row) for a client."""
    t0 = time.time()
    engine = db.get_engine()
    engine.execute(cls.__table__.insert(), [{
        'Item_code': row[0],
        'Item_name': row[1],
        'Category': row[2],
        'Sub_category': row[3],
        'Client_id': str(client_obj_id),
    } for row in data[1:]])
    print("SQLAlchemy Core: Total time for " + str(len(data)) +
          " records " + str(time.time() - t0) + " secs")

def delete_client_data(cls, client_id):
    engine = db.get_engine()
    with engine.connect() as conn:
        conn.execute(
            cls.__table__.delete().where(cls.client_id == client_id))

def agent(args):
    """Run a long-running polling agent.

    Periodically updates the database with timeseries active stat data.

    :param args: argparse args
    """
    conf = config.load(args.config_file)
    nova = get_nova(conf)
    database.get_engine(conf.get('db', {}))
    detailed = True

    # Special all-tenant id used to store aggregate stats.
    all_tenant_id = '0' * 32

    # Init last poll time to 0 to trigger the first poll immediately.
    last_polled = 0
    while True:
        if time.time() - last_polled > conf.get('polling_interval', 120):
            with database.session_scope() as session:
                start, end = utils.get_date_interval()

                # Grab usage results for the interval.
                usages = nova.usage.list(start, end, detailed=detailed)
                r = UsageReport(usages)

                # Single timestamp shared by every stat row in this poll.
                now = utils.get_now()

                # Iterate over all tenants.
                for tenant_usage in usages:
                    # Ensure the tenant is in the tenant table.
                    tenant_id = tenant_usage.tenant_id
                    tenant = Tenant.get_or_create(session, tenant_id)
                    session.commit()

                    # Get tenant stats and add them to the session.
                    v, m, s = r.active_stats(tenant_id=tenant_id)
                    session.add(ActiveVCPUS(
                        value=v, time=now, tenant_id=tenant.id))
                    session.add(ActiveMemoryMB(
                        value=m, time=now, tenant_id=tenant.id))
                    session.add(ActiveLocalStorageGB(
                        value=s, time=now, tenant_id=tenant.id))

                # Save aggregate stats under the special all-tenant row.
                v, m, s = r.active_stats()
                print("Active vcpus", v)
                print("Active memory MB", m)
                print("Active storage GB", s)
                all_tenant = Tenant.get_or_create(session, all_tenant_id)
                session.commit()
                session.add(ActiveVCPUS(
                    value=v, time=now, tenant_id=all_tenant.id))
                session.add(ActiveMemoryMB(
                    value=m, time=now, tenant_id=all_tenant.id))
                session.add(ActiveLocalStorageGB(
                    value=s, time=now, tenant_id=all_tenant.id))
            last_polled = time.time()
            print("Updating polling interval")
        time.sleep(1)

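# `database.session_scope()` above is assumed to be the canonical SQLAlchemy
# commit-or-rollback context manager; a minimal sketch under that assumption
# (the `Session` factory name is hypothetical):
#
#     from contextlib import contextmanager
#
#     @contextmanager
#     def session_scope():
#         session = Session()
#         try:
#             yield session
#             session.commit()
#         except Exception:
#             session.rollback()
#             raise
#         finally:
#             session.close()
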
def has_any_tables(model_class):
    from db import db
    engine = db.get_engine(bind=model_class.__bind_key__)
    return model_class.metadata.tables[model_class.__tablename__].exists(
        engine)

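# A hedged usage sketch ('ReportModel' is hypothetical): guard schema
# creation so create_all() only runs when the model's table is missing.
#
#     if not has_any_tables(ReportModel):
#         ReportModel.metadata.create_all(
#             db.get_engine(bind=ReportModel.__bind_key__))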