def run(
    conn_id: int,
    plan_id: int,
    plan_instance_id: int,
    tables_json: str,
):
    """Search one pack of tables against the plan's rules.

    Runs as a background worker task: publishes every per-(table, rule)
    result to a Redis channel, persists hits as Discovery rows, and bumps
    the shared progress counter seeded by ``start``.
    """
    # Progress counter: ``start`` sets this key to 0 and stores the expected
    # total under a sibling key; one incr per result keeps them in sync.
    progress_key = f"discovery:search:progress:connections:{conn_id}:plans:{plan_id}:instances:{plan_instance_id}"
    for db in get_db():
        connection = db.query(m.Connection).get(conn_id)
        # NOTE(review): filtering on m.Connection.id with no explicit join
        # relies on configured relationships (or produces a cross join) —
        # confirm this is intended.
        plan_instance = (
            db.query(m.PlanInstance)
            .filter(
                m.Connection.id == conn_id,
                m.PlanInstance.id == plan_instance_id,
            )
            .one()
        )
        # tables_json is the JSON-serialized pack produced in ``start``.
        tables = [Table.parse_obj(t) for t in json.loads(tables_json)]
        for result in search_tables(
            connection=connection,
            tables=tables,
            rules=[Rule.from_orm(r) for r in plan_instance.plan.rules],
        ):
            # Stream every result (hit or miss) to live subscribers.
            channel = f"discovery:search:connections:{conn_id}:plans:{plan_id}:instances:{plan_instance_id}"
            redis.publish(channel, json.dumps(jsonable_encoder(result.dict())))
            if result.hit:
                d = result.discovery
                discovery = m.Discovery(
                    schema_name=d.schema_name,
                    table_name=d.table_name,
                    column_name=d.column_name,
                    plan_instance_id=plan_instance_id,
                    rule_id=result.discovery.rule.id,
                )
                db.add(discovery)
                # Commit per hit so partial progress survives a worker crash.
                db.commit()
            redis.incr(progress_key)
def post(self):
    """Register a new user.

    Validates the password format, rejects duplicate mobile numbers,
    stores a hashed password, and returns a JWT for the new user.

    Raises:
        ValidationException: when the password is malformed or the
            mobile number is already registered.
    """
    # Removed long-dead commented-out username validation; restore from VCS
    # history if username rules come back.
    if not re.search(self.PASSWORD_REGEXP, auth_ns.payload['password']):
        raise ValidationException(
            error_field_name='password',
            message='Password must contain minimum six characters, at least one letter and one number')
    conn = get_db()
    cursor = conn.cursor()
    # Duplicate check: one account per mobile number.
    sql = "SELECT user_id FROM users WHERE user_mobile= %s"
    data = auth_ns.payload['mobile']
    cursor.execute(sql, (data,))
    rows = cursor.fetchall()
    if rows:
        raise ValidationException(
            error_field_name='mobile',
            message='This mobile number already exists')
    # Never store the plaintext password.
    _hashed_password = generate_password_hash(auth_ns.payload['password'])
    sql = "INSERT INTO users(user_mobile, user_password) VALUES(%s, %s)"
    data = (auth_ns.payload['mobile'], _hashed_password,)
    cursor.execute(sql, data)
    conn.commit()
    # lastrowid is the new user's primary key; bake it into the token.
    access_token = generateToken(cursor.lastrowid)
    return {'success': True, 'access_token': access_token}
async def read(self) -> Tuple[List[dict], str]:
    """Produce one simulated reading per configured sensor.

    Sleeps briefly to mimic hardware latency, then returns a list of
    ``{'pin', 'rtd'}`` dicts plus an empty error string.
    """
    await asyncio.sleep(1)
    readings = [
        {
            'pin': sensor.pin,
            # Fake temperature in the 22.0–25.0 range, one decimal place.
            'rtd': rtd_from_temp(sensor, round(22 + random.random() * 3, 1)),
        }
        for sensor in get_db().query(Sensor).all()
    ]
    return readings, ''
def test_create_edit_post(client, app):
    """End-to-end check of blog post creation and editing (admin only)."""
    # Create/edit require an admin session, so authenticate first.
    response = client.post('/login',
                           data=dict(email='Test@email', password='******'),
                           follow_redirects=True)
    assert b"You logged in succesfully" in response.data

    # Start from a clean slate: drop leftovers from earlier runs.
    db = get_db(app)
    for leftover_title in ('TestTitle', 'TestTitleEdit'):
        db.post.delete_one({'title': leftover_title})

    # Create the post and confirm the flash message.
    response = client.post('/blog/create',
                           data=dict(title='TestTitle', body='TestBody'),
                           follow_redirects=True)
    assert b"Your post has been successfully created" in response.data

    # The new post must be reachable by its slug.
    assert client.get('/blog/testtitle', follow_redirects=True).status_code == 200

    # A successful edit responds with a redirect.
    response = client.post('/blog/edit/testtitle',
                           data=dict(title='TestTitleEdit',
                                     body='TestBodyEdit',
                                     tags='Test,Tag'))
    assert response.status_code == 302

    # The edited post must be reachable under its new slug.
    assert client.get('/blog/testtitleedit', follow_redirects=True).status_code == 200
def add(name, email, username, password):
    """Create and persist a single user record, then confirm on stdout."""
    for session in get_db():
        new_user: model.User = model.User(
            name=name,
            email=email,
            username=username,
            password=password,
        )
        session.add(new_user)
        session.commit()
        print("User created")
def create(payload):
    """Insert a new document, hashing any password field first."""
    # The plaintext password must never reach the database.
    if 'password' in payload:
        payload['password'] = crypt.crypt(payload['password'])
    inserted = get_db()[collection].insert_one(payload)
    # Return the stored document, serialized like every other read path.
    return find({'_id': inserted.inserted_id})
def update(filters, payload):
    """Apply *payload* as a $set to the first document matching *filters*.

    Returns the document as it looks after the update.
    """
    # Hash the password before it is written, mirroring create().
    if 'password' in payload:
        payload['password'] = crypt.crypt(payload['password'])
    updated = get_db()[collection].find_one_and_update(
        serialize_filter(filters),
        {'$set': payload},
        return_document=ReturnDocument.AFTER,
    )
    return serialize_doc(updated)
def post(self, current_user):
    """Insert a new product and return its generated id."""
    conn = get_db()
    cursor = conn.cursor()
    insert_sql = (
        "INSERT INTO products(product_name,product_description,product_price) "
        "VALUES(%s, %s, %s)"
    )
    values = (
        product_ns.payload['name'],
        product_ns.payload['description'],
        product_ns.payload['price'],
    )
    # Parameterized execute keeps user input out of the SQL string.
    cursor.execute(insert_sql, values)
    conn.commit()
    return {'success': True, 'id': cursor.lastrowid}
def delete(self, current_user, product_id):
    """Delete a product by id.

    Returns:
        Success payload, or ``({...}, 400)`` when the product does not exist.
    """
    conn = get_db()
    cursor = conn.cursor(dictionary=True)
    # Existence check; fetchall() is required so rowcount is populated.
    sql = "SELECT product_id FROM products WHERE product_id=%s"
    data = (product_id, )
    cursor.execute(sql, data)
    cursor.fetchall()
    # FIX: removed stray debug print(cursor.rowcount) left from development.
    if cursor.rowcount < 1:
        return {"success": False, "message": "Product not found"}, 400
    sql = "DELETE FROM products WHERE product_id=%s"
    data = (product_id, )
    cursor.execute(sql, data)
    conn.commit()
    return {"success": True, "message": "Product deleted successfully"}
def put(self, current_user, product_id):
    """Partially update a product; NULL payload fields keep stored values.

    Returns:
        Success payload, or ``({...}, 400)`` when the product does not exist.
    """
    conn = get_db()
    cursor = conn.cursor(dictionary=True)
    # Existence check; fetchall() is required so rowcount is populated.
    sql = "SELECT product_id FROM products WHERE product_id=%s"
    data = (product_id, )
    cursor.execute(sql, data)
    cursor.fetchall()
    # FIX: removed stray debug print(cursor.rowcount) left from development.
    if cursor.rowcount < 1:
        return {"success": False, "message": "Product not found"}, 400
    # COALESCE keeps the current column value whenever the payload field is NULL,
    # making every field optional in the request.
    sql = "UPDATE products SET product_name=COALESCE(%s,product_name),product_description=COALESCE(%s,product_description),product_price=COALESCE(%s,product_price) WHERE product_id=%s"
    data = (product_ns.payload['name'],
            product_ns.payload['description'],
            product_ns.payload['price'],
            product_id)
    cursor.execute(sql, data)
    conn.commit()
    return {"success": True, "message": "Product updated successfully"}
def validate(username, password):
    """Check *username*/*password* against the user table.

    Returns:
        Tuple ``(valid, user_id, user_type)``; id/type are 0 when
        authentication fails.
    """
    con = get_db()
    completion = False
    id_user = 0
    type_user = 0
    with con:
        cur = con.cursor()
        # NOTE(review): this scans every user in Python; a parameterized
        # WHERE clause on the username column would push the lookup to SQL.
        cur.execute("SELECT * FROM user")
        for row in cur.fetchall():
            # Assumed column layout: row[2] = username, row[3] = password
            # hash — TODO confirm against the schema.
            dbUser = row[2]
            dbPass = row[3]
            if dbUser == username:
                completion = check_password(dbPass, password)
                # FIX: stop scanning once the user is found instead of
                # iterating the remaining rows pointlessly.
                break
    if completion:
        id_user = get_user_id_from_email(username)
        type_user = get_user_type_from_email(username)
    return (completion, id_user, type_user)
def post(self):
    """Authenticate by mobile/password and issue a JWT.

    The token payload contains 'uid' (user id), 'exp' (expiration date)
    and 'iat' (issued-at time) — see generateToken.
    """
    conn = get_db()
    cursor = conn.cursor(dictionary=True)
    cursor.execute(
        "SELECT user_id,user_password FROM users WHERE user_mobile= %s",
        (auth_ns.payload['mobile'],),
    )
    user = cursor.fetchone()
    # Identical message for unknown mobile and wrong password, so the
    # response does not reveal which half of the credentials failed.
    if not user:
        auth_ns.abort(401, 'Incorrect username or password')
    if not check_password_hash(user['user_password'], auth_ns.payload['password']):
        auth_ns.abort(401, 'Incorrect username or password')
    access_token = generateToken(user['user_id'])
    return {'success': True, 'access_token': access_token}, 200
def init_db():
    """
    initialize a database
    created a database schema according to schema.sql
    """
    with app.app_context():
        # Sanity check that the geopy geocoding helper works before
        # touching the database.
        print(
            '\n\n> Checking if the geopy module is working, entered (lat, long) as (10, 10)'
        )
        print(latlong_to_address(10, 10))
        db = get_db()
        print('\n\n> Started with the creating the database')
        # Apply the full schema from schema.sql as one script.
        with app.open_resource('schema.sql', mode='r') as f:
            store = f.read()
            db.cursor().executescript(store)
            db.commit()
        print('\n\n> Created the database')
        # List the user tables that now exist (excluding sqlite internals).
        print(
            query_db(
                "SELECT name FROM sqlite_master WHERE type ='table' AND name NOT LIKE 'sqlite_%';"
            ), '\n')
        create_the_databse()
        # Seed/verify each actor type in the system.
        check_farmer()
        check_banks()
        check_transporter()
        check_authorities()
        check_shopvendor()
        bank_rtf = []
        crop_price1 = []
        for i in range(3):
            bank_rtf.append(bank_rateofff(i))
            crop_price1.append(crop_price(i))
        crop_sum1 = crop_sum()
        shopvendor_auth1 = shopvendor_auth()
        storage_auth1 = storage_provider_auth()
        # TODO: take the SVID from the submitted form instead of hardcoding it.
        SVID = "SV_191"
        shop_inv1 = shop_inv(SVID)
def on_chord_error(task_id, conn_id, plan_id, plan_instance_id):
    """Celery chord error callback: mark the plan and its instance as
    "error" and notify subscribers that the run has ended.

    ``task_id`` is supplied by Celery's error-link signature and is unused.
    """
    print(f"Error callback: {conn_id} {plan_id}, {plan_instance_id}")
    for db in get_db():
        # NOTE(review): these filters reference m.Connection / m.Plan with
        # no explicit join — relies on configured relationships; confirm.
        plan = (
            db.query(m.Plan)
            .filter(m.Connection.id == conn_id, m.Plan.id == plan_id,)
            .one()
        )
        plan_instance = (
            db.query(m.PlanInstance)
            .filter(
                m.Connection.id == conn_id,
                m.Plan.id == plan_id,
                m.PlanInstance.id == plan_instance_id,
            )
            .one()
        )
        update_status(plan, "error", db)
        update_status(plan_instance, "error", db)
        # Publishing {"done": true} lets listeners stop waiting for progress.
        channel = f"discovery:search:connections:{conn_id}:plans:{plan_id}:instances:{plan_instance_id}"
        redis.publish(channel, json.dumps({"done": True}))
def start(conn_id: int, plan_instance_id: int):
    """Kick off a discovery search run.

    Splits the instance's schemas into table packs, seeds total/progress
    counters in Redis, and dispatches one ``run`` task per pack as a
    Celery chord with a completion callback and an error link.
    """
    for db in get_db():
        plan_instance: m.PlanInstance = db.query(m.PlanInstance).filter(
            m.Connection.id == conn_id, m.PlanInstance.id == plan_instance_id
        ).one()
        update_status(plan_instance, "running", db)
        # Schemas are stored as a JSON-encoded list on the instance row.
        schemas = json.loads(plan_instance.schemas)
        connection = db.query(m.Connection).get(conn_id)
        # Partition the tables so each worker gets roughly equal work.
        packs: List[List[Table]] = get_table_packs(
            connection, schemas, plan_instance.worker_count
        )
        plan_id: int = plan_instance.plan_id
        # One unit of progress per (rule, table) pair across all packs;
        # ``run`` increments the progress key once per result.
        total = len(plan_instance.plan.rules) * sum(
            [len(pack) for pack in packs]
        )
        total_key = f"discovery:search:total:connections:{conn_id}:plans:{plan_id}:instances:{plan_instance_id}"
        progress_key = f"discovery:search:progress:connections:{conn_id}:plans:{plan_id}:instances:{plan_instance_id}"
        redis.set(total_key, total)
        redis.set(progress_key, 0)
        # on_error fires if any task in the chord fails.
        cb = callback.s(conn_id, plan_id, plan_instance_id).on_error(
            on_chord_error.s(conn_id, plan_id, plan_instance_id)
        )
        chord(
            [
                run.s(
                    conn_id,
                    plan_id,
                    plan_instance_id,
                    # default=vars serializes the Table objects via __dict__.
                    json.dumps(pack, default=vars),
                )
                for pack in packs
            ]
        )(cb)
def delete(filters):
    """Remove the first document matching *filters*.

    Returns a dict reporting how many documents were deleted (0 or 1).
    """
    result = get_db()[collection].delete_one(serialize_filter(filters))
    return {'deleted_count': result.deleted_count}
def find(filters=None):
    """Find one document by filters.

    ``filters`` defaults to an empty filter (match any document).
    """
    # FIX: replaced the mutable default argument ``filters={}`` with a
    # None sentinel; behavior for callers is unchanged.
    doc = get_db()[collection].find_one(serialize_filter(filters or {}))
    return serialize_doc(doc)
ids.append(instance.id) ids2.append({'InstanceId': instance.id}) if idx == num_worker: break ec2.instances.filter(InstanceIds=ids).terminate() deregister_inst_elb(ids2) print("Instance removal success") #Autoscaling function while True: cnx = get_db() cursor = cnx.cursor() query = '''SELECT * FROM auto_scale''' cursor.execute(query) row = cursor.fetchone() max_thresh = row[1] min_thresh = row[2] add_r = row[3] red_r = row[4] auto_toggle = row[5] if auto_toggle: # create connection to ec2 print("Automatic scaling enabled ") ec2 = boto3.resource('ec2')
def get(self, current_user):
    """Return every product as a list of dicts (dictionary cursor rows)."""
    cursor = get_db().cursor(dictionary=True)
    cursor.execute(
        "SELECT product_id,product_name,product_price,product_description FROM products"
    )
    return cursor.fetchall()
async def session():
    """Yield a database session, guaranteeing it is closed afterwards."""
    source = get_db()
    db = next(source)
    try:
        yield db
    finally:
        # Close even when the consumer raises mid-use.
        db.close()
def __init__(self):
    """Set up the DB handle and serial-port wrapper this object uses."""
    self.db = get_db()
    port: SerialPortWrapper = SerialPortWrapper()
    self.serial_port = port
def _reset_db():
    """Remove the fixture test user so each run starts from a clean state."""
    session = next(get_db())
    (
        session.query(models.User)
        .filter(models.User.email == test_user_email)
        .delete()
    )
    session.commit()
def get(filters=None, limit=25, page=1):
    """Retrieve a page of documents matching *filters*.

    Args:
        filters: Mongo-style filter dict; None/empty matches everything.
        limit: page size.
        page: 1-based page number.
    """
    # FIX 1: mutable default argument ``filters={}`` replaced with None.
    # FIX 2: the page size was never applied — the query skipped to the
    # page start but returned ALL remaining documents; add .limit(limit).
    docs = (
        get_db()[collection]
        .find(serialize_filter(filters or {}))
        .skip(limit * (page - 1))
        .limit(limit)
    )
    return serialize_doc(docs)
{'up': 6, 'down': [29, 30], 'relay': 13}, {'up': 7, 'down': [31, 32], 'relay': 15}, {'up': 8, 'down': [33, 34, 35], 'relay': 16}, {'up': 9, 'down': [36, 37, 38], 'relay': 18} ] }, { 'label': 'Теплица 3', 'sensors': [ {'up': 10, 'down': [39, 40, 41, 42]} ] } ] if __name__ == '__main__': db = get_db() for house in houses: house_instance = House(label=house['label']) db.add(house_instance) db.commit() for sensor in house['sensors']: up_sensor = Sensor(pin=sensor['up'], sensor_type=1000, location='up', house_id=house_instance.id, high_threshold=HIGH_THRESHOLD, low_threshold=LOW_THRESHOLD) db.add(up_sensor) db.commit() relay_id = None
def test_registration(client, app):
    """A fresh user can register and sees the success flash message."""
    # Ensure the fixture user does not already exist from a prior run.
    database = get_db(app)
    delete_user(database, user)
    response = registrate(client, user['name'], user['email'], user['password'])
    assert b"Your registration was succesfull" in response.data
from app.core import create_app
from app.database import get_db

if __name__ == '__main__':
    # Initialization needs an application context for get_db().
    application = create_app()
    with application.app_context():
        database = get_db()
        database.initialize()