def _create_table(self, model_class, safe=False):
    statement = 'CREATE TABLE IF NOT EXISTS' if safe else 'CREATE TABLE'
    meta = model_class._meta

    columns, constraints = [], []
    if meta.composite_key:
        pk_cols = [meta.fields[f].as_entity()
                   for f in meta.primary_key.field_names]
        constraints.append(Clause(
            SQL('PRIMARY KEY'), EnclosedClause(*pk_cols)))
    for field in meta.sorted_fields:
        columns.append(self.field_definition(field))
        ## No ForeignKeyField support
        #if isinstance(field, ForeignKeyField) and not field.deferred:
        #    constraints.append(self.foreign_key_constraint(field))

    if model_class._meta.constraints:
        for constraint in model_class._meta.constraints:
            if not isinstance(constraint, Node):
                constraint = SQL(constraint)
            constraints.append(constraint)

    return Clause(
        SQL(statement),
        model_class.as_entity(),
        EnclosedClause(*(columns + constraints)))
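# A minimal sketch of the Clause tree the method above builds, assuming
# peewee 2.x nodes (Clause, SQL, EnclosedClause, Entity); the table and
# column names are hypothetical.
create = Clause(
    SQL('CREATE TABLE IF NOT EXISTS'),
    Entity('post'),
    EnclosedClause(
        Clause(Entity('id'), SQL('INTEGER NOT NULL PRIMARY KEY')),
        Clause(Entity('message'), SQL('TEXT NOT NULL'))))
# compiles to: CREATE TABLE IF NOT EXISTS "post"
#              ("id" INTEGER NOT NULL PRIMARY KEY, "message" TEXT NOT NULL)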
def generate_select(self, query, alias_map=None):
    model = query.model_class
    db = model._meta.database

    alias_map = self.calculate_alias_map(query, alias_map)

    if isinstance(query, CompoundSelect):
        clauses = [_StripParens(query)]
    else:
        if not query._distinct:
            clauses = [SQL('SELECT')]
        else:
            clauses = [SQL('SELECT DISTINCT')]
            if query._distinct not in (True, False):
                clauses += [SQL('ON'), EnclosedClause(*query._distinct)]

        # basic support for query limit
        if query._limit is not None or (query._offset and db.limit_max):
            limit = query._limit if query._limit is not None else db.limit_max
            clauses.append(SQL('TOP %s' % limit))

        select_clause = Clause(*query._select)
        select_clause.glue = ', '
        clauses.extend((select_clause, SQL('FROM')))

        if query._from is None:
            clauses.append(model.as_entity().alias(alias_map[model]))
        else:
            clauses.append(CommaClause(*query._from))

    if query._windows is not None:
        clauses.append(SQL('WINDOW'))
        clauses.append(CommaClause(*[
            Clause(SQL(window._alias), SQL('AS'), window.__sql__())
            for window in query._windows]))

    join_clauses = self.generate_joins(query._joins, model, alias_map)
    if join_clauses:
        clauses.extend(join_clauses)

    if query._where is not None:
        clauses.extend([SQL('WHERE'), query._where])

    if query._group_by:
        clauses.extend([SQL('GROUP BY'), CommaClause(*query._group_by)])

    if query._having:
        clauses.extend([SQL('HAVING'), query._having])

    if query._order_by:
        clauses.extend([SQL('ORDER BY'), CommaClause(*query._order_by)])

    # NO OFFSET SUPPORT
    if query._for_update:
        clauses.append(SQL(query._for_update))

    return self.build_query(clauses, alias_map)
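# Hypothetical illustration of the TOP handling above: with this compiler,
# a limited query is rendered with SQL-Server-style TOP instead of a
# trailing LIMIT ('User' is an assumed model).
query = User.select().order_by(User.id).limit(10)
# -> SELECT TOP 10 "t1"."id", ... FROM "user" AS t1 ORDER BY "t1"."id"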
def test_custom_collation(self):
    for i in [1, 4, 3, 5, 2]:
        Post.create(message='p%d' % i)

    pq = Post.select().order_by(
        Clause(Post.message, SQL('collate collate_reverse')))
    self.assertEqual([p.message for p in pq],
                     ['p5', 'p4', 'p3', 'p2', 'p1'])
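# A minimal sketch of how the 'collate_reverse' collation used above could
# be registered. This goes through the stdlib sqlite3 connection directly;
# peewee 2.x's SqliteDatabase exposes it via get_conn(). 'db' is an assumed
# database handle.
def _collate_reverse(s1, s2):
    # Invert the usual cmp-style comparison so ordering is reversed.
    return (s1 < s2) - (s1 > s2)

db.get_conn().create_collation('collate_reverse', _collate_reverse)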
def _date_operator_compare(date, kwargs, dt_converts=datetime_converts):
    if '{0}_operator'.format(date) in kwargs:
        date_oper = getattr(OP, kwargs['{0}_operator'.format(date)].upper())
    else:
        date_oper = OP.EQ

    if date_oper == OP.BETWEEN:
        date_obj_min = dt_converts(kwargs[date][0])
        date_obj_max = dt_converts(kwargs[date][1])
        date_obj = Clause(date_obj_min, R('AND'), date_obj_max)
    else:
        date_obj = dt_converts(kwargs[date])

    return (date_obj, date_oper)
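# Hypothetical calls showing both branches of the helper above: plain
# equality, and BETWEEN driven by a '<field>_operator' kwarg, where the
# Clause glues the two bounds together with a raw AND.
value, op = _date_operator_compare('created', {'created': '2017-01-01'})
# -> (datetime(2017, 1, 1, ...), OP.EQ)
value, op = _date_operator_compare(
    'created', {'created': ['2017-01-01', '2017-12-31'],
                'created_operator': 'between'})
# -> (Clause(min_dt, R('AND'), max_dt), OP.BETWEEN)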
def get_query(model, info, filters={}, order_by=[], page=None,
              paginate_by=None, total_query=None):
    query = None
    if isinstance(model, Query):
        query = model
        model = query.model_class
    if isinstance(model, (Model, BaseModel)):
        alias_map = {}
        selections = next(
            field for field in info.field_asts
            if field.name.value == info.field_name).selection_set.selections
        requested_model, requested_joins, requested_fields = \
            get_requested_models(model, selections, alias_map)
        if query is None:
            query = requested_model.select(*requested_fields)
        if not requested_fields:
            query._select = ()
        query = join(query, requested_joins)
        query = filter(query, filters, alias_map)
        query = order(requested_model, query, order_by, alias_map)
        query = paginate(query, page, paginate_by)
        if page and paginate_by or get_field_from_selections(
                selections, 'total'):  # TODO: refactor 'total'
            if total_query:
                total = Clause(total_query).alias(TOTAL_FIELD)
            else:
                total = Clause(fn.Count(SQL('*')),
                               fn.Over(), glue=' ').alias(TOTAL_FIELD)
            query._select = tuple(query._select) + (total,)
        if not query._select:
            query = query.select(SQL('1'))
        # bottleneck
        # query = query.aggregate_rows()
        return query
    return model
def get_plant_sensor_data(p_uuid, sensor):
    plant = Plant.get(Plant.uuid == p_uuid)
    sensor = Sensor.get(Sensor.name == sensor)
    if plant.role != 'master' and sensor.name not in slave_supported:
        plant = Plant.get(Plant.uuid == UUID(plant.role))
    sensor_data_set = SensorData.select(
        SensorData.value,
        fn.CAST(Clause(fn.strftime('%s', SensorData.created_at),
                       SQL('AS INT'))).alias('timestamp')) \
        .where(SensorData.plant == plant) \
        .where(SensorData.sensor == sensor) \
        .order_by(SensorData.created_at.asc())
    content = list(sensor_data_set)
    return json.dumps(content)
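# The fn.CAST(Clause(...)) idiom above renders as
#     CAST(strftime('%s', "created_at") AS INT)
# i.e. the SQLite row timestamp as integer epoch seconds. Since the same
# expression recurs throughout this module, a hypothetical helper could
# centralize it:
def epoch_seconds(column, alias='timestamp'):
    # SQLite-specific: strftime('%s', ...) yields a unix timestamp string.
    return fn.CAST(
        Clause(fn.strftime('%s', column), SQL('AS INT'))).alias(alias)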
def get(self):
    logger.log_python_api_get(SuggestedProfilesResource.api_url)

    current_user_profile = Profile.get(id=current_user.id)
    skills_list = safe_split_strip_remove_empty(current_user_profile.skills)
    location_part_list = safe_split_strip_remove_empty(
        current_user_profile.location)
    position_title_list = [p.title for p in current_user_profile.positions]

    clauses = [Profile.id != current_user.id]
    or_clauses = []
    for skill in skills_list:
        or_clauses.append(Profile.skills.contains(skill))
    for location_part in location_part_list:
        or_clauses.append(Profile.location.contains(location_part))
    if any(position_title_list):
        subquery = Position.select(Param('1')).where(
            Position.profile == Profile.id,
            Position.title << position_title_list)
        or_clauses.append(Clause(SQL('EXISTS'), subquery))
    if any(or_clauses):
        clauses.append(reduce(operator.or_, or_clauses))

    friends = Friend.select(Friend.friend).where(
        Friend.user == current_user.id).execute()
    clauses.append(~(Profile.id << [f.friend.id for f in friends]))

    profiles = Profile.select().where(
        reduce(operator.and_, clauses)).order_by(fn.Rand()).limit(100)

    for profile in profiles:
        profile.score = 0
        for skill in skills_list:
            if profile.skills and skill in profile.skills:
                profile.score += 10
        for part in location_part_list:
            if profile.location and part in profile.location:
                profile.score += 10
        if any(position_title_list):
            profile.position_fetch = profile.positions.execute()
            for position_title in position_title_list:
                if any(position.title == position_title
                       for position in profile.position_fetch):
                    profile.score += 10

    suggested_profiles = sorted(
        profiles, key=lambda profile: -profile.score)[:2]
    return list(map(lambda p: self.profile_to_dict(p), suggested_profiles))
def get(self):
    logger.log_python_api_get(ProfilesResource.api_url)

    skills = request.args.get('skills')
    role = request.args.get('role')
    location = request.args.get('location')
    company = request.args.get('company')
    name = request.args.get('name')
    hireable = request.args.get('hireable')

    clauses = []
    if skills:
        skills_clauses = [
            Profile.skills.contains(skill)
            for skill in safe_split_strip_remove_empty(skills)
        ]
        if any(skills_clauses):
            clauses.append(reduce(operator.or_, skills_clauses))
    if role:
        subquery = Position.select(Param('1')).where(
            Position.profile == Profile.id, Position.title.contains(role))
        clauses.append(Clause(SQL('EXISTS'), subquery))
    if company:
        clauses.append(Profile.company.contains(company))
    if location:
        clauses.append(Profile.location.contains(location))
    if name:
        clauses.append(Profile.name.contains(name))
    if hireable:
        hireable_clauses = []
        for value in hireable.split(','):
            value = value.strip().lower()
            if value == 'yes':
                hireable_clauses.append(Profile.hireable == True)
            elif value == 'no':
                hireable_clauses.append(Profile.hireable == False)
            elif value == 'unknown':
                hireable_clauses.append(Profile.hireable >> None)
        if any(hireable_clauses):
            clauses.append(reduce(operator.or_, hireable_clauses))

    profiles = Profile.select()
    if any(clauses):
        profiles = profiles.where(reduce(operator.and_, clauses))
    positions = Position.select()
    profiles_with_positions = prefetch(profiles, positions)
    return list(
        map(lambda p: self.profile_to_dict(p), profiles_with_positions))
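# Note on the EXISTS pattern used in the two resources above: Param('1')
# is compiled as a bound parameter, whereas SQL('1') would inline the
# literal. Either works, since EXISTS only tests for the presence of a row:
subquery = Position.select(SQL('1')).where(Position.profile == Profile.id)
exists = Clause(SQL('EXISTS'), subquery)
# -> EXISTS (SELECT 1 FROM "position" WHERE ("profile_id" = "profile"."id"))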
def get_plant_data_selective(p_uuid, sensor, start, stop):
    plant = Plant.get(uuid=p_uuid)
    sensor = Sensor.get(name=sensor)
    if plant.role != 'master' and sensor.name not in slave_supported:
        plant = Plant.get(uuid=plant.role)
    dataset = SensorData.select(
        SensorData.value,
        fn.CAST(Clause(fn.strftime('%s', SensorData.created_at),
                       SQL('AS INT'))).alias('timestamp')) \
        .where(SensorData.plant == plant) \
        .where(SensorData.sensor == sensor) \
        .order_by(SensorData.created_at.desc()) \
        .offset(start) \
        .limit(stop - start) \
        .dicts()
    dataset = list(dataset)
    return json.dumps(dataset)
def get_latest_dataset(p_uuid, s_uuid):
    plant = Plant.get(Plant.uuid == p_uuid)
    sensor = Sensor.get(Sensor.name == s_uuid)
    if plant.role != 'master' and sensor.name not in slave_supported:
        plant = Plant.get(Plant.uuid == UUID(plant.role))
    sd = SensorData.select(
        SensorData.value,
        SensorData.persistant,
        fn.CAST(Clause(fn.strftime('%s', SensorData.created_at),
                       SQL('AS INT'))).alias('timestamp')) \
        .where(SensorData.plant == plant) \
        .where(SensorData.sensor == sensor) \
        .order_by(SensorData.created_at.desc()) \
        .limit(1) \
        .dicts()
    selected = list(sd)[0]
    return json.dumps(selected)
def get_plant_sensor_data_after(p_uuid, sensor, until):
    sensor = Sensor.get(Sensor.name == sensor)
    plant = Plant.get(Plant.uuid == p_uuid)
    if plant.role != 'master' and sensor.name not in slave_supported:
        plant = Plant.get(Plant.uuid == UUID(plant.role))
    date_time = datetime.datetime.fromtimestamp(until + 1)
    sensor_data_set = SensorData.select(
        SensorData.value,
        fn.CAST(Clause(fn.strftime('%s', SensorData.created_at),
                       SQL('AS INT'))).alias('timestamp')) \
        .where(SensorData.plant == plant) \
        .where(SensorData.sensor == sensor) \
        .where(SensorData.created_at > date_time) \
        .order_by(SensorData.created_at.asc()) \
        .dicts()
    sensor_data_set = list(sensor_data_set)
    return json.dumps(sensor_data_set)
def get_sensor_data_high_low(plant, sensor, configuration, target=None):
    if configuration is False:
        mode = SensorData.created_at.asc()
    else:
        mode = SensorData.created_at.desc()
    dataset = SensorData.select(
        SensorData.value.alias('v'),
        fn.CAST(Clause(fn.strftime('%s', SensorData.created_at),
                       SQL('AS INT'))).alias('t')) \
        .where(SensorData.plant == plant) \
        .where(SensorData.sensor == sensor) \
        .order_by(mode)
    if target is not None:
        dataset = dataset.where(SensorData.created_at >= target)
    dataset = dataset.limit(1).dicts()
    if dataset.count() == 0:
        return None
    data = list(dataset)[0]
    return data
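# Hypothetical usage mirroring the 'extreme' selector in plant_sensor()
# below: 'configuration' picks the sort direction and 'target' bounds the
# search to rows created at or after that moment (None means no bound).
midnight = datetime.datetime.combine(
    datetime.date.today(), datetime.datetime.min.time())
extreme_today = get_sensor_data_high_low(plant, sensor, True, midnight)
extreme_ever = get_sensor_data_high_low(plant, sensor, True, None)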
def filter_query_with_subqueries(query, filters):
    """For queries that do not support joining."""
    plain_filters = {}
    subquery_filters = {}
    model = query.model_class
    filters = prepare_filters(query, filters)
    for key, val in filters.items():
        if is_filter_deep(model, key):
            join_field, join_filter_key = key.split(DELIM, 1)
            subquery_filters.setdefault(join_field, {}).update(
                {join_filter_key: val})
        else:
            plain_filters[key] = val
    query = filter_query(query, plain_filters)
    for key, val in subquery_filters.items():
        field = getattr(model, key)
        rel_model = field.rel_model
        query = query.where(Clause(
            SQL('EXISTS'),
            rel_model.select(SQL('1')).filter(**val).where(
                field == rel_model._meta.primary_key)))
    return query
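# Sketch of what the EXISTS branch above generates, assuming DELIM is '__'
# and hypothetical Entry/Blog models where Entry.blog is a foreign key; a
# deep filter of {'blog__title': 'x'} on an Entry query becomes roughly:
#     SELECT ... FROM "entry" WHERE EXISTS (
#         SELECT 1 FROM "blog"
#         WHERE ("blog"."title" = ?) AND ("entry"."blog_id" = "blog"."id"))
query = filter_query_with_subqueries(Entry.select(), {'blog__title': 'x'})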
def generate_select(self, query, alias_map=None):
    model = query.model_class
    db = model._meta.database

    alias_map = self.calculate_alias_map(query, alias_map)

    if isinstance(query, CompoundSelect):
        clauses = [_StripParens(query)]
    else:
        if not query._distinct:
            clauses = [SQL('SELECT')]
        else:
            clauses = [SQL('SELECT DISTINCT')]
            if query._distinct not in (True, False):
                clauses += [SQL('ON'), EnclosedClause(*query._distinct)]

        if query._limit:
            clauses.append(SQL('TOP %s' % query._limit))

        select_clause = Clause(*query._select)
        select_clause.glue = ', '
        clauses.extend((select_clause, SQL('FROM')))

        if query._from is None:
            clauses.append(model.as_entity().alias(alias_map[model]))
        else:
            clauses.append(CommaClause(*query._from))

    # WINDOW semantic is ignored due to lack of knowledge (OVER ...)
    # if query._windows is not None:
    #     clauses.append(SQL('WINDOW'))
    #     clauses.append(CommaClause(*[
    #         Clause(
    #             SQL(window._alias),
    #             SQL('AS'),
    #             window.__sql__())
    #         for window in query._windows]))

    join_clauses = self.generate_joins(query._joins, model, alias_map)
    if join_clauses:
        clauses.extend(join_clauses)

    if query._where is not None:
        clauses.extend([SQL('WHERE'), query._where])

    if query._group_by:
        clauses.extend([SQL('GROUP BY'), CommaClause(*query._group_by)])

    if query._having:
        clauses.extend([SQL('HAVING'), query._having])

    if query._order_by:
        clauses.extend([SQL('ORDER BY'), CommaClause(*query._order_by)])

    # if query._offset:
    #     clauses.append(SQL('OFFSET %s ROWS' % query._offset))
    #
    # if query._limit or (query._offset and db.limit_max):
    #     limit = query._limit or db.limit_max
    #     clauses.append(SQL('FETCH NEXT %s ROWS ONLY' % limit))
    if query._offset:
        raise NotImplementedError('OFFSET is not supported')

    # No locking semantics supported due to lack of knowledge (WITH ...)
    # for_update, no_wait = query._for_update
    # if for_update:
    #     stmt = 'FOR UPDATE NOWAIT' if no_wait else 'FOR UPDATE'
    #     clauses.append(SQL(stmt))

    return self.build_query(clauses, alias_map)
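# If OFFSET support were enabled, the commented-out branch above would emit
# SQL Server 2012+ offset paging, which additionally requires an ORDER BY:
#     SELECT ... FROM "user" ORDER BY "id"
#     OFFSET 20 ROWS FETCH NEXT 10 ROWS ONLY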
def extract_date(self, date_part, date_field):
    return fn.EXTRACT(Clause(R(date_part), R('FROM'), date_field))
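# With R(...) as raw SQL (an alias for SQL in peewee 2.x), the helper
# above renders an ANSI EXTRACT call; a hypothetical call such as
#     extract_date('year', Event.created)
# compiles to EXTRACT(year FROM "created"). 'Event' is an assumed model;
# SQLite lacks EXTRACT, so this targets backends like Postgres or MySQL.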
def plant_sensor(p_uuid, s_uuid):
    # GET: select: latest, prediction, data, current, range, extreme, message
    # GET: max: Boolean (extreme)
    # GET: ever: Boolean (extreme)
    # GET: backlog: Boolean (extreme)
    # GET: start: Integer (data)
    # GET: stop: Integer (data)
    # GET: count: Boolean (data)
    plant = Plant.get(uuid=p_uuid)
    try:
        sensor = Sensor.get(uuid=s_uuid)
    except Exception:
        sensor = Sensor.get(name=s_uuid)
    if plant.role != 'master' and sensor.name not in slave_supported:
        plant = Plant.get(uuid=plant.role)

    data, code = get_data(required=PLANT_SENSOR_GET, restrictive=True)
    if code == 400:
        return data_formatting(400)

    selector = data['select']
    maximum = data['max']
    backlog = data['backlog']
    start = data['start']
    stop = data['stop']
    ever = data['ever']

    collection = {}
    for selected in selector:
        if selected == 'timespan':
            start = datetime.datetime.min if start <= 0 \
                else datetime.datetime.fromtimestamp(start)
            stop = datetime.datetime.max if stop >= 253402297200 \
                else datetime.datetime.fromtimestamp(stop)
        if selected in ['latest', 'current', 'extreme', 'data', 'count',
                        'timespan']:
            data = SensorData.select(
                SensorData.value,
                SensorData.persistant,
                fn.CAST(Clause(fn.strftime('%s', SensorData.created_at),
                               SQL('AS INT'))).alias('timestamp')) \
                .where(SensorData.plant == plant) \
                .where(SensorData.sensor == sensor) \
                .order_by(SensorData.created_at.asc()) \
                .dicts()
        elif selected in ['prediction']:
            data = SensorDataPrediction.select(
                SensorDataPrediction.value,
                fn.CAST(Clause(fn.strftime('%s', SensorDataPrediction.time),
                               SQL('AS INT'))).alias('timestamp')) \
                .where(SensorDataPrediction.plant == plant) \
                .where(SensorDataPrediction.sensor == sensor) \
                .order_by(SensorDataPrediction.created_at.asc()) \
                .dicts()
        elif selected in ['range']:
            data = SensorSatisfactionValue.select() \
                .where(SensorSatisfactionValue.plant == plant) \
                .where(SensorSatisfactionValue.sensor == sensor)

        if selected in ['latest', 'current']:
            data = data.order_by(SensorData.created_at.desc()).limit(1)
            output = list(data)[0]
        elif selected == 'prediction':
            output = list(data)
        elif selected == 'range':
            output = {}
            for spectrum in data:
                output[spectrum.level.name_color] = {
                    'max': spectrum.max_value,
                    'min': spectrum.min_value
                }
                if spectrum.level.name_color == 'red':
                    output[spectrum.level.name_color] = {
                        'max': sensor.max_value,
                        'min': sensor.min_value
                    }
        elif selected == 'extreme':
            target = datetime.datetime.combine(
                datetime.date.today(), datetime.datetime.min.time())
            if backlog and not ever:
                output = None
                while output is None:
                    output = get_sensor_data_high_low(
                        plant, sensor, maximum, target)
                    target = target - datetime.timedelta(days=1)
            else:
                if ever:
                    target = None
                output = get_sensor_data_high_low(
                    plant, sensor, maximum, target)
        elif selected == 'count':
            output = data.count()
        elif selected == 'timespan':
            data = data.where(SensorData.created_at > start,
                              SensorData.created_at < stop)
            output = list(data)
        else:
            data = data.order_by(SensorData.created_at.desc()) \
                .offset(start).limit(stop - start)
            output = list(data)

        if len(selector) > 1:
            collection[selected] = output

    if len(collection.keys()) != 0:
        output = collection
    return data_formatting(data=output)
def plant(p_uuid):
    if request.method == 'GET':
        # GET: select: intervals, created_at, type/species, survived,
        #      location, full, default (full)
        data, code = get_data(required=PLANT_GET, restrictive=True,
                              hardmode=True)
        if code == 400:
            return data_formatting(400)

        collection = {}
        selector = data['select']
        for selected in selector:
            plant = list(
                Plant.select(
                    Plant,
                    fn.CAST(Clause(fn.strftime('%s', Plant.created_at),
                                   SQL('AS INT'))).alias('created_at'))
                .where(Plant.uuid == p_uuid).dicts())[0]
            plant['uuid'] = str(plant['uuid'])
            if selected not in ['full', 'intervals', 'survived']:
                output = {selected: plant[selected]}
            elif selected in ['intervals']:
                output = {
                    'connection_lost': plant['connection_lost'],
                    'non_persistant': int(
                        plant['persistant_hold'] * 5 / 60 / 24),
                    'notification': plant['interval']
                }
            elif selected in ['survived']:
                difference = datetime.datetime.now() - \
                    datetime.datetime.fromtimestamp(plant['created_at'])
                output = float(difference.days + round(
                    (difference.seconds // 3600) / 24, 1))
            else:
                output = plant
            if len(selector) > 1:
                collection[selected] = output
        if len(collection.keys()) != 0:
            output = collection
        return data_formatting(data=output)

    elif request.method == 'POST':
        # POST: replace: name, type, location, ranges, responsible
        # POST: mode: add, reset, online, offline
        # POST: new: DATA
        data, code = get_data(required=PLANT_POST, restrictive=True)
        if code == 400:
            return data_formatting(400)

        keys = list(data.keys())
        if data['mode'] == '':
            mode = 'add' if 'satisfaction' in keys else 'offline'
        else:
            mode = data['mode'].lower()

        plant = Plant.get(uuid=p_uuid)
        if data['name'] != '':
            plant.name = data['name']
        if data['species'] != '':
            plant.species = data['species']
        if data['location'] != '':
            plant.location = data['location']
        if data['ranges']:
            try:
                sensor = Sensor.get(name=data['sensor'])
            except Exception:
                try:
                    sensor = Sensor.get(uuid=data['sensor'])
                except Exception:
                    return data_formatting(400)
            level_yellow = SensorSatisfactionLevel.get(
                SensorSatisfactionLevel.name_color == 'yellow')
            level_green = SensorSatisfactionLevel.get(
                SensorSatisfactionLevel.name_color == 'green')
            value_yellow = SensorSatisfactionValue.get(
                SensorSatisfactionValue.plant == plant,
                SensorSatisfactionValue.sensor == sensor,
                SensorSatisfactionValue.level == level_yellow)
            value_green = SensorSatisfactionValue.get(
                SensorSatisfactionValue.plant == plant,
                SensorSatisfactionValue.sensor == sensor,
                SensorSatisfactionValue.level == level_green)
            value_yellow.min_value = int(request.form.getlist('range[]')[0])
            value_green.min_value = int(request.form.getlist('range[]')[1])
            value_green.max_value = int(request.form.getlist('range[]')[2])
            value_yellow.max_value = int(request.form.getlist('range[]')[3])
            value_green.save()
            value_yellow.save()
            MeshDedicatedDispatch().update('plant satisfaction level',
                                           plant.uuid)
            if sensor.name == 'moisture' and plant.role != 'master':
                # logger.info('executing slave update')
                information = {
                    'min': value_yellow.min_value,
                    'max': value_yellow.max_value
                }
                MeshDedicatedDispatch().slave_update(2, information, plant)
        if data['responsible']:
            person = Person.get(email=data['email'], name=data['firstname'])
            plant.person = person
            plant.save()
        if data['satisfaction']:
            if mode == 'add':
                plant.sat_streak += 1
            else:
                plant.sat_streak = 1
        if data['alive']:
            counterpart = 'online' if mode == 'offline' else 'offline'
            status = PlantNetworkStatus.get(name=mode)
            counterpart = PlantNetworkStatus.get(name=counterpart)
            if plant.role == 'master':
                return data_formatting()
            status = PlantNetworkUptime.get(plant=plant, status=status)
            counterpart = PlantNetworkUptime.get(plant=plant,
                                                 status=counterpart)
            if counterpart.current != 0:
                counterpart.current = 0
                counterpart.save()
            status.current += 1
            status.overall += 1
            status.save()
        if data['notification']:
            _, _, hours = time_request_from_converter(data)
            plant.interval = int(round(hours))
        if data['connection-lost']:
            _, minutes, _ = time_request_from_converter(data)
            plant.connection_lost = int(round(minutes))
        if data['non-persistant']:
            _, minutes, _ = time_request_from_converter(data)
            plant.persistant_hold = int(round(minutes / 5))
        plant.save()
        MeshDedicatedDispatch().update('plant', plant.uuid)
        return data_formatting()