def data_gsm_all(request):
    '''Import unchecked GSM data.

    Expects a POST with a ``file`` field (tab-separated export) whose
    filename embeds the platform id as a zero-padded number before a 'g'
    (e.g. ``00123g...``).  Rows whose date already exists in the database
    for that platform are skipped.  Returns 'Success', or an error message
    with a 500 status on any failure.
    '''
    response = 'Success'
    # Platform id = the (zero-padded) digits preceding a 'g' in the filename.
    ptt_pattern = re.compile('[0]*(?P<platform>[0-9]+)g')
    try:
        file = request.POST['file'].file
        filename = request.POST['file'].filename
        platform = int(ptt_pattern.search(filename).group('platform'))
        query = select([DataGsm.date]).where(DataGsm.platform_ == platform)
        # Read dates that are already in the database
        df = pd.DataFrame.from_records(DBSession.execute(query).fetchall(),
                                       index=DataGsm.date.name,
                                       columns=[DataGsm.date.name])
        # Load raw csv data
        csv_data = pd.read_csv(file, sep='\t', index_col=0, parse_dates=True,
                               # Read those values as NaN
                               na_values=['No Fix', 'Batt Drain', 'Low Voltage'],
                               # Only import the first 8 columns
                               usecols=range(9)
                               )
        # Remove the lines containing NaN
        csv_data.dropna(inplace=True)
        # Filter data with no elevation by converting the column to numeric
        # type.  BUGFIX: DataFrame.convert_objects was deprecated and then
        # removed from pandas; pd.to_numeric(errors='coerce') is the
        # equivalent (non-numeric values become NaN).
        csv_data[DataGsm.ele.name] = pd.to_numeric(
            csv_data[DataGsm.ele.name], errors='coerce')
        # Get the data to insert (dates not already stored)
        data_to_insert = csv_data[~csv_data.index.isin(df.index)]
        # Add the platform to the DataFrame
        data_to_insert[DataGsm.platform_.name] = platform
        # Write into the database
        data_to_insert.to_sql(DataGsm.__table__.name, DBSession.get_bind(),
                              if_exists='append')
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
        # are not swallowed; any other failure yields a 500.
        response = 'An error occured.'
        request.response.status_code = 500
    return response
def map_add(request):
    # Bulk-insert a batch of points (posted as a JSON array) into a table
    # reflected by name from the configured schema.
    points = request.json_body
    name = request.matchdict['name']
    # NOTE(review): 'POINT(lat, lon, srid)' is not standard WKT (WKT is
    # 'POINT(x y)' with no srid inside the text) — presumably this string is
    # parsed by a SQL-Server-specific geometry column type; confirm against
    # the table definition.  Also assumes point['lat'] / point['lon'] and the
    # module-level ``srid`` are all strings: a numeric value would raise
    # TypeError on concatenation — TODO confirm the client payload format.
    geoms = [{'geo':'POINT(' + point['lat'] + ', ' + point['lon'] + ', ' + srid + ')'} for point in points]
    # Reflect only the target table, then execute a multi-row insert.
    Base.metadata.reflect(schema = schema, only = [name])
    DBSession.execute(Base.metadata.tables['.'.join([schema, name])].insert(), geoms)
    return HTTPOk()
def indiv_details(request):
    '''Return details about the individual equipped with the GSM transmitter
    whose ptt is given in the URL: status, monitoring status, birth date,
    equipment duration (in months) and date of the last observation.
    '''
    params = int(request.matchdict['id'])
    # Join transmitters -> caracteristic values -> individuals.
    join_table = join(SatTrx, ObjectsCaracValues,
                      SatTrx.ptt == cast(ObjectsCaracValues.value, Integer)
                      ).join(Individual, ObjectsCaracValues.object == Individual.id)
    query = select([ObjectsCaracValues.value.label('id'),
                    Individual.id.label('ind_id'),
                    Individual.survey_type.label('survey_type'),
                    Individual.status.label('status'),
                    Individual.monitoring_status.label('monitoring_status'),
                    Individual.birth_date.label('birth_date'),
                    Individual.ptt.label('ptt'),
                    ObjectsCaracValues.begin_date.label('begin_date'),
                    ObjectsCaracValues.end_date.label('end_date')]
                   ).select_from(join_table
                   ).where(and_(SatTrx.model.like('GSM%'),
                                ObjectsCaracValues.carac_type == 19,
                                ObjectsCaracValues.object_type == 'Individual')
                   ).where(ObjectsCaracValues.value == params
                   ).order_by(desc(ObjectsCaracValues.begin_date))
    data = DBSession.execute(query).first()
    transaction.commit()
    # An open-ended equipment period (null end_date) means "still equipped".
    if data['end_date'] is None:
        end_date = datetime.datetime.now()
    else:
        end_date = data['end_date']
    result = dict([(key[0], key[1]) for key in data.items()])
    # Equipment duration expressed in whole months.
    result['duration'] = ((end_date.month - data['begin_date'].month)
                          + (end_date.year - data['begin_date'].year) * 12)
    # Date of the most recent location for this individual.
    query = select([V_Individuals_LatLonDate.c.date]
                   ).where(V_Individuals_LatLonDate.c.ind_id == result['ind_id']
                   ).order_by(desc(V_Individuals_LatLonDate.c.date)).limit(1)
    lastObs = DBSession.execute(query).fetchone()
    result['last_observation'] = lastObs['date'].strftime('%d/%m/%Y')
    if result['birth_date'] is not None:
        result['birth_date'] = result['birth_date'].strftime('%d/%m/%Y')
    # begin/end dates were only needed to compute the duration.
    del result['begin_date'], result['end_date']
    # (Removed two leftover debug print() calls; use ``is``/``is not`` for
    # None comparisons.)
    return result
def data_gsm_unchecked_import(request):
    '''Import unchecked GSM data.

    The request carries a single JSON-encoded parameter with an object
    holding 'data' (list of DataGsm ids to import) and 'id_ind'.
    '''
    ptt = request.matchdict['id']
    raw = list(request.params)
    # BUGFIX: parse the payload once.  The original decoded the payload,
    # rebound ``data`` to the inner 'data' list, then called
    # json.loads(data[0]) again to read 'id_ind' — decoding an element of
    # the wrong object.
    payload = json.loads(raw[0])
    data = payload.get('data')
    id_ind = payload.get('id_ind')
    # Fetch the selected rows, then copy them into the destination table.
    select_stmt = DBSession.execute(select([DataGsm], DataGsm.id.in_(data))).fetchall()
    query = insert(AnimalLocation, select_stmt)
    DBSession.execute(query)
def data_gsm_unchecked(request):
    '''Get the unchecked GSM data for one platform.

    ``format=geojson`` returns the last 1000 fixes as a FeatureCollection;
    ``format=json`` returns them as records enriched with distance, speed
    and an 'import' flag (first fix of each hour).
    '''
    platform = int(request.matchdict['id'])
    if request.GET['format'] == 'geojson':
        # Query
        query = select([
            DataGsm.id.label('id'),
            DataGsm.lat,
            DataGsm.lon,
            DataGsm.date
        ]).where(DataGsm.platform_ == platform
        ).where(DataGsm.checked == False
        ).order_by(desc(DataGsm.date)).limit(1000)
        # Create list of features from query result
        features = [
            {'type': 'Feature',
             'properties': {'date': str(date)},
             'geometry': {'type': 'Point',
                          'coordinates': [float(lon), float(lat)]},
             'id': id_}
            for id_, lat, lon, date in DBSession.execute(query).fetchall()]
        transaction.commit()
        result = {'type': 'FeatureCollection', 'features': features}
        return result
    elif request.GET['format'] == 'json':
        # Query
        query = select([
            DataGsm.id.label('id'),
            DataGsm.lat.label('lat'),
            DataGsm.lon.label('lon'),
            DataGsm.ele.label('ele'),
            DataGsm.date.label('date')]
        ).where(DataGsm.platform_ == platform
        ).where(DataGsm.checked == False
        ).order_by(desc(DataGsm.date)).limit(1000)
        data = DBSession.execute(query).fetchall()
        # Guard: with no unchecked rows, ``data[0].keys()`` below would raise.
        if not data:
            return []
        # Load data from the DB then compute the distance between
        # 2 consecutive points.
        df = pd.DataFrame.from_records(data, columns=data[0].keys(),
                                       coerce_float=True)
        X1 = df.iloc[:-1][['lat', 'lon']].values
        X2 = df.iloc[1:][['lat', 'lon']].values
        df['dist'] = np.append(haversine(X1, X2), 0).round(3)
        # Compute the speed (distance / elapsed hours between fixes).
        df['speed'] = (df['dist'] / ((df['date'] - df['date'].shift(-1)).fillna(1)
                                     / np.timedelta64(1, 'h'))).round(3)
        # Values to import: the first fix of each hour.
        # BUGFIX: resample('1H', how='first') — the ``how`` argument was
        # deprecated and then removed from pandas; the method-chain form
        # is the equivalent.
        ids = df.set_index('date').resample('1H').first().dropna().id.values
        df['import'] = df.id.isin(ids)
        df['date'] = df['date'].apply(str)
        # Fill NaN
        df.fillna(value={'ele': -999}, inplace=True)
        return df.to_dict('records')
def rfid_add(request):
    """Create a new RFID object from the posted identifier.

    Returns a success message containing the new row id, or an error
    message with a 500 status when the identifier already exists.
    """
    try:
        module = ObjectRfid()
        module.identifier = request.json_body.get('identifier', None)
        module.creator = request.authenticated_userid
        DBSession.add(module)
        # Read back the id generated for this identifier.
        rfid = (DBSession.query(ObjectRfid.id)
                .filter(ObjectRfid.identifier == module.identifier)
                .scalar())
    except IntegrityError:
        request.response.status_code = 500
        return 'Error: An object with the same identifier already exists.'
    return ' '.join(['Success: RFID module created with ID =', str(rfid), '.'])
def core_individuals_search(request):
    """Search individuals by posting a JSON object containing the fields :
    - criteria : dict object with column_name:value fields
    - order_by : dict object with column_name:'asc' or column_name:'desc' fields
    - offset : int
    - limit : int
    """
    query = select(V_SearchIndiv.c)
    # Look over the criteria list
    criteria = json.loads(request.POST.get("criteria", "{}"))
    for column_name, obj in criteria.items():
        if column_name in V_SearchIndiv.c:
            column = V_SearchIndiv.c[column_name]
            value = obj["value"]
            op = obj.get("op", "is")
            if op in ["is", "null"]:
                query = query.where(column == value)
            elif op == "is not":
                # BUGFIX: the original combined the two SQLAlchemy clauses
                # with the Python ``or`` keyword, which cannot build SQL
                # (bool() of a clause expression raises TypeError); the SQL
                # OR must be built with or_().
                query = query.where(or_(column != value, column == None))
            elif op == "not null":
                query = query.where(column != value)
            elif op == "begin with":
                query = query.where(column.like(value + "%"))
            elif op == "not begin with":
                query = query.where(column.notlike(value + "%"))
    # Set sorting columns and order
    order_by = json.loads(request.POST.get("order_by", "[]"))
    order_by_clause = []
    for obj in order_by:
        column, order = obj.split(":")
        if column in V_SearchIndiv.columns:
            if order == "asc":
                order_by_clause.append(V_SearchIndiv.columns[column].asc())
            elif order == "desc":
                order_by_clause.append(V_SearchIndiv.columns[column].desc())
    if len(order_by_clause) > 0:
        query = query.order_by(*order_by_clause)
    # Total row count before pagination is applied.
    total = DBSession.execute(select([func.count()]).select_from(query.alias())).scalar()
    # Define the limit and offset if exist
    offset = int(request.POST.get("offset", 0))
    limit = int(request.POST.get("per_page", 0))
    if limit > 0:
        query = query.limit(limit)
    if offset > 0:
        query = query.offset(offset)
    # First element is the total count, second the page of rows.
    result = [{"total_entries": total}]
    data = DBSession.execute(query).fetchall()
    result.append([OrderedDict(row) for row in data])
    return result
def monitoredSite_name(request):
    '''Return monitored site names ordered alphabetically.

    With a 'type' query parameter, returns a plain list of names of that
    type; without it, returns the full (name-only) rows as dicts.
    '''
    typeSite = request.params.get('type')
    query = select([MonitoredSite.name]
                   ).order_by(MonitoredSite.name)
    # (Removed a leftover debug print(); use ``is not None`` for the
    # None comparison.)
    if typeSite is not None:
        query = query.where(MonitoredSite.type_ == typeSite)
        data = DBSession.execute(query).fetchall()
        return [row['name'] for row in data]
    else:
        data = DBSession.execute(query).fetchall()
        return [dict(site) for site in data]
def core_individuals_search(request):
    '''Search GSM transmitters by posting a JSON object containing the fields :
    - criteria : dict object with column_name:value fields
    - order_by : dict object with column_name:'asc' or column_name:'desc' fields
    - offset : int
    - limit : int
    '''
    query = select(ObjectGsm.__table__.c)
    # Look over the criteria list
    criteria = request.json_body.get('criteria', {})
    for column, value in criteria.items():
        if column in ObjectGsm.__table__.c and value != '':
            query = query.where(ObjectGsm.__table__.c[column] == value)
    # Define the limit and offset if exist
    limit = int(request.json_body.get('limit', 0))
    offset = int(request.json_body.get('offset', 0))
    if limit > 0:
        query = query.limit(limit)
    if offset > 0:
        # BUGFIX: the original assigned the result back to ``offset`` instead
        # of ``query``, so the requested offset was silently never applied.
        query = query.offset(offset)
    # Set sorting columns and order
    order_by = request.json_body.get('order_by', {})
    order_by_clause = []
    for column, order in order_by.items():
        if column in ObjectGsm.__table__.columns:
            if order == 'asc':
                order_by_clause.append(ObjectGsm.__table__.columns[column].asc())
            elif order == 'desc':
                order_by_clause.append(ObjectGsm.__table__.columns[column].desc())
    if len(order_by_clause) > 0:
        query = query.order_by(*order_by_clause)
    # Run query
    return [OrderedDict(row) for row in DBSession.execute(query).fetchall()]
def core_individuals_stations(request):
    """ Get the stations of an identified individual. Parameter is : id (int)

    Returns a GeoJSON FeatureCollection (oldest fix first) with dates
    encoded as seconds since the Unix epoch, or [] on any failure.
    """
    try:
        id = int(request.params["id"])
        # Query
        query = (
            select(
                [
                    cast(V_Individuals_LatLonDate.c.lat, Float),
                    cast(V_Individuals_LatLonDate.c.lon, Float),
                    V_Individuals_LatLonDate.c.date,
                ]
            )
            .where(V_Individuals_LatLonDate.c.ind_id == id)
            .order_by(desc(V_Individuals_LatLonDate.c.date))
        )
        # Create list of features from query result; the query is newest
        # first, so reverse to emit oldest first.
        epoch = datetime.utcfromtimestamp(0)
        features = [
            {
                "type": "Feature",
                "properties": {"date": (date - epoch).total_seconds()},
                "geometry": {"type": "Point", "coordinates": [lon, lat]},
            }
            for lat, lon, date in reversed(DBSession.execute(query).fetchall())
        ]
        result = {"type": "FeatureCollection", "features": features}
        return result
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # propagate; any other failure still yields an empty list.
        return []
def data_gsm_unchecked_list(request):
    '''List unchecked GSM data, grouped by platform, with the individual
    equipped with each transmitter during the fix period.
    '''
    # Sub-select of (platform, date) for GSM rows.
    # NOTE(review): unlike the Argos/GPS equivalents there is no
    # ``DataGsm.checked == False`` filter here, so already-checked rows are
    # counted too — confirm whether that is intended.
    unchecked = select([
        DataGsm.platform_,
        DataGsm.date
    ]).alias()
    # (Removed a leftover debug ``print(unchecked)``.)
    pie = ProtocolIndividualEquipment
    unchecked_with_ind = select([
        pie.ind_id.label('ind_id'),
        unchecked.c.platform_,
        func.count().label('nb')
    ]).select_from(
        unchecked.join(SatTrx, SatTrx.ptt == unchecked.c.platform_)
        .outerjoin(
            pie,
            # 'DateTime' is presumably the DB column name behind
            # DataGsm.date — confirm against the model definition.
            # An equipment period with a null end_date is still open.
            and_(SatTrx.id == pie.sat_id,
                 unchecked.c['DateTime'] >= pie.begin_date,
                 or_(
                     unchecked.c['DateTime'] < pie.end_date,
                     pie.end_date == None
                 )
                 )
        )
    ).group_by(unchecked.c.platform_, pie.ind_id).order_by(unchecked.c.platform_)
    # Populate Json array
    data = DBSession.execute(unchecked_with_ind).fetchall()
    return [dict(row) for row in data]
def map_add(request):
    '''Return the id and distance of the closest feature of the named table
    within the given tolerance of the supplied lat/lon point.
    '''
    name = request.matchdict['name']
    table = '.'.join([schema, name])
    # SECURITY: lat/lon come from the query string and are interpolated into
    # raw SQL below; coerce them to float first so a crafted value cannot
    # inject SQL (raises ValueError on non-numeric input).
    lat = float(request.params['lat'])
    lon = float(request.params['lon'])
    point = "geo.STDistance(geometry::Point({lat}, {lon}, {srid}))".format(lon=lon, lat=lat, srid=srid)
    tol = float(request.params['tol'])
    # NOTE(review): ``name`` from the URL is still formatted into the SQL;
    # it should be validated against the set of known table names upstream.
    id, dist = DBSession.execute(
        text('select TOP 1 id, {point} from {table} where {point} is not null and {point} < :tol order by {point} asc'.format(point=point, table=table)),
        {'tol': tol}).fetchone()
    return {'id': id, 'dist': dist}
def current_user(request):
    """Return the id and full name of the currently authenticated user."""
    columns = [User.id, User.fullname.label('fullname')]
    query = select(columns).where(User.id == request.authenticated_userid)
    row = DBSession.execute(query).fetchone()
    return dict(row)
def argos_check(request):
    """Mark the posted Argos (type 0) and GPS (type 1) locations as checked.

    Returns the number of rows flagged in each table.
    (Removed the original ``try/except Exception: raise`` wrapper, which
    was a no-op — the bare re-raise changed nothing.)
    """
    argos_id = array('i')
    gps_id = array('i')
    for ptt_obj in request.json_body:
        ptt = ptt_obj['ptt']
        ind_id = ptt_obj['ind_id']
        # Sort each location id into the bucket for its sensor type.
        for location in ptt_obj['locations']:
            if location['type'] == 0:
                argos_id.append(location['id'])
            elif location['type'] == 1:
                gps_id.append(location['id'])
    DBSession.execute(update(Argos).where(Argos.id.in_(argos_id)).values(checked=True))
    DBSession.execute(update(Gps).where(Gps.id.in_(gps_id)).values(checked=True))
    return {'argosChecked': len(argos_id), 'gpsChecked': len(gps_id)}
def users(request):
    """Return the list of all the users with their ids. """
    query = select([User.id, User.fullname.label('fullname')]
                   ).order_by(User.lastname, User.firstname)
    rows = DBSession.execute(query).fetchall()
    return [dict(r) for r in rows]
def main(global_config, **settings):
    """ This function returns a Pyramid WSGI application. """
    # Build the full connection URL: dialect prefix + percent-encoded DSN.
    settings['sqlalchemy.url'] = settings['cn.dialect'] + quote_plus(settings['sqlalchemy.url'])
    engine = engine_from_config(settings, 'sqlalchemy.')
    # Expose schema names and the URL to the rest of the app via dbConfig.
    dbConfig['data_schema'] = settings['data_schema']
    dbConfig['sensor_schema'] = settings['sensor_schema']
    dbConfig['url'] = settings['sqlalchemy.url']
    DBSession.configure(bind=engine)
    Base.metadata.bind = engine
    Base.metadata.create_all(engine)
    # Reflect existing database objects, including views.
    Base.metadata.reflect(views=True, extend_existing=False)
    # Cookie ("auth ticket") based authentication, 24h lifetime.
    authn_policy = AuthTktAuthenticationPolicy(
        settings['auth.secret'],
        cookie_name='ecoReleve-Core',
        callback=role_loader,
        hashalg='sha512',
        max_age=86400)
    authz_policy = ACLAuthorizationPolicy()
    config = Configurator(settings=settings)
    # Add renderer for datetime objects
    json_renderer = JSON()
    json_renderer.add_adapter(datetime, datetime_adapter)
    json_renderer.add_adapter(Decimal, decimal_adapter)
    config.add_renderer('json', json_renderer)
    # Add renderer for CSV files.
    config.add_renderer('csv', CSVRenderer)
    config.add_renderer('pdf', PDFrenderer)
    config.add_renderer('gpx', GPXRenderer)
    # Set up authentication and authorization
    config.set_authentication_policy(authn_policy)
    config.set_authorization_policy(authz_policy)
    config.set_root_factory(SecurityRoot)
    # Set the default permission level to 'read'
    config.set_default_permission('read')
    # pyramid_tm ties the transaction lifecycle to the request.
    config.include('pyramid_tm')
    #config.set_request_factory(request_factory)
    add_routes(config)
    #add_views(config)
    config.scan()
    return config.make_wsgi_app()
def argos_insert(request):
    '''Insert the posted, validated Argos/GPS locations as stations (with
    their protocol rows via the ORM relationships), skipping duplicates,
    then flag the source sensor rows as checked and imported.
    '''
    stations = []
    argos_id = array('i')
    gps_id = array('i')
    # Query that checks for duplicate stations
    check_duplicate_station = select([func.count(Station.id)]).where(
        and_(Station.name == bindparam('name'),
             Station.lat == bindparam('lat'),
             Station.lon == bindparam('lon'),
             Station.ele == bindparam('ele')))
    # For each objet in the request body
    for ptt_obj in request.json_body:
        ptt = ptt_obj['ptt']
        ind_id = ptt_obj['ind_id']
        # For each location associated with this object
        for location in ptt_obj['locations']:
            # Argos
            if location['type'] == 0:
                # Get all the informations about the sensor data
                argos_data = DBSession.query(Argos).filter_by(id=location['id']).one()
                name = 'ARGOS_' + str(argos_data.ptt) + '_' + argos_data.date.strftime('%Y%m%d%H%M%S')
                if DBSession.execute(check_duplicate_station,
                                     {'name': name, 'lat': argos_data.lat,
                                      'lon': argos_data.lon,
                                      'ele': argos_data.ele}).scalar() == 0:
                    argos = ProtocolArgos(ind_id=ind_id,
                                          lc=argos_data.lc,
                                          iq=argos_data.iq,
                                          nbMsg=argos_data.nbMsg,
                                          nbMsg120=argos_data.nbMsg120,
                                          bestLvl=argos_data.bestLvl,
                                          passDuration=argos_data.passDuration,
                                          nopc=argos_data.nopc,
                                          frequency=argos_data.frequency)
                    station = Station(date=argos_data.date, name=name,
                                      fieldActivityId=27,
                                      fieldActivityName='Argos',
                                      lat=argos_data.lat,
                                      lon=argos_data.lon,
                                      ele=argos_data.ele,
                                      protocol_argos=argos)
                    # Add the station in the list
                    argos_id.append(location['id'])
                    stations.append(station)
            # Gps
            elif location['type'] == 1:
                gps_data = DBSession.query(Gps).filter_by(id=location['id']).one()
                name = 'ARGOS_' + str(gps_data.ptt) + '_' + gps_data.date.strftime('%Y%m%d%H%M%S')
                # BUGFIX: this branch previously read lat/lon/ele/date from
                # ``argos_data`` (copy-paste from the Argos branch), using
                # stale values — or raising NameError when the first location
                # of the request was a GPS one.
                if DBSession.execute(check_duplicate_station,
                                     {'name': name, 'lat': gps_data.lat,
                                      'lon': gps_data.lon,
                                      'ele': gps_data.ele}).scalar() == 0:
                    gps = ProtocolGps(ind_id=ind_id,
                                      course=gps_data.course,
                                      speed=gps_data.speed)
                    station = Station(date=gps_data.date, name=name,
                                      fieldActivityId=27,
                                      fieldActivityName='Argos',
                                      lat=gps_data.lat,
                                      lon=gps_data.lon,
                                      ele=gps_data.ele,
                                      protocol_gps=gps)
                    # Add the station in the list
                    gps_id.append(location['id'])
                    stations.append(station)
    # Insert the stations (and protocols thanks to ORM)
    DBSession.add_all(stations)
    # Update the sensor database
    DBSession.execute(update(Argos).where(Argos.id.in_(argos_id)).values(checked=True, imported=True))
    DBSession.execute(update(Gps).where(Gps.id.in_(gps_id)).values(checked=True, imported=True))
    return {'status': 'ok'}
def rfid_active_byDate(request):
    """List the monitored sites whose position record covers the given date.

    Returns the distinct site types and, per site, its name, type and
    lat/lon position.
    """
    when = datetime.strptime(request.params['date'], '%d/%m/%Y %H:%M:%S')
    rows = (DBSession.query(MonitoredSite.name, MonitoredSite.type_,
                            MonitoredSitePosition.lat, MonitoredSitePosition.lon)
            .outerjoin(MonitoredSitePosition,
                       MonitoredSite.id == MonitoredSitePosition.id)
            .filter(MonitoredSitePosition.begin_date <= when)
            # A null end_date means the position is still current.
            .filter(or_(MonitoredSitePosition.end_date >= when,
                        MonitoredSitePosition.end_date == None))
            .all())
    siteName_type = []
    for name, site_type, lat, lon in rows:
        siteName_type.append({'type': site_type,
                              'name': name,
                              'positions': {'lat': lat, 'lon': lon}})
    return {'siteType': list(set(row[1] for row in rows)),
            'siteName_type': siteName_type}
def autocomplete(request):
    '''Return the distinct values of a column of a data-schema table, for
    autocompletion.  Returns [] on any failure or on invalid identifiers.
    '''
    try:
        data_schema = request.registry.settings['data_schema']
        table_name = '.'.join([data_schema, request.params['table_name']])
        column_name = request.params['column_name']
        # SECURITY: both identifiers come straight from the query string and
        # are formatted into raw SQL (identifiers cannot be bound as
        # parameters); reject anything that is not a plain identifier.
        if not (request.params['table_name'].isidentifier()
                and column_name.isidentifier()):
            return []
        query = text('select distinct {column} from {table}'.format(column=column_name, table=table_name))
        return [item[0] for item in DBSession.execute(query).fetchall()]
    except Exception:
        # Narrowed from a bare ``except:``; still best-effort — missing
        # parameters or DB errors yield an empty suggestion list.
        return []
def rfid_detail(request):
    """Return an RFID module together with its most recent monitored-site
    equipment record (site and equip are None when never installed)."""
    identifier = request.matchdict['name']
    row = (DBSession.query(ObjectRfid, MonitoredSite, MonitoredSiteEquipment)
           .outerjoin(MonitoredSiteEquipment,
                      ObjectRfid.id == MonitoredSiteEquipment.obj)
           .outerjoin(MonitoredSite,
                      MonitoredSiteEquipment.site == MonitoredSite.id)
           .filter(ObjectRfid.identifier == identifier)
           .order_by(desc(MonitoredSiteEquipment.begin_date))
           .first())
    module, site, equip = row
    return {'module': module, 'site': site, 'equip': equip}
def monitored_site_equipment_pose(request):
    """Install ('pose') or remove an RFID module on a monitored site.

    POST fields: identifier, type, name, begin, end, action ('pose'|'remove').
    Returns a status message; sets a 500 status on bad input or conflict.
    """
    t = MonitoredSiteEquipment
    pose_info = request.POST
    creator = request.authenticated_userid
    # Column-name-keyed values for the insert/update statement below.
    values = {t.creator.name: creator}
    # Resolve the RFID object and the monitored site referenced by the POST.
    obj = DBSession.query(ObjectRfid.id).filter(ObjectRfid.identifier == pose_info['identifier']).scalar()
    site = DBSession.execute(select([MonitoredSite.id]
        ).where(MonitoredSite.type_ == pose_info['type']
        ).where(MonitoredSite.name == pose_info['name'])).scalar()
    begin_date = parse(request.POST['begin'])
    if(pose_info['action'] == 'pose'):
        stmt = insert(MonitoredSiteEquipment)
        message = '1 row inserted'
        values[t.obj.name] = obj
        values[t.site.name] = site
        values[t.begin_date.name] = begin_date
        # Copy the site position recorded for the installation date.
        # NOTE(review): the ``end_date <= begin_date`` clause looks inverted
        # (a period covering begin_date would need end_date >= begin_date) —
        # confirm the intended semantics before changing.
        lat, lon = DBSession.execute(select([MonitoredSitePosition.lat, MonitoredSitePosition.lon]).where(
            MonitoredSitePosition.site == site).where(MonitoredSitePosition.begin_date <= begin_date).where(
            or_(MonitoredSitePosition.end_date == None, MonitoredSitePosition.end_date <= begin_date))).fetchone()
        values[t.lat.name] = lat
        values[t.lon.name] = lon
        values[t.end_date.name] = parse(pose_info['end'])
    elif(pose_info['action'] == 'remove'):
        # Removal = close the existing equipment row by setting its end date.
        stmt = update(MonitoredSiteEquipment
            ).where(MonitoredSiteEquipment.obj == obj
            ).where(MonitoredSiteEquipment.site == site)
        message = '1 row updated'
        d = parse(pose_info['end'])
        if d is not None:
            values[t.end_date.name] = d
        else:
            request.response.status_code = 500
            return 'Nothing to update'
    else:
        request.response.status_code = 500
        return 'Unknown action'
    try:
        DBSession.execute(stmt, values)
    except IntegrityError as e:
        request.response.status_code = 500
        # NOTE(review): this returns the exception object itself as the
        # response body; presumably the renderer stringifies it — confirm.
        return e
    return message
def login(request):
    """Authenticate the user from POSTed credentials.

    On success, sets the auth-ticket cookie headers on the response and
    returns it; otherwise returns 401.
    """
    user_id = request.POST.get('user_id', '')
    pwd = request.POST.get('password', '')
    # BUGFIX: ``.one()`` raises NoResultFound for an unknown user id, so the
    # ``user is not None`` check below was unreachable and bad logins caused
    # a 500 instead of a 401; ``.first()`` returns None instead.
    user = DBSession.query(User).filter(User.id == user_id).first()
    if user is not None and user.check_password(pwd):
        headers = remember(request, user_id)
        response = request.response
        response.headerlist.extend(headers)
        return response
    else:
        return HTTPUnauthorized()
def monitoredSite_byName(request):
    """Return the most recent position (lat/lon/ele/precision) and id of the
    monitored site matching the given name and type."""
    site_name = request.params.get('name')
    site_type = request.params.get('type')
    joined = join(MonitoredSitePosition, MonitoredSite,
                  MonitoredSitePosition.site == MonitoredSite.id)
    query = select([
        MonitoredSitePosition.lat.label('lat'),
        MonitoredSitePosition.lon.label('lon'),
        MonitoredSite.id.label('id_site'),
        MonitoredSitePosition.ele.label('ele'),
        MonitoredSitePosition.precision.label('precision'),
    ]).select_from(joined).where(
        and_(MonitoredSite.name == site_name,
             MonitoredSite.type_ == site_type)
    ).order_by(desc(MonitoredSitePosition.begin_date))
    row = DBSession.execute(query).first()
    return {key: val for key, val in row.items()}
def monitoredSite_post(request):
    """Return the requested columns of all monitored sites, ordered by the
    requested columns; unknown column names are silently ignored."""
    wanted_cols = request.json_body['cols']
    ordering = request.json_body['order']
    table_cols = MonitoredSite.__table__.c
    selected = [table_cols[name] for name in wanted_cols if name in table_cols]
    query = select(selected)
    for name in ordering:
        if name in table_cols:
            query = query.order_by(table_cols[name])
    rows = DBSession.execute(query).fetchall()
    return [dict(row) for row in rows]
def rfid_validate(request):
    """Run the RFID validation stored procedure and report its outcome."""
    #TODO: SQL SERVER specific code removal
    sql = (""" DECLARE @error int, @nb int; EXEC """
           + dbConfig['data_schema']
           + """.sp_validate_rfid :user, @nb OUTPUT, @error OUTPUT; SELECT @error, @nb;""")
    stmt = text(sql).bindparams(bindparam('user', request.authenticated_userid))
    error_code, nb = DBSession.execute(stmt).fetchone()
    # Guard clause: non-zero error code means the procedure failed.
    if error_code != 0:
        return 'Error : an error occured during validation process (error code : ' + str(error_code) + ' )'
    if nb > 0:
        return 'Success : ' + str(nb) + ' new rows inserted in table T_AnimalLocation.'
    return 'Warning : no new row inserted.'
def core_individual(request):
    """ Get the attributes of an identified individual, with the history of
    its caracteristic values attached as ``indiv.history``."""
    ind_id = int(request.matchdict["id"])
    indiv = DBSession.query(Individual).filter(Individual.id == ind_id).one()
    history_query = select([
        V_Individuals_History.c.label,
        V_Individuals_History.c.value,
        cast(V_Individuals_History.c.begin_date, Date),
        cast(V_Individuals_History.c.end_date, Date),
    ]).where(V_Individuals_History.c.id == ind_id
    ).order_by(V_Individuals_History.c.carac,
               desc(V_Individuals_History.c.begin_date))
    rows = DBSession.execute(history_query).fetchall()

    def fmt_end(d):
        # Open-ended periods keep a null 'to' date.
        return None if d is None else str(d)

    indiv.history = [
        OrderedDict([("name", label), ("value", value),
                     ("from", str(begin)), ("to", fmt_end(end))])
        for label, value, begin, end in rows
    ]
    return indiv
def transmitter_values(request):
    ''' Get the different values of the field_name given in parameter.
    If a parameter limit is passed, then limit the number of values returned.
    '''
    field = request.params['field_name']
    max_rows = int(request.params.get('limit', 0))
    cols = ObjectGsm.__table__.columns
    # Guard clause: unknown columns yield an empty list.
    if field not in cols:
        return []
    query = (select([cols[field]])
             .where(cols[field] != None)
             .order_by(cols[field])
             .distinct())
    if max_rows > 0:
        query = query.limit(max_rows)
    return [str(row[field]) for row in DBSession.execute(query).fetchall()]
def argos_unchecked_list(request): """Returns the unchecked sensor data summary. """ # SQL query unchecked = union_all( select([ Argos.pk, Argos.ptt.label('ptt'), Argos.date, literal('argos/gps').label('type') ]).where(Argos.checked == False), select([ Gps.pk, Gps.ptt.label('ptt'), Gps.date, literal('argos/gps').label('type') ]).where(Gps.checked == False), select([ Gsm.pk_id, Gsm.fk_ptt.label('ptt'), Gsm.date, literal('gsm').label('type') ]).where(Gsm.checked == False) ).alias() # Add the bird associated to each ptt. pie = ProtocolIndividualEquipment unchecked_with_ind = select([ pie.ind_id.label('ind_id'), 'ptt', func.count().label('count'), 'type' ]).select_from( unchecked.join(SatTrx, SatTrx.ptt == unchecked.c.ptt) .outerjoin( pie, and_(SatTrx.id == pie.sat_id, unchecked.c.date >= pie.begin_date, or_( unchecked.c.date < pie.end_date, pie.end_date == None ) ) ) ).group_by('ptt', 'type', pie.ind_id)#.order_by('ptt') # Populate Json array data = DBSession.execute(unchecked_with_ind).fetchall() return [dict(row) for row in data]
def core_individuals_values(request):
    """ Get the different values of the field_name given in parameter.
    If a parameter limit is passed, then limit the number of values returned.
    """
    field = request.params["field_name"]
    max_items = int(request.params.get("limit", 0))
    # Guard clause: unknown columns yield an empty list.
    if field not in V_SearchIndiv.columns:
        return []
    col = V_SearchIndiv.c[field]
    query = select([col]).where(col != None).order_by(col).distinct()
    if max_items > 0:
        query = query.limit(max_items)
    return [str(row[field]) for row in DBSession.execute(query).fetchall()]
def rfids_geoJSON(request):
    '''Return the RFID monitored sites matching the posted criteria as a
    GeoJSON FeatureCollection.
    '''
    table = Base.metadata.tables['RFID_MonitoredSite']
    # Criteria are posted as a list of {Column, Operator, Value} objects.
    criteria = request.json_body.get('criteria', {})
    # (Removed two leftover debug print() calls.)
    query = select(table.c)
    for obj in criteria:
        # eval_binary_expr builds a SQLAlchemy clause from the posted
        # column/operator/value triple.
        query = query.where(eval_binary_expr(table.c[obj['Column']],
                                             obj['Operator'], obj['Value']))
    data = DBSession.execute(query).fetchall()
    geoJson = []
    for row in data:
        geoJson.append({'type': 'Feature',
                        'properties': {'name': row['Name']},
                        'geometry': {'type': 'Point',
                                     'coordinates': [row['lon'], row['lat']]}})
    return {'type': 'FeatureCollection', 'features': geoJson}