def setUp(self):
    """ Set up project and activity records for the tests """

    s3db = current.s3db

    # Bypass permission checks while creating test fixtures.
    # Fix: the original commented out "auth = current.auth" but still
    # referenced the then-undefined name "auth" below.
    current.auth.override = True

    ptable = s3db.project_project
    atable = s3db.project_activity

    # Project with one linked activity
    p1 = Row(name="Test Project 1", code="TP1")
    p1_id = ptable.insert(**p1)
    p1.update(id=p1_id)

    a1 = Row(name="Test Activity 1", project_id=p1_id)
    a1_id = atable.insert(**a1)
    a1.update(id=a1_id)

    # Activity without a project
    a2 = Row(name="Test Activity 2")
    a2_id = atable.insert(**a2)
    a2.update(id=a2_id)

    # Keep the fixtures accessible to the test methods
    self.p1 = p1
    self.a1 = a1
    self.a2 = a2
def __init__(self, db, data_id, *args, **kwargs):
    """
    Initialize a document row.

    @param db: the DAL database instance, kept for later field lookups
    @param data_id: identifier of the underlying data record (falsy
                    values are normalized to None)

    Remaining positional/keyword arguments are passed through to
    Row.__init__. (Dead commented-out DOC_FIELDS prefetch code removed.)
    """
    from helpers.properties import PropertyManager
    from helpers.document import DOCUMENT_META_DEFAULTS

    self._db = db
    self.__DATA_ID = data_id or None
    self.__doc_fields = None  # lazily populated elsewhere

    Row.__init__(self, *args, **kwargs)

    # Attach meta-property management to this document row
    # (NOTE(review): assumes self.doc_meta is provided via kwargs —
    # confirm against callers)
    PropertyManager(self, self.doc_meta, DOCUMENT_META_DEFAULTS)
def clean_object(d=None, T=lambda x, y: (x, y)):
    """
    Take a Row/Storage/dict and replace unpickleable values
    (Row, Storage, lazyT, Set, Field, functions) with pickle-safe
    equivalents, one nesting level deep.

    @param d: the dict to clean (a demo structure is built when falsy)
    @param T: translator applied to lazyT values (default builds a pair)
    @returns: the cleaned dict (modified in place)
    """
    from gluon.dal import Row, Set, Field
    from gluon.storage import Storage
    from gluon.languages import lazyT
    import datetime

    function_type = type(lambda: 1)

    def clean_value(value):
        # Map a single value to a pickle-safe representation;
        # values that are already safe pass through unchanged.
        if isinstance(value, (Row, Storage)):
            return dict(value)
        if isinstance(value, lazyT):
            return str(T(value.m, value.s))
        if isinstance(value, (Set, Field, function_type)):
            return ""
        return value

    if not d:
        # Demo structure exercising every unpickleable type.
        # Fix: date(2012, 01, 01) used leading-zero literals
        # (Python-2-only syntax); same value written portably.
        d = {"nome": "BRuno",
             "field": Field("test"),
             "row": Row(),
             "sto": Storage(),
             "data": {"birthdate": datetime.date(2012, 1, 1),
                      "nome": "bla",
                      "T": lazyT("aaaa %s", "bbbb"),
                      "l": lambda: 1,
                      "st": Storage(),
                      "set": Set(None, None),
                      "valor": 1.4},
             "numero": 7}

    # Top-level pass
    for item, value in d.items():
        d[item] = clean_value(value)

    # Second pass: clean the values of nested dicts as well
    for item, value in d.items():
        if isinstance(value, dict):
            for iitem, ivalue in value.items():
                value[iitem] = clean_value(ivalue)

    return d
def get_base_location(self,
                      _fields=None,
                      _filter=None,
                      as_rows=False):
    """
    Get the base location of the instance(s)

    @param _fields: fields to retrieve from the location records
                    (None for ALL)
    @param _filter: filter for the locations
    @param as_rows: return the result as Rows object
    @returns: the base location(s) of the current instance
    """

    db = current.db
    s3db = current.s3db

    ltable = s3db[LOCATION]
    rtable = self.rtable

    locations = []
    for r in self.records:
        location = None
        query = None
        if LOCATION_ID in r:
            # Record carries its base location directly
            query = (ltable.id == r[LOCATION_ID])
            if rtable:
                # Join through the resource table, constrained by
                # track ID when available
                query = query & (rtable[LOCATION_ID] == ltable.id)
                if TRACK_ID in r:
                    query = query & (rtable[TRACK_ID] == r[TRACK_ID])
        elif TRACK_ID in r:
            # Only a track ID: resolve the instance type first,
            # then join its table to the location table
            q = (self.table[TRACK_ID] == r[TRACK_ID])
            trackable = db(q).select(limitby=(0, 1)).first()
            table = s3db[trackable.instance_type]
            if LOCATION_ID in table.fields:
                query = ((table[TRACK_ID] == r[TRACK_ID]) &
                         (table[LOCATION_ID] == ltable.id))
        if query:
            if _filter is not None:
                query = query & _filter
            if not _fields:
                location = db(query).select(ltable.ALL,
                                            limitby=(0, 1)).first()
            else:
                location = db(query).select(limitby=(0, 1),
                                            *_fields).first()
        if location:
            locations.append(location)
        else:
            # Ensure we return an entry so that indexes match
            locations.append(Row({"lat": None, "lon": None}))

    if as_rows:
        return Rows(records=locations, compact=False)

    if not locations:
        return None
    elif len(locations) == 1:
        # Single instance: return the location record itself
        return locations[0]
    else:
        return locations
def get_location(self,
                 timestmp=None,
                 _fields=None,
                 _filter=None,
                 as_rows=False,
                 exclude=None):
    """
    Get the current location of the instance(s) (at the given time)

    @param timestmp: last datetime for presence (defaults to current time)
    @param _fields: fields to retrieve from the location records
                    (None for ALL)
    @param _filter: filter for the locations
    @param as_rows: return the result as Rows object
    @param exclude: interlocks to break at (avoids circular check-ins)
    @returns: a location record, or a list of location records (if multiple)

    NOTE(review): despite the docstring, the non-Rows return is always a
    list (no single-element unwrap as in get_base_location) — confirm
    whether callers rely on that before changing it.
    """

    db = current.db
    s3db = current.s3db

    ptable = s3db[PRESENCE]
    ltable = s3db[LOCATION]

    # Fix: default was a shared mutable list (exclude=[]); use a
    # None sentinel instead (backward-compatible for all callers)
    if exclude is None:
        exclude = []

    if timestmp is None:
        timestmp = datetime.utcnow()

    locations = []
    for r in self.records:
        location = None
        if TRACK_ID in r:
            # Most recent presence record up to the given time
            query = ((ptable.deleted == False) & \
                     (ptable[TRACK_ID] == r[TRACK_ID]) & \
                     (ptable.timestmp <= timestmp))
            presence = db(query).select(orderby=~ptable.timestmp,
                                        limitby=(0, 1)).first()
            if presence:
                if presence.interlock:
                    # Checked in to another trackable: follow the
                    # interlock, guarding against circular check-ins
                    exclude = [r[TRACK_ID]] + exclude
                    tablename, record = presence.interlock.split(",", 1)
                    trackable = S3Trackable(tablename, record)
                    record = trackable.records.first()
                    if TRACK_ID not in record or \
                       record[TRACK_ID] not in exclude:
                        location = trackable.get_location(timestmp=timestmp,
                                                          exclude=exclude,
                                                          _fields=_fields).first()
                elif presence.location_id:
                    # Presence points directly at a location record
                    query = (ltable.id == presence.location_id)
                    if _filter is not None:
                        query = query & _filter
                    if _fields is None:
                        location = db(query).select(ltable.ALL,
                                                    limitby=(0, 1)).first()
                    else:
                        location = db(query).select(limitby=(0, 1),
                                                    *_fields).first()

        if not location:
            # No presence-based location: fall back to the base location
            if len(self.records) > 1:
                trackable = S3Trackable(r, rtable=self.rtable)
            else:
                trackable = self
            location = trackable.get_base_location(_fields=_fields)

        if location:
            locations.append(location)
        else:
            # Ensure we return an entry so that indexes match
            locations.append(Row({"lat": None, "lon": None}))

    if as_rows:
        return Rows(records=locations, compact=False)

    if not locations:
        return None
    else:
        return locations
def get_fortunes(self, new_message):
    """Return all fortune rows plus *new_message*, sorted by message."""
    rows = self.db(self.db.Fortune).select(cacheable=True)
    # Append the extra (non-persisted) fortune before sorting
    rows.records.append(Row(new_message))
    return rows.sort(itemgetter('message'))
def __init__(self, meta, *args, **kwargs):
    """
    Initialize the row.

    @param meta: metadata object, stored on the row as META

    Remaining arguments are passed through to Row.__init__.
    """
    self.META = meta
    Row.__init__(self, *args, **kwargs)
    # Mark the row as saved, remembering the id it was loaded with.
    # NOTE(review): set via item access, presumably to make the key
    # literal ('__saved') rather than name-mangled — confirm.
    self['__saved'] = (True, self['id'])
def parse_as_rest(self, patterns, args, vars, queries=None,
                  nested_select=True, custom_select=None):
    """
    Parse a RESTful URL (args) against a list of URL patterns and
    translate it into a database select. Returns a Row with keys:
    status, error, pattern and response.

    @param patterns: list of URL patterns, or 'auto' to generate them
                     from the database tables
    @param args: the request path components
    @param vars: the request variables ('order', 'offset' and 'limit'
                 are honoured for the final select)
    @param queries: a Query, or a dict of per-table Queries, used to
                    pre-filter each table
    @param nested_select: resolve child lookups with a nested select
                          instead of materializing the parent ids
    @param custom_select: alternative select callable with the same
                          signature as Set.select

    EXAMPLE:

        db.define_table('person', Field('name'), Field('info'))
        db.define_table('pet',
                        Field('owner', db.person),
                        Field('name'), Field('info'))

        @request.restful()
        def index():
            def GET(*args, **vars):
                patterns = [
                    "/friends[person]",
                    "/{friend.name.startswith}",
                    "/{friend.name}/:field",
                    "/{friend.name}/pets[pet.owner]",
                    "/{friend.name}/pet[pet.owner]/{pet.name}",
                    "/{friend.name}/pet[pet.owner]/{pet.name}/:field"
                ]
                parser = db.parse_as_rest(patterns, args, vars)
                if parser.status == 200:
                    return dict(content=parser.response)
                else:
                    raise HTTP(parser.status, parser.error)

            def POST(table_name, **vars):
                if table_name == 'person':
                    return db.person.validate_and_insert(**vars)
                elif table_name == 'pet':
                    return db.pet.validate_and_insert(**vars)
                else:
                    raise HTTP(400)
            return locals()
    """
    import re
    from gluon.dal import Row
    db = self
    # {table.field} or {table.field.op} with an optional trailing .not
    re1 = re.compile(
        r'^{[^\.]+\.[^\.]+(\.(lt|gt|le|ge|eq|ne|contains|startswith|year|month|day|hour|minute|second))?(\.not)?}$'
    )
    # tag[reference], e.g. pets[pet.owner]
    re2 = re.compile(r'^.+\[.+\]$')

    def auto_table(table, base='', depth=0):
        # Generate URL patterns for a table and, down to the given
        # depth, for the tables referencing it.
        patterns = []
        for field in db[table].fields:
            if base:
                tag = '%s/%s' % (base, field.replace('_', '-'))
            else:
                tag = '/%s/%s' % (table.replace('_', '-'),
                                  field.replace('_', '-'))
            f = db[table][field]
            if not f.readable:
                continue
            if f.type == 'id' or 'slug' in field or f.type.startswith(
                    'reference'):
                tag += '/{%s.%s}' % (table, field)
                patterns.append(tag)
                patterns.append(tag + '/:field')
            elif f.type.startswith('boolean'):
                tag += '/{%s.%s}' % (table, field)
                patterns.append(tag)
                patterns.append(tag + '/:field')
            elif f.type.startswith('double') or f.type.startswith(
                    'integer'):
                # Numeric fields are exposed as a half-open range
                tag += '/{%s.%s.ge}/{%s.%s.lt}' % (table, field,
                                                   table, field)
                patterns.append(tag)
                patterns.append(tag + '/:field')
            elif f.type.startswith('list:'):
                tag += '/{%s.%s.contains}' % (table, field)
                patterns.append(tag)
                patterns.append(tag + '/:field')
            elif f.type in ('date', 'datetime'):
                tag += '/{%s.%s.year}' % (table, field)
                patterns.append(tag)
                patterns.append(tag + '/:field')
                tag += '/{%s.%s.month}' % (table, field)
                patterns.append(tag)
                patterns.append(tag + '/:field')
                tag += '/{%s.%s.day}' % (table, field)
                patterns.append(tag)
                patterns.append(tag + '/:field')
            if f.type in ('datetime', 'time'):
                tag += '/{%s.%s.hour}' % (table, field)
                patterns.append(tag)
                patterns.append(tag + '/:field')
                tag += '/{%s.%s.minute}' % (table, field)
                patterns.append(tag)
                patterns.append(tag + '/:field')
                tag += '/{%s.%s.second}' % (table, field)
                patterns.append(tag)
                patterns.append(tag + '/:field')
            if depth > 0:
                # Recurse into referencing tables
                for rtable, rfield in db[table]._referenced_by:
                    tag += '/%s[%s.%s]' % (rtable, rtable, rfield)
                    patterns.append(tag)
                    patterns += auto_table(rtable, base=tag,
                                           depth=depth - 1)
        return patterns

    if patterns == 'auto':
        # Auto-generate patterns for all non-auth tables
        patterns = []
        for table in db.tables:
            if not table.startswith('auth_'):
                patterns.append('/%s[%s]' % (table, table))
                patterns += auto_table(table, base='', depth=1)
    else:
        # Expand any ':auto[table]' placeholders in-place
        i = 0
        while i < len(patterns):
            pattern = patterns[i]
            tokens = pattern.split('/')
            if tokens[-1].startswith(':auto') and re2.match(tokens[-1]):
                new_patterns = auto_table(
                    tokens[-1][tokens[-1].find('[') + 1:-1],
                    '/'.join(tokens[:-1]))
                patterns = patterns[:i] + new_patterns + patterns[i + 1:]
                i += len(new_patterns)
            else:
                i += 1

    if '/'.join(args) == 'patterns':
        # Introspection endpoint: list the available patterns
        return Row({
            'status': 200,
            'pattern': 'list',
            'error': None,
            'response': patterns
        })

    for pattern in patterns:
        otable = table = None
        if not isinstance(queries, dict):
            dbset = db(queries)
        i = 0
        tags = pattern[1:].split('/')
        if len(tags) != len(args):
            continue
        for tag in tags:
            if re1.match(tag):
                # {table.field.op} component: build a query on it
                tokens = tag[1:-1].split('.')
                table, field = tokens[0], tokens[1]
                if not otable or table == otable:
                    if len(tokens) == 2 or tokens[2] == 'eq':
                        query = db[table][field] == args[i]
                    elif tokens[2] == 'ne':
                        query = db[table][field] != args[i]
                    elif tokens[2] == 'lt':
                        query = db[table][field] < args[i]
                    elif tokens[2] == 'gt':
                        query = db[table][field] > args[i]
                    elif tokens[2] == 'ge':
                        query = db[table][field] >= args[i]
                    elif tokens[2] == 'le':
                        query = db[table][field] <= args[i]
                    elif tokens[2] == 'year':
                        query = db[table][field].year() == args[i]
                    elif tokens[2] == 'month':
                        query = db[table][field].month() == args[i]
                    elif tokens[2] == 'day':
                        query = db[table][field].day() == args[i]
                    elif tokens[2] == 'hour':
                        query = db[table][field].hour() == args[i]
                    elif tokens[2] == 'minute':
                        query = db[table][field].minutes() == args[i]
                    elif tokens[2] == 'second':
                        query = db[table][field].seconds() == args[i]
                    elif tokens[2] == 'startswith':
                        query = db[table][field].startswith(args[i])
                    elif tokens[2] == 'contains':
                        query = db[table][field].contains(args[i])
                    else:
                        # raise statement rewritten in call form so it
                        # is valid under both Python 2 and 3
                        raise RuntimeError("invalid pattern: %s" % pattern)
                    if len(tokens) == 4 and tokens[3] == 'not':
                        query = ~query
                    elif len(tokens) >= 4:
                        raise RuntimeError("invalid pattern: %s" % pattern)
                    if not otable and isinstance(queries, dict):
                        dbset = db(queries[table])
                    dbset = dbset(query)
                else:
                    raise RuntimeError(
                        "missing relation in pattern: %s" % pattern)
            elif re2.match(tag) and args[i] == tag[:tag.find('[')]:
                # name[table.field] component: traverse the relation
                ref = tag[tag.find('[') + 1:-1]
                if '.' in ref and otable:
                    table, field = ref.split('.')
                    if nested_select:
                        try:
                            dbset = db(db[table][field].belongs(
                                dbset._select(db[otable]._id)))
                        except ValueError:
                            return Row({
                                'status': 400,
                                'pattern': pattern,
                                'error': 'invalid path',
                                'response': None
                            })
                    else:
                        items = [
                            item.id
                            for item in dbset.select(db[otable]._id)
                        ]
                        dbset = db(db[table][field].belongs(items))
                else:
                    table = ref
                    if not otable and isinstance(queries, dict):
                        dbset = db(queries[table])
                    dbset = dbset(db[table])
            elif tag == ':field' and table:
                # ':field' component: return a single field value
                field = args[i]
                if not field in db[table]:
                    break
                try:
                    item = dbset.select(db[table][field],
                                        limitby=(0, 1)).first()
                except ValueError:
                    return Row({
                        'status': 400,
                        'pattern': pattern,
                        'error': 'invalid path',
                        'response': None
                    })
                if not item:
                    return Row({
                        'status': 404,
                        'pattern': pattern,
                        'error': 'record not found',
                        'response': None
                    })
                else:
                    return Row({
                        'status': 200,
                        'response': item[field],
                        'pattern': pattern
                    })
            elif tag != args[i]:
                # Literal component mismatch: try the next pattern
                break
            otable = table
            i += 1
            if i == len(tags) and table:
                # Whole pattern matched: build and run the final select
                ofields = vars.get('order', db[table]._id.name).split('|')
                try:
                    orderby = [
                        db[table][f] if not f.startswith('~')
                        else ~db[table][f[1:]] for f in ofields
                    ]
                except KeyError:
                    return Row({
                        'status': 400,
                        'error': 'invalid orderby',
                        'response': None
                    })
                fields = [field for field in db[table] if field.readable]
                count = dbset.count()
                try:
                    offset = int(vars.get('offset', None) or 0)
                    limits = (offset,
                              int(vars.get('limit', None) or 1000) + offset)
                except ValueError:
                    # Fix: 'return' was missing here, so the error Row
                    # was built and discarded
                    return Row({
                        'status': 400,
                        'error': 'invalid limits',
                        'response': None
                    })
                if count > limits[1] - limits[0]:
                    # Fix: 'return' was missing here as well
                    return Row({
                        'status': 400,
                        'error': 'too many records',
                        'response': None
                    })
                try:
                    if custom_select:
                        response = custom_select(dbset,
                                                 limitby=limits,
                                                 orderby=orderby,
                                                 *fields)
                    else:
                        response = dbset.select(limitby=limits,
                                                orderby=orderby,
                                                *fields)
                except ValueError:
                    return Row({
                        'status': 400,
                        'pattern': pattern,
                        'error': 'invalid path',
                        'response': None
                    })
                return Row({
                    'status': 200,
                    'response': response,
                    'pattern': pattern
                })
    return Row({
        'status': 400,
        'error': 'no matching pattern',
        'response': None
    })