def all(cls, instance_name=None):
    """Return a gathered DAList of every row of the model, in id order.

    Objects already present in this_thread.misc['dbcache'] are reused so
    that at most one Python object exists per database row.

    Parameters
    ----------
    instance_name : str, optional
        Instance name for the resulting DAList; when omitted, a random
        instance name is assigned.
    """
    if 'dbcache' not in this_thread.misc:
        this_thread.misc['dbcache'] = {}
    if instance_name:
        listobj = DAList(instance_name, object_type=cls)
    else:
        listobj = DAList(object_type=cls)
        listobj.set_random_instance_name()
    model_name = cls._model.__name__
    rows = cls._session.query(cls._model).order_by(cls._model.id).all()
    for row in rows:
        # Re-read the cache each iteration; db_cache() below may add to it.
        cache = this_thread.misc['dbcache']
        if model_name in cache and row.id in cache[model_name]:
            listobj.append(cache[model_name][row.id])
            continue
        obj = listobj.appendObject()
        obj.id = row.id
        db_values = {}
        for column in cls._model.__dict__.keys():
            if column == 'id' or column.startswith('_'):
                continue
            db_values[column] = getattr(row, column)
            if db_values[column] is not None:
                obj.db_set(column, db_values[column])
        obj._orig = db_values
        obj.db_cache()
    listobj.gathered = True
    return listobj
def unclassified_entries(self, key=None):
    """Return a DAList of MachineLearningEntry objects for the rows of
    this group that have not yet been classified (active=False).

    Parameters
    ----------
    key : str, optional
        When given, only entries whose ``key`` column matches are
        included.

    Returns
    -------
    DAList
        Gathered list of MachineLearningEntry objects, in id order.
    """
    self._initialize()
    results = DAList()._set_instance_name_for_method()
    results.gathered = True
    # Build the filter criteria once instead of duplicating the whole
    # query expression for the key / no-key cases.
    criteria = {'group_id': self.group_id, 'active': False}
    if key is not None:
        criteria['key'] = key
    query = db.session.execute(
        select(MachineLearning).filter_by(**criteria).order_by(
            MachineLearning.id)).scalars()
    for entry in query:
        results.appendObject(
            MachineLearningEntry,
            ml=self,
            id=entry.id,
            independent=fix_pickle_obj(
                codecs.decode(
                    bytearray(entry.independent, encoding='utf-8'),
                    'base64')),
            create_time=entry.create_time,
            key=entry.key,
            info=fix_pickle_obj(
                codecs.decode(bytearray(entry.info, encoding='utf-8'),
                              'base64')) if entry.info is not None else None)
    return results
def offices_for(org, by_proximity_to=None):
    """Return a DAList of Address objects for an organization's offices.

    Queries the ArcGIS office layer for records whose recipID matches
    ``org.rin``.  When *by_proximity_to* is given, its address is
    geolocated, each office receives a ``distance`` attribute, and the
    list is sorted nearest-first.

    Parameters
    ----------
    org
        Organization with a ``rin`` attribute; when None, returns None.
    by_proximity_to : optional
        Object with an ``address`` to measure distances from.

    Raises
    ------
    Exception
        On a non-200 response from ArcGIS, or when the reference
        address cannot be geolocated.
    """
    if org is None:
        return None
    if by_proximity_to:
        # Bug fix: geolocate (and fail) up front.  Previously the
        # geolocation check happened only AFTER every office distance
        # had already been computed from the un-validated address.
        by_proximity_to.address.geolocate()
        if not by_proximity_to.address.geolocate_success:
            raise Exception('offices_for: failure to geolocate address')
    params = copy.copy(office_base_params)
    params['where'] = "recipID={}".format(org.rin)
    r = requests.get(office_base_url, params=params)
    if r.status_code != 200:
        raise Exception(
            'offices_for: got error code {} from ArcGIS. Response: {}'.format(
                r.status_code, r.text))
    result = r.json()
    offices = DAList(object_type=Address)
    offices.set_random_instance_name()
    for office_data in result['features']:
        attribs = office_data['attributes']
        office = offices.appendObject()
        office.address = attribs['address'].strip()
        office.city = attribs['City'].strip()
        office.state = attribs['State'].strip()
        office.zip = attribs['ZIP'].strip()
        office.location.longitude = attribs['Longitude']
        office.location.latitude = attribs['Latitude']
        office.office_type = attribs['officetype'].strip()
        if attribs['bldgSuite']:
            office.unit = attribs['bldgSuite'].strip()
        if by_proximity_to:
            office.distance = distance_between(by_proximity_to.address, office)
    offices.gathered = True
    if by_proximity_to:
        offices.elements = sorted(offices.elements, key=lambda y: y.distance)
        offices._reset_instance_names()
    return offices
def init(self, *pargs, **kwargs):
    """Set up the object's initial state.

    Docassemble objects are initialised via ``init`` rather than
    ``__init__``; the standard arguments are forwarded to ``super()``.
    """
    super().init(*pargs, **kwargs)
    self.populated = False
    self.clients_list = DAList()
    self.clients_list.auto_gather = False
def signers_who_did_not_sign(self):
    """Return a DAList of the signers who have not signed yet."""
    pending = DAList()
    pending.set_random_instance_name()
    for details in self.info_by_code.values():
        if details['signed']:
            continue
        pending.append(details['signer'])
    return pending
def filter(cls, instance_name, **kwargs):
    """Return a gathered DAList of model rows matching the given filters.

    Each keyword argument names a column on the model; rows where every
    named column equals the given value are returned in id order.
    Objects already present in this_thread.misc['dbcache'] are reused.

    Raises
    ------
    Exception
        If a keyword does not correspond to a column on the model.
    """
    if 'dbcache' not in this_thread.misc:
        this_thread.misc['dbcache'] = {}
    listobj = DAList(instance_name, object_type=cls, auto_gather=False)
    conditions = []
    for column_name, value in kwargs.items():
        if not hasattr(cls._model, column_name):
            raise Exception("filter: class " + cls.__name__ +
                            " does not have column " + column_name)
        conditions.append(getattr(cls._model, column_name) == value)
    rows = cls._session.query(cls._model).filter(*conditions).order_by(
        cls._model.id).all()
    model_name = cls._model.__name__
    for row in rows:
        # Re-read the cache each iteration; db_cache() below may add to it.
        cache = this_thread.misc['dbcache']
        if model_name in cache and row.id in cache[model_name]:
            listobj.append(cache[model_name][row.id])
            continue
        obj = listobj.appendObject()
        obj.id = row.id
        db_values = {}
        for column in cls._model.__dict__.keys():
            if column == 'id' or column.startswith('_'):
                continue
            db_values[column] = getattr(row, column)
            if db_values[column] is not None:
                obj.db_set(column, db_values[column])
        obj._orig = db_values
        obj.db_cache()
    listobj.gathered = True
    return listobj
def get_parent(self, rel_name, instance_name=None):
    """Return the parent objects related to this object via *rel_name*.

    Looks up the relationship definition in ``self._parent_mapping``,
    queries the relationship model for rows whose child column matches
    this object's id, and returns the corresponding parent objects.

    Parameters
    ----------
    rel_name : str
        Key into ``self._parent_mapping`` identifying the relationship.
    instance_name : str, optional
        When given, results are collected in a DAList with that
        instance name; otherwise a plain Python list is returned.

    Raises
    ------
    Exception
        If the underlying data is not ready.
    """
    if not self.ready():
        raise Exception("get_parent: cannot retrieve data")
    info = self._parent_mapping[rel_name]
    model = info['relationship_class']._model
    if instance_name:
        results = DAList(instance_name, object_type=info['parent_class'])
    else:
        results = []
    # Bug fix: the original called model.getattr(...) and
    # db_entry.getattr(...), which raises AttributeError — classes and
    # instances have no 'getattr' method; the builtin getattr() is needed.
    child_attr = getattr(model, info['child_column'])
    rows = self._session.query(model).filter(child_attr == self.id).all()
    if instance_name is None:
        for db_entry in rows:
            results.append(info['parent_class'].by_id(
                getattr(db_entry, info['parent_column'])))
    else:
        for indexno, db_entry in enumerate(rows):
            results.append(
                info['parent_class'].by_id(
                    getattr(db_entry, info['parent_column'])),
                instance_name=instance_name + '[' + str(indexno) + ']')
    return results
def offices_for(org):
    """Query the ArcGIS office layer for the offices of *org* and return
    them as a gathered DAList of Address objects.

    Returns None when *org* is None.  Raises Exception when ArcGIS
    responds with a non-200 status code.
    """
    if org is None:
        return None
    params = copy.copy(office_base_params)
    params['where'] = "recipID={}".format(org.rin)
    response = requests.get(office_base_url, params=params)
    if response.status_code != 200:
        raise Exception(
            'offices_for: got error code {} from ArcGIS. Response: {}'.format(
                response.status_code, response.text))
    offices = DAList(object_type=Address)
    offices.set_random_instance_name()
    for feature in response.json()['features']:
        attribs = feature['attributes']
        office = offices.appendObject()
        office.address = attribs['address'].strip()
        office.city = attribs['City'].strip()
        office.state = attribs['State'].strip()
        office.zip = attribs['ZIP'].strip()
        office.location.longitude = attribs['Longitude']
        office.location.latitude = attribs['Latitude']
        office.office_type = attribs['officetype'].strip()
        if attribs['bldgSuite']:
            office.unit = attribs['bldgSuite'].strip()
    offices.gathered = True
    return offices
def in_category_not_subcategory(self, category_id, check_access=False, user=None):
    """Return the forms in *category_id* that have no subcategory.

    Parameters
    ----------
    category_id
        Category ID matched against each form's 'Category:ID' field
        (both sides compared as strings).
    check_access, user
        Accepted for interface compatibility; not used here.

    Returns
    -------
    list or DAList
        The matching forms, or an empty DAList when nothing matches.
    """
    matches = [
        form for form in self.elements
        if hasattr(form, 'fields')
        and str(form.fields.get('Category:ID')) == str(category_id)
        and not form.fields.get('Subcategory:ID')
    ]
    # Idiom fix: truthiness test instead of `if len(matches):`.
    if matches:
        return matches
    return DAList(there_are_any=False)
def cities_near(org, person):
    """Return a DAList of unique city names for org's offices, ordered
    nearest-first relative to the person's geolocated address.

    Raises Exception when the person's address cannot be geolocated.
    """
    offices = offices_for(org)
    person.address.geolocate()
    if not person.address.geolocate_success:
        raise Exception('cities_near: failure to geolocate address')
    cities = DAList(gathered=True)
    cities.set_random_instance_name()
    by_distance = sorted(
        offices, key=lambda office: distance_between(person.address, office))
    for office in by_distance:
        if office.city not in cities:
            cities.append(office.city)
    cities.gathered = True
    return cities
class MJFClientData(DAObject):
    """Populates and stores a list of invitees read from a spreadsheet.

    Invitees are stored in the public `self.clients_list`.
    """

    def init(self, *pargs, **kwargs):
        """Set up initial state.

        Docassemble objects are initialised via ``init`` rather than
        ``__init__``; standard arguments are forwarded to ``super()``.
        """
        super().init(*pargs, **kwargs)
        self.clients_list = DAList()
        self.clients_list.auto_gather = False
        self.populated = False

    def read_in_data(self, data_file):
        """Read invitee data from an Excel file and populate the list.

        Uses pandas to read the spreadsheet into a data map, then
        rebuilds ``self.clients_list`` from it.

        Parameters
        ----------
        data_file : DAFileList
            A DAFileList containing a single file.
        """
        file_path = data_file.path()
        self._data_map = pd.read_excel(file_path)
        self.populate_client_data()
        self.populated = True

    def populate_client_data(self):
        """Rebuild ``self.clients_list`` from the previously-read spreadsheet.

        Only the required fields are copied: each row becomes a DADict
        with first_name, last_name, email, app_name, app_link and
        video_link keys.
        """
        # TODO: field sanity checking
        self.clients_list.clear()
        self.clients_list.auto_gather = False
        for index, row in self._data_map.iterrows():
            result_item = DADict()
            result_item['first_name'] = row['First Name']
            result_item['last_name'] = row['Last Name']
            result_item['email'] = row['Email']
            result_item['app_name'] = row['Name of App']
            result_item['app_link'] = row['Link to App']
            result_item['video_link'] = row['Link to Video']
            # Not using this right now
            #result_item['licence_agreement'] = row['Licence Agreement']
            self.clients_list.append(result_item)

    def get_populated(self):
        """Return whether client data has been read in."""
        # Bug fix: previously returned self._populated, an attribute
        # that is never set (init and read_in_data set self.populated),
        # so the getter always raised AttributeError.
        return self.populated
def list_of_signers(self):
    """Return a DAList containing every signer, in insertion order."""
    result = DAList()
    result.set_random_instance_name()
    # Only the values are needed, so iterate .values() rather than
    # .items() with an unused key (ruff PERF102).
    for info in self.info_by_code.values():
        result.append(info['signer'])
    return result
def in_subcategory(self, subcategory_id, check_access=False, user=None):
    """Return the forms whose 'Subcategory:ID' field matches
    *subcategory_id* (both sides compared as strings).

    check_access and user are accepted for interface compatibility but
    are not used.  When nothing matches, an empty DAList is returned.
    """
    wanted = str(subcategory_id)
    matches = []
    for form in self.elements:
        if not hasattr(form, 'fields'):
            continue
        sub_id = form.fields.get('Subcategory:ID', False)
        if sub_id and str(sub_id) == wanted:
            matches.append(form)
    if matches:
        return matches
    return DAList(there_are_any=False)