def signers_who_did_not_sign(self):
    """Return a randomly-named DAList of signers whose entry is not yet marked signed."""
    pending = DAList()
    pending.set_random_instance_name()
    for entry in self.info_by_code.values():
        if not entry['signed']:
            pending.append(entry['signer'])
    return pending
def filter(cls, instance_name, **kwargs):
    """Return a DAList named ``instance_name`` of objects for the rows matching
    the given column=value criteria, ordered by primary key.

    Each keyword argument names a column on the backing model; an unknown
    column raises an Exception.  Objects already built during this request are
    reused from the per-request cache in ``this_thread.misc['dbcache']``.
    """
    if 'dbcache' not in this_thread.misc:
        this_thread.misc['dbcache'] = {}
    results = DAList(instance_name, object_type=cls, auto_gather=False)
    criteria = []
    for column_name, value in kwargs.items():
        if not hasattr(cls._model, column_name):
            raise Exception("filter: class " + cls.__name__ + " does not have column " + column_name)
        criteria.append(getattr(cls._model, column_name) == value)
    query = cls._session.query(cls._model).filter(*criteria).order_by(cls._model.id)
    model_name = cls._model.__name__
    for row in list(query.all()):
        # Re-read the cache each iteration: obj.db_cache() below adds entries
        # to it while the loop is running.
        cached_for_model = this_thread.misc['dbcache'].get(model_name, {})
        if row.id in cached_for_model:
            results.append(cached_for_model[row.id])
            continue
        obj = results.appendObject()
        obj.id = row.id
        stored = {}
        for column in cls._model.__dict__:
            if column == 'id' or column.startswith('_'):
                continue
            stored[column] = getattr(row, column)
            if stored[column] is not None:
                obj.db_set(column, stored[column])
        obj._orig = stored
        obj.db_cache()
    results.gathered = True
    return results
def get_parent(self, rel_name, instance_name=None):
    """Return the parent objects related to this object via the named relationship.

    Queries the relationship (join) model for rows whose child column equals
    this object's id, then resolves each parent by its primary key.

    :param rel_name: key into ``self._parent_mapping`` identifying the relationship
    :param instance_name: optional DAList instance name; when omitted, a plain
        Python list is returned instead of a DAList
    :raises Exception: if this object's data has not been retrieved yet
    """
    if not self.ready():
        raise Exception("get_parent: cannot retrieve data")
    info = self._parent_mapping[rel_name]
    model = info['relationship_class']._model
    if instance_name:
        results = DAList(instance_name, object_type=info['parent_class'])
    else:
        results = []
    # Bug fix: the original called model.getattr(...) / db_entry.getattr(...),
    # which raises AttributeError -- getattr() is a builtin, not a method.
    rows = list(
        self._session.query(model).filter(
            getattr(model, info['child_column']) == self.id).all())
    if instance_name is None:
        for db_entry in rows:
            results.append(info['parent_class'].by_id(
                getattr(db_entry, info['parent_column'])))
    else:
        # Bug fix: instance_name belongs to by_id(), which names the newly
        # constructed object; DAList.append() silently ignores that keyword.
        for indexno, db_entry in enumerate(rows):
            results.append(info['parent_class'].by_id(
                getattr(db_entry, info['parent_column']),
                instance_name=instance_name + '[' + str(indexno) + ']'))
    return results
def all(cls, instance_name=None):
    """Return a DAList with one object per row of the backing table, ordered by id.

    When ``instance_name`` is omitted, the list receives a random instance
    name.  Objects already built during this request are reused from the
    per-request cache in ``this_thread.misc['dbcache']``.
    """
    if 'dbcache' not in this_thread.misc:
        this_thread.misc['dbcache'] = {}
    if instance_name:
        collection = DAList(instance_name, object_type=cls)
    else:
        collection = DAList(object_type=cls)
        collection.set_random_instance_name()
    model_name = cls._model.__name__
    for row in list(cls._session.query(cls._model).order_by(cls._model.id).all()):
        # Re-read the cache each iteration: obj.db_cache() below adds entries
        # to it while the loop is running.
        cached_for_model = this_thread.misc['dbcache'].get(model_name, {})
        if row.id in cached_for_model:
            collection.append(cached_for_model[row.id])
            continue
        obj = collection.appendObject()
        obj.id = row.id
        stored = {}
        for column in cls._model.__dict__:
            if column == 'id' or column.startswith('_'):
                continue
            stored[column] = getattr(row, column)
            if stored[column] is not None:
                obj.db_set(column, stored[column])
        obj._orig = stored
        obj.db_cache()
    collection.gathered = True
    return collection
def cities_near(org, person):
    """Return a randomly-named DAList of unique office cities for *org*,
    ordered from nearest to farthest from *person*'s geolocated address.

    :raises Exception: if the person's address cannot be geolocated
    """
    offices = offices_for(org)
    person.address.geolocate()
    if not person.address.geolocate_success:
        raise Exception('cities_near: failure to geolocate address')
    cities = DAList(gathered=True)
    cities.set_random_instance_name()
    nearest_first = sorted(
        offices, key=lambda office: distance_between(person.address, office))
    for office in nearest_first:
        if office.city not in cities:
            cities.append(office.city)
    cities.gathered = True
    return cities
class MJFClientData(DAObject):
    """Populates and stores a list of invitees read from a spreadsheet.

    Invitees are stored in the public ``self.clients_list``.
    """

    def init(self, *pargs, **kwargs):
        """Initialise the object.

        docassemble's DAObject uses ``init`` (not ``__init__``) as its
        initialisation hook, so that is what is overridden here.

        Parameters
        ----------
        Standard params for initialising ``super()``.
        """
        super().init(*pargs, **kwargs)
        self.clients_list = DAList()
        self.clients_list.auto_gather = False
        self.populated = False

    def read_in_data(self, data_file):
        """Read data from an Excel file and populate the internal DAList.

        Uses pandas to read the spreadsheet into a data map, then rebuilds
        ``self.clients_list`` from it and marks the object populated.

        Parameters
        ----------
        data_file : DAFileList with a single file
        """
        file_path = data_file.path()
        self._data_map = pd.read_excel(file_path)
        self.populate_client_data()
        self.populated = True

    def populate_client_data(self):
        """Rebuild ``self.clients_list`` from the previously-read spreadsheet.

        Only the required fields are copied.  The result is a DAList of
        DADicts, e.g.::

            - first_name: ...
              last_name: ...
              email: ...

        TODO: field sanity checking.
        """
        self.clients_list.clear()
        self.clients_list.auto_gather = False
        for _index, row in self._data_map.iterrows():
            result_item = DADict()
            result_item['first_name'] = row['First Name']
            result_item['last_name'] = row['Last Name']
            result_item['email'] = row['Email']
            result_item['app_name'] = row['Name of App']
            result_item['app_link'] = row['Link to App']
            result_item['video_link'] = row['Link to Video']
            # Not using this right now
            # result_item['licence_agreement'] = row['Licence Agreement']
            self.clients_list.append(result_item)

    def get_populated(self):
        """Return whether client data has been loaded.

        Bug fix: the original returned ``self._populated``, but the flag is
        stored as ``self.populated`` (set in ``init`` and ``read_in_data``),
        so the getter could never succeed.
        """
        return self.populated
def list_of_signers(self):
    """Return a randomly-named DAList containing every signer."""
    signers = DAList()
    signers.set_random_instance_name()
    for entry in self.info_by_code.values():
        signers.append(entry['signer'])
    return signers