class User(Feature):
    '''Class representing AIMS user'''

    # Module-level logger shared via the enclosing module's namespace
    global aimslog
    aimslog = Logger.setup()

    def __init__(self, ref=None):
        '''Initialise User object
        @param ref: Unique reference string
        '''
        #aimslog.info('AdrRef.{}'.format(ref))
        self._ref = ref

    def __str__(self):
        # NOTE(review): self._userId is only assigned in setUserId(); calling
        # str() on a User before the id is set raises AttributeError — confirm
        # callers always set the id first
        return 'USR.{}.{}'.format(self._ref, self._userId)

    def setUserId(self, userId):
        '''Sets the unique AIMS user identifier
        @param userId: User identifier value
        '''
        self._userId = userId

    def getUserId(self):
        '''Returns the stored AIMS user identifier'''
        return self._userId

    def setEmail(self, email):
        '''Stores the email address only if it passes the Feature email
        validator, otherwise records None
        @param email: Candidate email address string
        '''
        self._email = email if Feature._vEmail(email) else None
class AddressFactory(FeatureFactory):
    '''AddressFactory class used to build address objects without the overhead
    of re-reading templates each time an address is needed'''
    # Flattened dict-path prefix identifying the nested position branch
    PBRANCH = '{d}{}{d}{}'.format(d=DEF_SEP, *Position.BRANCH)
    AFFT = FeedType.FEATURES
    DEF_FRT = FeedType.reverse[AFFT]
    addrtype = Address
    reqtype = None

    global aimslog
    aimslog = Logger.setup()

    def __init__(self, frt=DEF_FRT):
        '''Initialises an address factory with static templates.
        @param frt: FeedRef template reference used to select factory build
        @type frt: String
        @raise AddressTemplateReferenceException: If frt.k is not a known template key
        '''
        if frt.k in TP:  # membership test on the dict directly (was TP.keys())
            self.frt = frt
        else:
            raise AddressTemplateReferenceException(
                '{} is not a template key'.format(frt))
        self.template = self.readTemplate(TP)[self.frt.k]

    def __str__(self):
        # BUGFIX: FeedType.reverse is a mapping (subscripted as
        # FeedType.reverse[AFFT] for DEF_FRT above), so it must be indexed,
        # not called
        return 'AFC.{}'.format(FeedType.reverse[self.AFFT][:3])

    def get(self, ref=None, adr=None, model=None, prefix=''):
        '''Creates an address object from a model (using the response template
        if model is not provided)
        @param ref: Application generated unique reference string
        @type ref: String
        @param adr: Address object being populated
        @type adr: Address
        @param model: Dictionary object (matching or derived from a template)
            containing address attribute information
        @param prefix: String value used to flatten/identify nested dictionary elements
        @type prefix: String
        @return: (Minimally) Populated address object
        @raise AddressCreationException: Wraps any error raised while reading model data
        '''
        #overwrite = model OR NOT(address). If an address is provided only fill
        #it with model provided, presume dont want template fill
        overwrite = False
        if not adr:
            overwrite = True
            adr = self.addrtype(ref)
        if model:
            data = model
            overwrite = True
        else:
            data = self.template['response']
        if overwrite:
            try:
                #if SKIP_NULL: data = self._delNull(data) #breaks position on template, coords[0,0]->None
                adr = self._read(adr, data, prefix)
            except Exception as e:
                msg = 'Error creating address object using model {} with message "{}"'.format(
                    data, e)
                raise AddressCreationException(msg)
        return adr

    def _read(self, adr, data, prefix):
        '''Recursive address setting attribute dict reader.
        @param adr: Active address object
        @type adr: Address
        @param data: Active (subset) dict
        @param prefix: String value used to flatten/identify nested dictionary elements
        @type prefix: String
        @return: Active (partially filled) address
        '''
        for k in data:
            setter = 'set' + k[0].upper() + k[1:]
            new_prefix = prefix + DEF_SEP + k
            if isinstance(data[k], dict):
                # descend into nested attribute dicts
                adr = self._read(adr=adr, data=data[k], prefix=new_prefix)
            elif isinstance(data[k], list) and new_prefix == self.PBRANCH:
                # the position branch is a list of position dicts
                pstns = [Position.getInstance(pd, self) for pd in data[k]]
                adr.setAddressPositions(pstns)
            else:
                # prefer a dedicated setter; otherwise attach the value under
                # its flattened path name (was an expression-statement ternary)
                value = self.filterPI(data[k]) or None
                if hasattr(adr, setter):
                    getattr(adr, setter)(value)
                else:
                    setattr(adr, new_prefix, value)
        return adr

    def cast(self, adr):
        '''Casts address from current type to requested address-type,
        eg AddressFeature -> AddressChange
        @param adr: Address being converted
        @type adr: Address
        @return: Address cast to the self type
        '''
        return Address.clone(adr, self.get())
# LICENSE file for more information.
#
################################################################################

import httplib2
import json
import re

from Address import Address, AddressChange, AddressResolution #,AimsWarning
from Config import ConfigReader
from AimsUtility import FeatureType, ActionType, ApprovalType, GroupActionType, GroupApprovalType, UserActionType, FeedType, LogWrap, FeedRef, SupplementalHack
from AimsUtility import AimsException
from Const import MAX_FEATURE_COUNT, TEST_MODE
from AimsLogging import Logger

aimslog = Logger.setup()

# In test mode all request paths are prefixed with 'test'
TESTPATH = 'test' if TEST_MODE else ''


class AimsHttpException(AimsException):
    '''Base exception for HTTP errors encountered while talking to the AIMS API.
    Logs the message on construction via the supplied logging callable.'''

    def __init__(self, em, ll=aimslog.error):
        '''@param em: Error message text
        @param ll: Logging callable used to record the error (default aimslog.error)
        '''
        # BUGFIX: forward the message to the base exception so str(e) and
        # e.args carry it; previously only the log call received the message
        super(AimsHttpException, self).__init__(em)
        ll('{} - {}'.format(type(self).__name__, em))


class Http404Exception(AimsHttpException):
    '''HTTP 404 Not-Found response from the AIMS API.'''
    pass


class Http400Exception(AimsHttpException):
    '''HTTP 400 Bad-Request response from the AIMS API.'''
    pass
''' import unittest import inspect import sys import re import sys import os sys.path.append('../AIMSDataManager/') from Address import Address, AddressChange, AddressResolution, Position from AddressFactory import AddressChangeFactory, AddressResolutionFactory from AimsUtility import ActionType from AimsLogging import Logger testlog = Logger.setup('test') #user to init an address, simple text string read from a stored config file user_text = 'aims_user' class Test_0_AddressSelfTest(unittest.TestCase): def setUp(self): pass def tearDown(self): pass def test10_selfTest(self): #assertIsNotNone added in 3.1 self.assertNotEqual(testlog, None, 'Testlog not instantiated')
class FeatureFactory(object):
    '''Factory class for Feature objects but used as super class with construction management duties. Saves overhead on template reading each time a feature is needed'''
    # Flattened dict-path prefix identifying the nested position branch
    PBRANCH = '{d}{}{d}{}'.format(d=DEF_SEP, *Position.BRANCH)
    AFFT = FeedType.FEATURES
    DEF_FRT = FeedType.reverse[AFFT]
    addrtype = Address
    reqtype = None
    # Resource path for template files, one directory above this module
    #RP = eval(RES_PATH)
    RP = os.path.join(os.path.dirname(__file__), '..', RES_PATH)

    global aimslog
    aimslog = Logger.setup()

    @staticmethod
    def getInstance(etft):
        '''Gets an instance of a factory to generate a particular FeedRef type of object
        @param etft: FeeedRefobject describing required featuretype, feedtype object required
        @type etft: FeedRef
        @return: Feature
        '''
        #NOTE. Double duty for ft, consider (et,ft) - since enums are just ints et.g=ft.f
        # Factories are imported lazily inside each branch to avoid circular
        # imports between this module and the concrete factory modules
        if etft.et == FeatureType.GROUPS:
            from GroupFactory import GroupChangeFactory, GroupResolutionFactory
            if etft.ft == FeedType.CHANGEFEED:
                return GroupChangeFactory(etft)
            elif etft.ft == FeedType.RESOLUTIONFEED:
                return GroupResolutionFactory(etft)
            else:
                raise InvalidEnumerationType(
                    'FeedType {} not available'.format(etft))
        elif etft.et == FeatureType.ADDRESS:
            from AddressFactory import AddressFactory, AddressChangeFactory, AddressResolutionFactory
            if etft.ft == FeedType.FEATURES:
                return AddressFactory(etft)
            elif etft.ft == FeedType.CHANGEFEED:
                return AddressChangeFactory(etft)
            elif etft.ft == FeedType.RESOLUTIONFEED:
                return AddressResolutionFactory(etft)
            else:
                raise InvalidEnumerationType(
                    'FeedType {} not available'.format(etft.ft))
        elif etft.et == FeatureType.USERS:
            from UserFactory import UserFactory
            if etft.ft == FeedType.ADMIN:
                return UserFactory(etft)
            else:
                raise InvalidEnumerationType(
                    'FeedType {} not available'.format(etft.ft))
        else:
            raise InvalidEnumerationType('FeatureType {} not available'.format(
                etft.et))

    @staticmethod
    def filterPI(ppi):
        '''Filters out Processing Instructions from template declaration
        @param ppi: Processing instruction text
        @type ppi: String
        @return: List if possible attributes or a default attribute
        '''
        # NOTE(review): Python-2 semantics — encode('utf8') yields a str whose
        # .find accepts '#'; under Python 3 this produces bytes and the '#'
        # comparisons below would raise TypeError. Confirm target interpreter.
        sppi = ppi.encode('utf8') if hasattr(ppi, 'find') else str(ppi)
        if sppi.find('#') > -1:
            # a '#' marks a processing instruction; extract its declared value
            dflt = re.search('default=(\w+)', sppi)
            oneof = re.search('oneof=(\w+)', sppi)
            #first as default
            return dflt.group(1) if dflt else (
                oneof.group(1) if oneof else None)
        return ppi

    @staticmethod
    def readTemplate(tp):
        '''Reads and parses template file returning attribute dict
        @param tp: Dict of all feed type + feature type combinations used to construct template filenames
        @type tp: Dict of attributes for all Feature/Feed type combinations
        @return: Dict representing templates for JSON AIMS request/response
        '''
        # SECURITY NOTE(review): template file contents are passed to eval();
        # this executes arbitrary code from the template directory — safe only
        # if those files are fully trusted. Consider ast.literal_eval.
        for t1 in tp:
            for t2 in tp[t1]:
                with open(
                        os.path.join(FeatureFactory.RP,
                                     '{}.{}.template'.format(t1, t2)),
                        'r') as handle:
                    tstr = handle.read()
                    #print 'read template',t1t,t2t
                    tp[t1][t2] = eval(tstr) if tstr else ''
            #response address type is the template of the address-json we get from the api
            with open(
                    os.path.join(FeatureFactory.RP,
                                 '{}.response.template'.format(t1)),
                    'r') as handle:
                tstr = handle.read()
                tp[t1]['response'] = eval(tstr) if tstr else ''
        return tp

    @staticmethod
    def _delNull(obj):
        '''Removes Null/empty attributes from dict of attributes
        @param obj: Object to strip of null values
        @return: Stripped down object
        '''
        if hasattr(obj, 'items'):
            # mapping branch: rebuild same mapping type with falsy values dropped
            new_obj = type(obj)()
            for k in obj:
                #if k != 'NULL' and obj[k] != 'NULL' and obj[k] != None:
                if k and obj[k]:
                    res = FeatureFactory._delNull(obj[k])
                    if res:
                        new_obj[k] = res
        elif hasattr(obj, '__iter__'):
            # sequence branch: filter falsy items, recursing into the rest
            new_obj = []
            for it in obj:
                #if it != 'NULL' and it != None:
                if it:
                    new_obj.append(FeatureFactory._delNull(it))
        else:
            # scalar: returned unchanged
            return obj
        return type(obj)(new_obj)
#
################################################################################

import httplib2
import json
import re

from Address import Address, AddressChange, AddressResolution #,AimsWarning
from Config import ConfigReader
from AimsUtility import FeatureType, ActionType, ApprovalType, GroupActionType, GroupApprovalType, UserActionType, FeedType, LogWrap, FeedRef, SupplementalHack
from AimsUtility import AimsException
from Const import MAX_FEATURE_COUNT, TEST_MODE
from AimsLogging import Logger

aimslog = Logger.setup()

# In test mode all request paths are prefixed with 'test'
TESTPATH = 'test' if TEST_MODE else ''


class AimsHttpException(AimsException):
    '''Base exception for HTTP errors encountered while talking to the AIMS API.
    Logs the message on construction via the supplied logging callable.'''

    def __init__(self, em, ll=aimslog.error):
        '''@param em: Error message text
        @param ll: Logging callable used to record the error (default aimslog.error)
        '''
        # BUGFIX: forward the message to the base exception so str(e) and
        # e.args carry it; previously only the log call received the message
        super(AimsHttpException, self).__init__(em)
        ll('{} - {}'.format(type(self).__name__, em))


class Http404Exception(AimsHttpException):
    '''HTTP 404 Not-Found response from the AIMS API.'''
    pass


class Http400Exception(AimsHttpException):
    '''HTTP 400 Bad-Request response from the AIMS API.'''
    pass


class AimsApi(object):
    ''' make and receive all http requests / responses to AIMS API '''
    #global aimslog
    #aimslog = Logger.setup()
''' import unittest import inspect import sys import re import sys import os sys.path.append('../AIMSDataManager/') from Address import Address,AddressChange,AddressResolution,Position from AddressFactory import AddressChangeFactory,AddressResolutionFactory from AimsUtility import ActionType from AimsLogging import Logger testlog = Logger.setup('test') #user to init an address, simple text string read from a stored config file user_text = 'aims_user' class Test_0_AddressSelfTest(unittest.TestCase): def setUp(self): pass def tearDown(self): pass def test10_selfTest(self): #assertIsNotNone added in 3.1 self.assertNotEqual(testlog,None,'Testlog not instantiated')
class DataSync(Observable):
    '''Background thread triggering periodic data updates and synchronising update requests from DM.'''

    global aimslog
    aimslog = Logger.setup()

    # Class-level registry of active DataUpdater instances keyed by page ref.
    # NOTE(review): shared across all DataSync instances — confirm intentional.
    duinst = {}

    #hash to compare latest fetched data
    #from DataManager import FEEDS
    #data_hash = {dh:0 for dh in DataManager.FEEDS}

    # Default bounding-box corners (unset until setup() is called)
    sw,ne = None,None

    def __init__(self,params,queues):
        '''Initialise new DataSync object splitting out config parameters
        @param params: List of configuration parameters
        @type params: List<?>
        @param queues: List of IOR queues
        @type queues: Dict<String,Queue.Queue>
        '''
        #from DataManager import FEEDS
        super(DataSync,self).__init__()
        #thread reference, ft to AD/CF/RF, config info
        self.start_time = time.time()
        self.updater_running = False
        self.ref,self.etft,self.ftracker,self.conf = params
        # per-feed hash of last synced data, used by syncFeeds to detect change
        self.data_hash = {dh:0 for dh in FEEDS.values()}
        self.factory = FeatureFactory.getInstance(self.etft)
        self.updater = DataUpdater.getInstance(self.etft) # unevaluated class
        self.inq = queues['in']
        self.outq = queues['out']
        self.respq = queues['resp']
        # NOTE(review): _stop Event creation is commented out here but stop()
        # below calls self._stop.set() — presumably Observable provides _stop;
        # confirm, otherwise stop() raises AttributeError
        #self._stop = threading.Event()

    def setup(self,sw=None,ne=None):
        '''Parameter setup for coordinate feature requests.
        @param sw: South-West corner, coordinate value pair (optional)
        @type sw: List<Double>{2}
        @param ne: North-East corner, coordinate value pair (optional)
        @type ne: List<Double>{2}
        '''
        self.sw,self.ne = sw,ne

    def run(self):
        '''Continual loop running periodic feed fetch updates'''
        while not self.stopped():
            if not self.updater_running:
                self.fetchFeedUpdates(self.ftracker['threads'])
            time.sleep(self.ftracker['interval'])

    #@override
    def stop(self):
        '''Thread stop override to also stop subordinate threads'''
        #brutal stop on du threads
        for du in self.duinst.values():
            du.stop()
        self._stop.set()

    def close(self):
        '''Alias of stop'''
        self.stop()
        #self.inq.task_done()
        #self.outq.task_done()

    def observe(self,_,*args, **kwargs):
        '''Overridden observe method calling thread management function
        @params _: Discarded observable
        @param *args: Wrapped args, where we only use the first arg as the managePage ref value
        @param **kwargs: Wrapped kwargs, discarded
        '''
        if not self.stopped():
            self._managePage(args[0])

    def _managePage(self,ref):
        '''Thread management function called when a periodic thread ends, posting new data and starting a new thread in pool if required
        @param ref: Unique reference string
        @type ref: String
        '''
        #print '{}{} finished'.format(FeedType.reverse[self.ft][:2].capitalize(),r['page'])
        aimslog.info('Extracting queue for DU pool {}'.format(ref))
        #print [r['ref'] for r in self.pool]
        #print 'extracting queue for DU pool {}'.format(ref)
        r = None
        with pool_lock:
            aimslog.info('{} complete'.format(ref))
            #print 'POOLSTATE',self.pool
            #print 'POOLREMOVE',ref
            # locate this page's pool entry and drain its updater's queue
            r = [x for x in self.pool if x['ref']==ref][0]
            alist = self.duinst[ref].queue.get()
            acount = len(alist)
            self.newaddr += alist
            # next page number is one past the highest currently pooled page
            nextpage = max([r2['page'] for r2 in self.pool])+1
            #del self.duinst[ref]#ERROR? this cant be good, removing the DU during its own call to notify
            aimslog.debug('PAGE TIME {} {}s'.format(ref,time.time()-r['time']))
            #print 'POOLTIME {} {}'.format(ref,time.time()-r['time'])
            self.pool.remove(r)
            #if N>0 features return, spawn another thread
            if acount<MAX_FEATURE_COUNT:
                #non-full page returned, must be the last one
                self.exhausted = r['page']
            if acount>0:
                #features returned, not zero and not less than max so get another
                self.lastpage = max(r['page'],self.lastpage)
                if nextpage<self.exhausted:
                    ref = self._monitorPage(nextpage)
                    self.pool.append({'page':nextpage,'ref':ref,'time':time.time()})
                    #print 'POOLADD 2',ref
            else:
                pass
                #print 'No addresses found in page {}{}'.format(FeedType.reverse[self.ft][:2].capitalize(),r['page'])
            if len(self.pool)==0:
                # all pages drained: push the accumulated feed and finish
                self.syncFeeds(self.newaddr)#syncfeeds does notify DM
                self.managePage((None,self.lastpage))
                aimslog.debug('FULL TIME {} took {}s'.format(ref,time.time()-self.start_time))
                self.updater_running = False
                #print 'POOLCLOSE',ref,time.strftime('%Y-%M-%d %H:%m:%S')

    #--------------------------------------------------------------------------

    #@LogWrap.timediff
    def fetchFeedUpdates(self,thr,lastpage=FIRST_PAGE):
        '''Main feed updater method
        @param thr: Number of updater threads to spawn into pool, this can be negative to count back from a lastpage value
        @type thr: Integer
        @param lastpage: Initial page number for feed requests
        @type lastpage: Integer
        '''
        self.updater_running = True
        self.exhausted = PAGE_LIMIT
        self.lastpage = lastpage
        self.newaddr = []
        #print 'LP {} {}->{}'.format(FeedType.reverse[self.ft][:2].capitalize(),lastpage,lastpage+thr)
        with pool_lock:
            self.pool = self._buildPool(lastpage,thr)

    def _buildPool(self,lastpage,thr):
        '''Builds a pool based on page spec provided, accepts negative thresholds for backfill requests
        @param thr: Number of updater threads to spawn into pool, this can be negative to count back from a lastpage value
        @type thr: Integer
        @param lastpage: Initial page number for feed requests
        @type lastpage: Integer
        '''
        # min/max ordering makes the range valid for both positive and
        # negative thread counts (backfill)
        span = range(min(lastpage,lastpage+thr),max(lastpage,lastpage+thr))
        return [{'page':pno,'ref':self._monitorPage(pno),'time':time.time()} for pno in span]

    def _monitorPage(self,pno):
        '''Initialise and store a DataUpdater instance for a single page request
        @param pno: Page number to build DataUpdater request from
        @type pno: Integer
        @return: DataUpdater reference value
        '''
        ref = 'FP.{0}.Page{1}.{2:%y%m%d.%H%M%S}.p{3}'.format(self.etft,pno,DT.now(),pno)
        aimslog.info('init DU {}'.format(ref))
        self.duinst[ref] = self._fetchPage(ref,pno)
        self.duinst[ref].register(self)
        self.duinst[ref].start()
        return ref

    def _fetchPage(self,ref,pno):
        '''Build DataUpdate instance
        @param ref: Unique reference string
        @type ref: String
        @param pno: Page number to build DataUpdater request from
        @type pno: Integer
        @return: DataUpdater
        '''
        params = (ref,self.conf,self.factory)
        adrq = Queue.Queue()
        pager = self.updater(params,adrq)
        #address/feature requests called with bbox parameters
        if self.etft==FEEDS['AF']:
            pager.setup(self.etft,self.sw,self.ne,pno)
        else:
            pager.setup(self.etft,None,None,pno)
        pager.setName(ref)
        pager.setDaemon(True)
        return pager

    #NOTE. To override the behaviour, return feed once full, override this method RLock
    def syncFeeds(self,new_addresses):
        '''Checks if supplied addresses are different from a saved existing set and return in the out queue, with notification
        @param new_addresses: List of all fetched pages from a full feed request, spanning all pages
        @type new_addresses: List<Feature>
        '''
        #new_hash = hash(frozenset(new_addresses))
        # hash of per-feature hashes detects content change regardless of order
        new_hash = hash(frozenset([na.getHash() for na in new_addresses]))
        if self.data_hash[self.etft] != new_hash:
            #print '>>> Changes in {} hash\n{}\n{}'.format(self.etft,self.data_hash[self.etft],new_hash)
            self.data_hash[self.etft] = new_hash
            #with sync_lock:
            self.outq.put(new_addresses)
            self.outq.task_done()
            self.notify(self.etft)

    #--------------------------------------------------------------------------

    def returnResp(self,resp):
        '''I{DEPRECATED} Function to put response objects in DataSync response queue
        @param resp: Response object,
        @type resp: Feature
        '''
        aimslog.info('RESP.{}'.format(resp))
        self.respq.put(resp)

    #null method for features since page count not saved
    def managePage(self,p):pass
class DataManager(Observable): '''Initialises maintenance thread and provides queue accessors and request channels''' global aimslog aimslog = Logger.setup() def __init__(self,start=FIRST,initialise=False): '''Initialises DataManager initialising DataSync objects, setting up persistence and reading configuration. @param start: List of sync objects to be started (excluded FeatureFeed by default until BBOX defined @param initialise: Flag to signal initialisation of persisted objects @type initialise: Boolean ''' #self.ioq = {'in':Queue.Queue(),'out':Queue.Queue()} super(DataManager,self).__init__() if start and hasattr(start,'__iter__'): self._start = start.values() self.persist = Persistence(initialise) self.conf = Configuration().readConf() self._initDS() def _initDS(self): '''Initialises all DataSync, Queue and timestamping containers and begin check process''' self.ioq = {etft:None for etft in FEEDS.values()} self.ds = {etft:None for etft in FEEDS.values()} self.stamp = {etft:time.time() for etft in FEEDS.values()} #init the g2+a2+a1 different feed threads self.dsr = {f:DataSyncFeeds for f in FIRST.values()} self.dsr[FeedRef((FeatureType.ADDRESS,FeedType.FEATURES))] = DataSyncFeatures #Users feed not included in DSR for Release1.0 for etft in self._start: self._checkDS(etft) def startfeed(self,etft): '''Add a requested (FeedRef) thread to startup list and initialise @param etft: FeedRef of requested thread @type etft: FeedRef ''' if not etft in self._start: self._start.append(etft) self._checkDS(etft) #local override of observe def observe(self, observable, *args, **kwargs): '''Local override of observe chaining observe calls without notification @param observable: Instance of the class calling the notification @param *args: Wrapped args @param **kwargs: Wrapped kwargs ''' if self.stopped(): aimslog.warn('Attempt to call stopped DM listener {}'.format(self.getName())) return aimslog.info('DM Listen A[{}], K[{}] - {}'.format(args,kwargs,observable)) args += 
(self._monitor(args[0]),)#observable),) #chained notify/listen calls if hasattr(self,'registered') and self.registered: self.registered.observe(observable, *args, **kwargs) self._check() #Second register/observer method for main calling class def registermain(self,reg): '''Register "single" object as the main listener, intended for DataManager calling class. @param reg: Registered (main) object ''' self.registered = reg if hasattr(reg, 'observe') else None def _checkDS(self,etft): '''Starts a sync thread unless its a address-features feed with a zero bbox @param etft: FeedRef of requested thread @type etft: FeedRef ''' if (etft == FEEDS['AF'] and self.persist.coords['sw'] == SWZERO and self.persist.coords['ne'] == NEZERO):# or int(self.persist.tracker[etft]['threads'])==0: self.ds[etft] = None else: self.ds[etft],self.ioq[etft] = self._spawnDS(etft,self.dsr[etft]) if etft.ft != FeedType.FEATURES: self.register(self.ds[etft].drc) self.ds[etft].register(self) #HACK to start DRC even if feed thread count is zero. 
If changefeed isn't running we may still want to send requests to it if int(self.persist.tracker[etft]['threads'])>0: self.ds[etft].start() else: self.ds[etft].drc.start() def _spawnDS(self,etft,feedclass): '''Spawn and return a new DS matching the etft @param etft: FeedRef of requested thread @type etft: FeedRef @param feedclass: Alias of Sync setup function @type feedclass: DataSync class alias @return: (DataSync, {IOR Queues}) ''' ts = '{0:%y%m%d.%H%M%S}'.format(DT.now()) params = ('DSF..{}.{ts}'.format(etft,ts=ts),etft,self.persist.tracker[etft],self.conf) #self.ioq[etft] = {n:Queue.Queue() for n in ('in','out','resp')} dq = {n:Queue.Queue() for n in ('in','out','resp')} ds = feedclass(params,dq) ds.setup(self.persist.coords['sw'],self.persist.coords['ne']) ds.setDaemon(True) ds.setName('DS{}'.format(etft)) return ds,dq def _cullDS(self,etft): '''Remove temporary queue and ds instances, this does the anti spawn @param etft: FeedRef of thread to stop @type etft: FeedRef ''' del self.ioq[etft] self.deregister(self.uads.drc) del self.uads def close(self): '''Shutdown, closing/stopping DataSync threads and persist current data''' for ds in self.ds.values(): if ds: ds.close() self.persist.write() def _check(self): '''Safety method to check if a DataSync thread has crashed and restart it''' for etft in self._start: if self._confirmstart(etft): aimslog.warn('DS thread {} absent, starting'.format(etft)) #del self.ds[etft] self._checkDS(etft) def _confirmstart(self,etft): '''Simple test to determine whether thread should be started or not - Tests: Max thread count non zero AND thread is not already running @param etft: FeedRef of requested thread test @type etft: FeedRef ''' return int(self.persist.tracker[etft]['threads'])>0 and not (self.ds.has_key(etft) and self.ds[etft] and self.ds[etft].isAlive()) #Client Access def setbb(self,sw=None,ne=None): '''Reset the saved bounding box on the current DataManager which triggers a complete refresh of the features address data. 
- Tests whether provided coordinates are nonzero and dont match existing saved BBOX coordinates - Function attempts to gracefully kill previously running features thread during THREAD_JOIN_TIMEOUT period @param sw: South-West corner, coordinate value pair (optional) @type sw: List<Double>{2} @param ne: North-East corner, coordinate value pair (optional) @type ne: List<Double>{2} ''' #TODO add move-threshold to prevent small moves triggering an update if self.persist.coords['sw'] != sw or self.persist.coords['ne'] != ne: #throw out the current features addresses etft = FEEDS['AF']#(FeatureType.ADDRESS,FeedType.FEATURES) self.persist.set(etft,None,pat=PersistActionType.INIT) #save the new coordinates self.persist.coords['sw'],self.persist.coords['ne'] = sw,ne #kill the old features thread if self.ds[etft] and self.ds[etft].isAlive(): aimslog.info('Attempting Features Thread STOP') self.ds[etft].stop() self.ds[etft].join(THREAD_JOIN_TIMEOUT) #TODO investigate thread non-stopping issues if self.ds[etft].isAlive(): aimslog.warn('SetBB Features. ! Thread JOIN timeout') del self.ds[etft] #reinitialise a new features DataSync #self._initFeedDSChecker(etft) self.startfeed(etft) #@Deprecated def restart(self,etft): '''I{DEPRECATED} Restart method provided for calling application to explicitly kill running feed threads. This was required by the plugin application when in single thread mode to clear up contention issues but discouraged due to unpredictable thread hanging problems. @param etft: FeedRef of requested restart thread @type etft: FeedRef ''' #NB UI feature request. aimslog.warn('WARNING {} Thread Restart requested'.format(etft)) if self.ds.has_key(etft) and self.ds[etft] and self.ds[etft].isAlive(): self.ds[etft].stop() self.ds[etft].join(THREAD_JOIN_TIMEOUT) if self.ds[etft].isAlive(): aimslog.warn('{} ! 
Thread JOIN timeout'.format(etft)) #del self.ds[etft] elif not isinstance(etft,FeedRef): aimslog.error('Invalid FeedRef on STOP request') else: aimslog.warn('Requested thread {} does not exist') self._check() def pull(self,etft=None): '''Return copy of the current list of Address objects (ADL). @param etft: Optional feedref arg indicating which feature class to return @type etft: FeedRef @return: Dictionary<FeedRef,List<Address>> ''' return self.persist.get(etft) def _monitor(self,etft): '''Intermittent data saving function which checks a requested feed's out queue and puts any new items into the ADL @param etft: FeedRef of requested restart thread @type etft: FeedRef @return: Dictionary<FeedRef*,List<Address>> ''' #for etft in self.ds:#FeedType.reverse: if self.ds[etft]: while not self.ioq[etft]['out'].empty(): #because the queue isnt populated till all pages are loaded we can just swap out the ADL self.persist.set(etft,self.ioq[etft]['out'].get(),pat=PersistActionType.REPLACE) self.stamp[etft] = time.time() #self.persist.write() return self.persist.get(etft) def response(self,etft=FeedRef((FeatureType.ADDRESS,FeedType.RESOLUTIONFEED))): '''Returns any features lurking in the response queue - Response queue contains esponses to user generated requests @param etft: FeedRef of response thread. 
Default=Address/Resolution @type etft: FeedRef @return: Feature ''' resp = () delflag = False #while self.ioq.has_key((et,ft)) and not self.ioq[(et,ft)]['resp'].empty(): while etft in FEEDS.values() and not self.ioq[etft]['resp'].empty(): resp += (self.ioq[etft]['resp'].get(),) #don't delete the queue while we're still getting items from it, instead mark it for deletion if etft in FEED0.values(): delflag = True if delflag: self._cullDS(etft) return resp #-------------------------------------------------------------------------- def _populateAddress(self,feature): '''Fill in any required+missing fields if a default value is known (in this case the configured user/org) @param feature: Address object to test and populate @type feature: Address @return: Address ''' if not hasattr(feature,'_workflow_sourceUser') or not feature.getSourceUser(): feature.setSourceUser(self.conf['user']) if not hasattr(feature,'_workflow_sourceOrganisation') or not feature.getSourceOrganisation(): feature.setSourceOrganisation(self.conf['org']) return feature def _populateGroup(self,feature): '''Fill in any required+missing fields if a default value is known (in this case the configured user/submitter) @param feature: Group object to test and populate @type feature: Group @return: Group ''' if not hasattr(feature,'_workflow_sourceUser') or not feature.getSourceUser(): feature.setSourceUser(self.conf['user']) if not hasattr(feature,'_submitterUserName') or not feature.getSubmitterUserName(): feature.setSubmitterUserName(self.conf['user']) return feature # Convenience Methods #---------------------------- def addAddress(self,address,reqid=None): '''Convenience method to send/add an Address to the changefeed. 
@param address: Address object to add @param reqid: User supplied reference value, used to coordinate asynchronous requests/responses @type reqid: Integer ''' self._addressAction(address,ActionType.ADD,reqid) def retireAddress(self,address,reqid=None): '''Convenience method to send/retire an Address from the changefeed. @param address: Address object to retire @param reqid: User supplied reference value, used to coordinate asynchronous requests/responses @type reqid: Integer ''' self._addressAction(address,ActionType.RETIRE,reqid) def updateAddress(self,address,reqid=None): '''Convenience method to send/update an Address on the changefeed. @param address: Address object to update @param reqid: User supplied reference value, used to coordinate asynchronous requests/responses @type reqid: Integer ''' self._addressAction(address,ActionType.UPDATE,reqid) def _addressAction(self,address,at,reqid=None): '''Address action method performing address/action on the change feed @param address: Address object to update @type address: Address @param at: Action function to perform @type at: ActionType @param reqid: User supplied reference value, used to coordinate asynchronous requests/responses @type reqid: Integer ''' if reqid: address.setRequestId(reqid) self._populateAddress(address).setChangeType(ActionType.reverse[at].title()) self._queueAction(FeedRef((FeatureType.ADDRESS,FeedType.CHANGEFEED)), at, address) #---------------------------- def acceptAddress(self,address,reqid=None): '''Convenience method to send/accept an Address on the resolutionfeed. @param address: Address object to accept @type address: Address @param reqid: User supplied reference value, used to coordinate asynchronous requests/responses @type reqid: Integer ''' self._addressApprove(address,ApprovalType.ACCEPT,reqid) def declineAddress(self,address,reqid=None): '''Convenience method to send/decline an Address on the resolutionfeed. 
        @param address: Address object to decline
        @type address: Address
        @param reqid: User supplied reference value, used to coordinate asynchronous requests/responses
        @type reqid: Integer
        '''
        self._addressApprove(address,ApprovalType.DECLINE,reqid)

    def repairAddress(self,address,reqid=None):
        '''Convenience method to send/update an Address on the resolutionfeed.
        @param address: Address object to update
        @type address: Address
        @param reqid: User supplied reference value, used to coordinate asynchronous requests/responses
        @type reqid: Integer
        '''
        self._addressApprove(address,ApprovalType.UPDATE,reqid)

    def supplementAddress(self,address,reqid=None):
        '''Convenience method to fetch additional info on an Address from the resolutionfeed.
        @param address: Address object to update
        @type address: Address
        @param reqid: User supplied reference value, used to coordinate asynchronous requests/responses
        @type reqid: Integer
        '''
        #HACK. Since a feature address doesn't have a changeid (but its needed for the construction of a resolution feed
        #request) we substitute the changeId for the version number. This usefully also provides the final component of
        #the request URL. Note also, approval requests include a payload but for this GET request it isn't needed.
        ###address.setChangeId(address.getVersion())
        ###self._addressApprove(address,ApprovalType.SUPPLEMENT,reqid)
        #HACK (2). Set changeid to flag supplemental request: the HACK_SUP_IND prefix
        #marks this changeId as a supplemental-request sentinel rather than a real id.
        address.setChangeId('{hsi}{cid}'.format(hsi=HACK_SUP_IND,cid=address.getAddressId()))
        self._addressApprove(address,ApprovalType.SUPPLEMENT,reqid)

    def _addressApprove(self,address,at,reqid=None):
        '''Address approval method performing address/approve actions on the resolution feed.
        Tags the address with the request id and queue status, then queues the action
        on the address resolution feed.
        @param address: Address object to update
        @type address: Address
        @param at: Approval function to perform
        @type at: ApprovalType
        @param reqid: User supplied reference value, used to coordinate asynchronous requests/responses
        @type reqid: Integer
        '''
        if reqid: address.setRequestId(reqid)
        address.setQueueStatus(ApprovalType.LABEL[at].title())
        self._queueAction(FeedRef((FeatureType.ADDRESS,FeedType.RESOLUTIONFEED)), at, address)

    #============================

    def acceptGroup(self,group,reqid=None):
        '''Convenience method to send/accept a Group on the resolutionfeed.
        @param group: Group object to accept
        @param reqid: User supplied reference value, used to coordinate asynchronous requests/responses
        @type reqid: Integer
        '''
        self._groupApprove(group, GroupApprovalType.ACCEPT, reqid)

    def declineGroup(self,group,reqid=None):
        '''Convenience method to send/decline a Group on the resolutionfeed.
        @param group: Group object to decline
        @param reqid: User supplied reference value, used to coordinate asynchronous requests/responses
        @type reqid: Integer
        '''
        self._groupApprove(group, GroupApprovalType.DECLINE, reqid)

    def repairGroup(self,group,reqid=None):
        '''Convenience method to send/update a Group on the resolutionfeed.
        @param group: Group object to update
        @param reqid: User supplied reference value, used to coordinate asynchronous requests/responses
        @type reqid: Integer
        '''
        self._groupApprove(group, GroupApprovalType.UPDATE, reqid)

    def _groupApprove(self,group,gat,reqid=None):
        '''Group approval method performing group/approve actions on the resolution feed.
        Mirrors _addressApprove but targets the groups resolution feed.
        @param group: Group object to update
        @type group: Group
        @param gat: Group approval function to perform
        @type gat: GroupApprovalType
        @param reqid: User supplied reference value, used to coordinate asynchronous requests/responses
        @type reqid: Integer
        '''
        if reqid: group.setRequestId(reqid)
        group.setQueueStatus(GroupApprovalType.LABEL[gat].title())
        self._queueAction(FeedRef((FeatureType.GROUPS,FeedType.RESOLUTIONFEED)),gat,group)

    #----------------------------

    def replaceGroup(self,group,reqid=None):
        '''Convenience method to send/replace a Group on the changefeed.
        @param group: Group object to replace
        @param reqid: User supplied reference value, used to coordinate asynchronous requests/responses
        @type reqid: Integer
        '''
        self._groupAction(group, GroupActionType.REPLACE, reqid)

    def updateGroup(self,group,reqid=None):
        '''Convenience method to send/update a Group on the changefeed.
        @param group: Group object to update
        @param reqid: User supplied reference value, used to coordinate asynchronous requests/responses
        @type reqid: Integer
        '''
        self._groupAction(group, GroupActionType.UPDATE, reqid)

    def submitGroup(self,group,reqid=None):
        '''Convenience method to send/submit a Group to the changefeed.
        @param group: Group object to submit
        @param reqid: User supplied reference value, used to coordinate asynchronous requests/responses
        @type reqid: Integer
        '''
        self._groupAction(group, GroupActionType.SUBMIT, reqid)

    def closeGroup(self,group,reqid=None):
        '''Convenience method to send/close a Group on the changefeed.
        @param group: Group object to close
        @param reqid: User supplied reference value, used to coordinate asynchronous requests/responses
        @type reqid: Integer
        '''
        self._groupAction(group, GroupActionType.CLOSE, reqid)

    def addGroup(self,group,reqid=None):
        '''Convenience method to send/add a Group to the changefeed.
        @param group: Group object to add
        @param reqid: User supplied reference value, used to coordinate asynchronous requests/responses
        @type reqid: Integer
        '''
        self._groupAction(group, GroupActionType.ADD, reqid)

    def removeGroup(self,group,reqid=None):
        '''Convenience method to send/remove a Group from the changefeed.
        @param group: Group object to remove
        @param reqid: User supplied reference value, used to coordinate asynchronous requests/responses
        @type reqid: Integer
        '''
        self._groupAction(group, GroupActionType.REMOVE, reqid)

    def _groupAction(self,group,gat,reqid=None):
        '''Group action method performing group/actions on the change feed.
        Unlike _groupApprove this populates the group and stamps a change type
        before queuing on the changefeed.
        @param group: Group object to update
        @type group: Group
        @param gat: Group action function to perform
        @type gat: GroupActionType
        @param reqid: User supplied reference value, used to coordinate asynchronous requests/responses
        @type reqid: Integer
        '''
        if reqid: group.setRequestId(reqid)
        self._populateGroup(group).setChangeType(GroupActionType.reverse[gat].title())
        self._queueAction(FeedRef((FeatureType.GROUPS,FeedType.CHANGEFEED)),gat,group)

    #----------------------------

    def _queueAction(self,feedref,atype,aorg):
        '''Queue and notify: puts the {action: (feature,)} request on the feed's
        inbound queue and notifies observers of that feedref.'''
        self.ioq[feedref]['in'].put({atype:(aorg,)})
        self.notify(feedref)

    #----------------------------

    '''User actions are on-demand only and because they won't be run very often are set up and torn down on each use'''

    def addUser(self,user,reqid=None):
        '''Convenience method to send/add a User to the adminfeed.
        @param user: User object to add
        @param reqid: User supplied reference value, used to coordinate asynchronous requests/responses
        @type reqid: Integer
        '''
        self._userAction(user, UserActionType.ADD, reqid)

    def removeUser(self,user,reqid=None):
        '''Convenience method to send/remove a User from the adminfeed.
        @param user: User object to remove
        @param reqid: User supplied reference value, used to coordinate asynchronous requests/responses
        @type reqid: Integer
        '''
        self._userAction(user, UserActionType.DELETE, reqid)

    def updateUser(self,user,reqid=None):
        '''Convenience method to send/update a User on the adminfeed.
        @param user: User object to update
        @param reqid: User supplied reference value, used to coordinate asynchronous requests/responses
        @type reqid: Integer
        '''
        self._userAction(user, UserActionType.UPDATE, reqid)

    def _userAction(self,user,uat,reqid=None):
        '''User action method performing user/action on the admin feed.
        Spawns a fresh admin datasync on each call (user actions are rare, so the
        sync is set up and torn down per use rather than kept resident).
        @param user: User object to update
        @type user: User
        @param uat: User action function to perform
        @type uat: UserActionType
        @param reqid: User supplied reference value, used to coordinate asynchronous requests/responses
        @type reqid: Integer
        '''
        if reqid: user.setRequestId(reqid)
        #self._populateUser(user).setChangeType(UserActionType.reverse[uat].title())
        #self._queueAction(FeedRef((FeatureType.GROUPS,FeedType.CHANGEFEED)),gat,group)
        etft = FeedRef((FeatureType.USERS,FeedType.ADMIN))
        self.uads,self.ioq[etft] = self._spawnDS(etft,DataSyncAdmin)
        self.register(self.uads.drc)
        self.ioq[etft]['in'].put({uat:(user,)})
        self.notify(etft)

    #convenience method for address casting
    def castTo(self,requiredtype,address):
        '''Convenience method abstracting the casting function used to downcast
        address objects to the various feed required formats
        @param requiredtype: Address format requirement in FeedRef format
        @type requiredtype: FeedRef
        @param address: Address object being cast
        @type address: Address
        @return: Address
        '''
        if not requiredtype in FeedType.reverse.keys():
            raise Exception('unknown feed/address type')
        return FeatureFactory.getInstance(FeedRef((FeatureType.ADDRESS,requiredtype))).cast(address)

    #----------------------------
    #CM - context manager support
    def __enter__(self): return self

    #NOTE(review): returning close()'s value from __exit__ suppresses in-flight
    #exceptions if close() ever returns truthy - confirm close() returns None
    def __exit__(self,exc_type=None, exc_val=None, exc_tb=None): return self.close()
class UserFactory(FeatureFactory):
    '''UserFactory class used to build user objects from the admin feed templates.'''

    #PBRANCH = '{d}{}{d}{}'.format(d=DEF_SEP,*Position.BRANCH)
    UFFT = FeedType.ADMIN
    DEF_FRT = FeedType.reverse[UFFT]
    usrtype = User
    reqtype = UserActionType

    global aimslog
    aimslog = Logger.setup()

    def __init__(self, frt=DEF_FRT):
        '''Initialise UserFactory object
        @param frt: FeedRef template reference used to select factory build
        @type frt: String
        '''
        if frt.k in TP.keys():
            self.frt = frt
        else:
            raise UserTemplateReferenceException(
                '{} is not a template key'.format(frt))
        self.template = self.readTemplate(TP)[self.frt.k]

    def __str__(self):
        #NOTE(review): 'AFC' prefix looks copy-pasted from AddressFactory; left
        #unchanged in case callers parse it - confirm whether 'UFC' was intended
        return 'AFC.{}'.format(FeedType.reverse(self.UFFT)[:3])

    @staticmethod
    def getInstance(ft):
        '''Gets an instance of a factory to generate a particular (ft) type of user object
        @param ft: Feed/Feature reference used to select the factory template
        '''
        return UserFactory(ft)

    #HACK to save rewriting getaddress at gfactory call
    def get(self, ref=None, usr=None, model=None, prefix=''):
        '''Creates a user object from a model (using the response template if
        model is not provided)
        @param ref: Application generated unique reference string
        @type ref: String
        @param usr: User object being populated
        @type usr: User
        @param model: Dictionary object (matching or derived from a template)
        containing user attribute information
        @param prefix: String value used to flatten/identify nested dictionary elements
        @type prefix: String
        @return: (Minimally) Populated user object
        '''
        #overwrite = model OR NOT(user). If a user is provided only fill it with
        #model provided, presume dont want template fill
        overwrite = False
        if not usr:
            overwrite = True
            usr = self.usrtype(ref)
        if model:
            data = model
            overwrite = True
        else:
            data = self.template['response']
        if overwrite:
            try:
                #if SKIP_NULL: data = self._delNull(data)
                usr = self._read(usr, data, prefix)
            except Exception as e:
                #FIX: message previously said 'address object' - this factory builds users
                msg = 'Error creating user object using model {} with {}'.format(
                    data, e)
                aimslog.error(msg)
                raise UserCreationException(msg)
        return usr

    def _read(self, usr, data, prefix):
        '''Recursive user setting attribute dict reader.
        @param usr: Active user object
        @type usr: User
        @param data: Active (subset) dict
        @param prefix: String value used to flatten/identify nested dictionary elements
        @type prefix: String
        @return: Active (partially filled) user object
        '''
        for k in data:
            setter = 'set' + k[0].upper() + k[1:]
            new_prefix = prefix + DEF_SEP + k
            if isinstance(data[k], dict):
                usr = self._read(usr=usr, data=data[k], prefix=new_prefix)
            #elif isinstance(data[k],list) and new_prefix == self.PBRANCH:
            #    pstns = []
            #    for pd in data[k]: pstns.append(Position.getInstance(pd,self))
            #    adr.setAddressPositions(pstns)
            else:
                #use the matching setter if the user type declares one, otherwise
                #stash the value under its flattened prefix name
                getattr(usr, setter)(self.filterPI(data[k]) or None) if hasattr(
                    usr, setter) else setattr(
                    usr, new_prefix, self.filterPI(data[k]) or None)
        return usr

    def cast(self, usr):
        '''Casts user from current type to requested user-type,
        eg UserAdmin... <future enhancement?>
        @param usr: User being converted
        @type usr: User
        @return: User cast to the self type
        '''
        #this is going to return the same thing since there is only one user type.
        #*Left in as enhancement
        return User.clone(usr, self.get())

    def convert(self, usr, uat):
        '''Converts a user into its json payload equivalent
        @param usr: User objects being converted to JSON string
        @type usr: User
        @param uat: Action to perform on group
        @type uat: UserActionType
        @return: Representative JSON string (minimally compliant with type template)
        '''
        full = None
        try:
            full = self._convert(
                usr, copy.deepcopy(
                    self.template[self.reqtype.reverse[uat].lower()]))
            if SKIP_NULL:
                full = self._delNull(full)
        except Exception as e:
            msg = 'Error converting user object using AT{} with {}'.format(
                uat, e)
            aimslog.error(msg)
            raise UserConversionException(msg)
        return full

    def _convert(self, usr, dat, key=''):
        '''Recursive part of convert
        @param usr: Active user object
        @type usr: User
        @param dat: Active (subset) dict
        @param key: String value used to flatten/identify nested dictionary elements
        @type key: String
        @return: Processed (nested) dict
        '''
        for attr in dat:
            new_key = key + DEF_SEP + attr
            #if new_key == self.PBRANCH:
            #    dat[attr] = usr.getConvertedAddressPositions()
            if isinstance(dat[attr], dict):
                dat[attr] = self._convert(usr, dat[attr], new_key)
            else:
                dat[attr] = self._assign(dat, usr, new_key)
        return dat

    def _assign(self, dat, usr, key):
        '''Validates user data value against template requirements reading tags
        to identify default and required data fields
        - `oneof` indicates field is required and is one of the values in the
          subsequent pipe separated list
        - `required` indicates the field is required. An error will be thrown
          if a suitable value is unavailable
        @param dat: Active (subset) dict
        @param usr: Active user object
        @type usr: User
        @param key: String value used to flatten/identify nested dictionary elements
        @type key: String
        @return: Validated user attribute value (or template default)
        '''
        #TODO add default or remove from filterpi
        required, oneof, default, datatype = 4 * (None, )
        val = usr.__dict__[key] if hasattr(usr, key) else None
        dft = dat[key[key.rfind(DEF_SEP) + 1:]]
        if dft and dft.startswith('#'):
            #template control tags are comma separated behind a leading '#'
            pi = dft.replace('#', '').split(',')
            required = 'required' in pi
            oneof = [
                pv[6:].strip('()').split('|')
                for pv in pi if pv.startswith('oneof')
            ]
            default = oneof[0][0] if required and oneof else None
        #FIX: log/exception text previously said 'Address field' - these are user fields
        if required and not val:
            aimslog.error('UserFieldRequired {}'.format(key))
            raise UserFieldRequiredException(
                'User field {} required'.format(key))
        if oneof and val and val not in oneof[0]:
            aimslog.error('UserFieldIncorrect {}={}'.format(key, val))
            raise UserFieldIncorrectException(
                'User field {}={} not one of {}'.format(key, val, oneof[0]))
        #NOTE(review): falsy values (0,'') fall through to default here - confirm intended
        return val if val else default
class DataUpdater(Observable):
    '''Maintenance thread controlling data updates and api interaction.
    Instantiates an aimsapi instance with wrappers for initialisation of local
    data store and change/resolution feed updating
    '''
    #et = FeatureType.ADDRESS
    #ft = FeedType.FEATURES
    address = None
    pno = 0
    changeId = 0
    global aimslog
    aimslog = Logger.setup()
    getfeat = None

    def __init__(self,params,queue):
        '''DataUpdater base initialiser.
        @param params: List of configuration parameters
        @type params: List<?>
        @param queue: Response queue
        @type queue: Queue.Queue
        '''
        import threading  # local import keeps this block self-contained
        super(DataUpdater,self).__init__()
        self.ref,self.conf,self.factory = params
        self.queue = queue
        #FIX: restored - stop()/stopped() dereference self._stop and would
        #otherwise raise AttributeError
        self._stop = threading.Event()
        self.api = AimsApi(self.conf)

    def setup(self,etft,sw,ne,pno):
        '''Parameter setup.
        @param etft: Feed/Feature identifier
        @type etft: FeedRef
        @param sw: South-West corner, coordinate value pair
        @type sw: List<Double>{2}
        @param ne: North-East corner, coordinate value pair
        @type ne: List<Double>{2}
        @param pno: Feed page number
        @type pno: Integer
        '''
        self.etft = etft
        self.sw,self.ne = sw,ne
        self.pno = pno

    def run(self):
        '''Main updater run method fetching single page of addresses from API'''
        aimslog.info('GET.{} {} - Page{}'.format(self.ref,self.etft,self.pno))
        featlist = []
        #for page in self.api.getOnePage(self.etft,self.sw,self.ne,self.pno):
        #    featlist.append(self.processPage(page,self.etft))
        ce,pages = self.api.getOnePage(self.etft,self.sw,self.ne,self.pno)
        if any(ce.values()):
            aimslog.error('Single-page request failure {}'.format(ce))
        #FIX: dict.has_key() is deprecated/py2-only; 'in' behaves identically
        if 'entities' in pages:
            for page in pages['entities']:
                featlist.append(self.processPage(page,self.etft))
        else:
            aimslog.error('Single-page response missing entities')
        self.queue.put(featlist)
        self.notify(self.ref)

    def processPage(self,page,etft):
        '''Process an individual page. If page is resolution type optionally
        re-query at individual level
        @param page: Processed results from pno request
        @type page: Dict<?>
        @param etft: Feed/Feature identifier
        @type etft: FeedRef
        '''
        if etft.ft == FeedType.RESOLUTIONFEED and ENABLE_ENTITY_EVALUATION:
            cid = self.cid(page)
            ce,feat = self.api.getOneFeature(etft,cid)
            if any(ce.values()):
                aimslog.error('Single-feature request failure {}'.format(ce))
            if feat == {u'class': [u'error']}:
                #if the pno request returns the not-supposed-to-happen error,
                #it gets special treatment
                aimslog.error('Invalid API response {}'.format(feat))
                #return self.factory.get(model=pno['properties'])
            else:
                return self._processEntity(feat,cid,etft)
        else:
            #just return the main feedlevel address objects
            return self.factory.get(model=page['properties'])

    def _processEntity(self,feat,cid,etft):
        '''Identify and select group, address or entity processing for a
        resolutionfeed feature
        @param feat: dict representation of feature before object processing
        @type feat: Dict
        @param cid: Change ID or group change ID
        @type cid: Integer
        @param etft: Feed/Feature identifier
        @type etft: FeedRef
        @return: Instantiated feature object
        '''
        if feat['class'][0] == 'validation':
            return self._processValidationEntity(feat)
            #e = EntityValidation.getInstance(feat)# self.getEntityInstance()
        #-------------------------------
        # Resolution Group
        elif feat['class'][0] == 'resolutiongroup':
            return self._processResolutionGroup(feat,cid,etft)
        # Address List
        elif feat['class'][0] == 'addressresolution':
            return self._processAddressResolution(feat)
        #--------------------------------
        # Simple Entity object
        else:
            return self._processSimpleEntity(self.factory.get,feat)

    def _processValidationEntity(self,feat):
        '''Wraps call to validation entity instantiator
        @param feat: dict representation of feature before object processing
        @type feat: Dict
        @return: Instantiated validation Entity
        '''
        return EntityValidation.getInstance(feat)

    def _processAddressEntity(self,feat):
        '''Processes feature data into address object
        @param feat: dict representation of feature before object processing
        @type feat: Dict
        @return: Instantiated Address entity
        '''
        #return EntityAddress.getInstance(feat)
        return self._processSimpleEntity(FeatureFactory.getInstance(FeedRef((FeatureType.ADDRESS,FeedType.RESOLUTIONFEED))).get,feat)

    def _processSimpleEntity(self,fact,feat):
        '''Default processor for generic entities but the same as address
        resolution processor (below).
        @param fact: Link to factory, object instantiation method
        @type fact: <Function>
        @param feat: dict representation of feature before object processing
        @type feat: Dict
        @return: Instantiated Address entity
        '''
        featurelist = []
        a = fact(model=feat['properties'])
        #FIX: dict.has_key() is deprecated/py2-only; 'in' behaves identically
        if 'entities' in feat:
            for e in feat['entities']:
                featurelist.append(self._populateEntity(e))
            a._setEntities(featurelist)
        return a

    def _processAddressResolution(self,feat):
        '''Processes entries in the addressresolution entities list
        @param feat: dict representation of feature before object processing
        @type feat: Dict
        @return: Instantiated Address entity
        '''
        featurelist = []
        a = self.factory.get(model=feat['properties'])
        for e in feat['entities']:
            featurelist.append(self._populateEntity(e))
        a._setEntities(featurelist)
        return a

    def _processResolutionGroup(self,feat,cid,etft):
        '''Processes the res-address objects in a res-group. Subsequently
        populates the sub entities as feature-addresses.
        @param feat: dict representation of feature before object processing
        @type feat: Dict
        @param cid: Change ID or group change ID
        @type cid: Integer
        @param etft: Feed/Feature identifier
        @type etft: FeedRef
        @return: Instantiated feature object
        '''
        featurelist = []
        g = self.factory.get(model=feat['properties'])#group
        #HACK subst cid for cid+count string
        ce,feat2 = self.api.getOneFeature(etft,'{}/address?count={}'.format(cid,MAX_FEATURE_COUNT))#group entity/adr list
        if any(ce.values()):
            aimslog.error('Single-feature request failure {}'.format(ce))
        etft2 = FeedRef((FeatureType.ADDRESS,FeedType.RESOLUTIONFEED))
        factory2 = FeatureFactory.getInstance(etft2)
        for f in feat2['entities']:
            a = factory2.get(model=f['properties'])
            elist2 = []
            for e in f['entities']:
                elist2.append(self._populateEntity(e))
            a._setEntities(elist2)
            featurelist.append(a)
        g._setEntities(featurelist)
        return g

    def _populateEntity(self,ent):
        '''Selects type and instantiates appropriate entity object.
        @param ent: dict representation of feature before object processing
        @type ent: Dict
        '''
        if ent['class'][0] == 'validation':
            return self._processValidationEntity(ent)
        elif ent['class'][0] == 'address':
            ###res factory might work here instead
            #etft3 = FeedRef((FeatureType.ADDRESS,FeedType.FEATURES))
            #factory3 = FeatureFactory.getInstance(etft3)
            #return factory3.get(model=e['properties'])
            return self._processAddressEntity(ent)
        else:
            return Entity.getInstance(ent)

    @staticmethod
    def getInstance(etft):
        '''Based on the provided FeedRef this getInstance returns a group,
        address or user updater CLASS (not an instance) for the caller to construct
        @param etft: Feed/Feature identifier
        @type etft: FeedRef
        '''
        if etft.et == FeatureType.GROUPS:
            return DataUpdaterGroup
        elif etft.et == FeatureType.ADDRESS:
            return DataUpdaterAddress
        elif etft.et == FeatureType.USERS:
            return DataUpdaterUser
        else:
            raise DataUpdaterSelectionException('Select Address,Groups or Users')

    def stop(self):
        '''Signal the updater to stop'''
        self._stop.set()

    def stopped(self):
        '''Report whether stop has been signalled'''
        return self._stop.isSet()

    def close(self):
        '''Mark the current queue task done'''
        aimslog.info('Queue {} stopped'.format(self.queue.qsize()))
        self.queue.task_done()

    #executed by subclass: extracts the change id from a page dict
    def cid(self,_):
        pass
class Feature(object):
    '''Feature data object representing AIMS primary objects Addresses,
    Groups and Users'''

    type = FeedType.FEATURES

    global aimslog
    aimslog = Logger.setup()

    def __init__(self, ref=None):
        '''Initialise Feature with an application reference string.
        @param ref: Application generated unique reference string
        '''
        #aimslog.info('AdrRef.{}'.format(ref))
        self._ref = ref
        #self._hash = self._hash()#no point, empty

    #generic validators
    @staticmethod
    def _vString(sval):
        #NOTE(review): str excludes py2 unicode values - confirm callers never
        #pass unicode (this also gates _vDate/_vEmail below)
        return isinstance(sval, str)  #alpha only filter?

    @staticmethod
    def _vInt(ival):
        return isinstance(ival, int)  #range filter?

    @staticmethod
    def _vDate(date):
        '''True if date is a string of the exact form YYYY-MM-DD'''
        #FIX: raw string - pattern bytes unchanged, avoids invalid escape sequences
        return Feature._vString(date) and bool(
            re.match(r'^\d{4}-\d{2}-\d{2}$', date))

    @staticmethod
    def _vEmail(email):
        '''True if email is a string matching a basic user@host[.tld] shape'''
        #FIX: raw string - pattern bytes unchanged, avoids invalid escape sequences
        return Feature._vString(email) and bool(
            re.match(
                r'^([a-zA-Z0-9_\-\.]+)@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.)|(([a-zA-Z0-9\-]+\.)+))([a-zA-Z]{2,4}|[0-9]{1,3})(\]?)$',
                email))

    #COMMON---------------------------------------------
    #version not used on non feed feaures types but its inclusion here won't matter
    def setVersion(self, version):
        #non-integer versions are discarded rather than raising
        self._version = version if Feature._vInt(version) else None

    def getVersion(self):
        return self._version

    def setSourceUser(self, sourceUser):
        self._workflow_sourceUser = sourceUser

    def getSourceUser(self):
        return self._workflow_sourceUser

    def setSourceOrganisation(self, sourceOrganisation):
        self._workflow_sourceOrganisation = sourceOrganisation

    def getSourceOrganisation(self):
        return self._workflow_sourceOrganisation

    def setChangeType(self, changeType):
        self._changeType = changeType

    def getChangeType(self):
        return self._changeType

    def setQueueStatus(self, queueStatus):
        self._queueStatus = queueStatus

    def getQueueStatus(self):
        return self._queueStatus

    #---------------------------------------------------
    def _setEntities(self, entities):
        self.setMeta()
        self.meta.entities = entities

    def _getEntities(self):
        return self.meta.entities

    def setMeta(self, meta=None):
        #only creates meta once; subsequent calls are no-ops
        if not hasattr(self, 'meta'):
            self.meta = meta if meta else FeatureMetaData()

    def getMeta(self):
        return self.meta if hasattr(self, 'meta') else None

    def merge(self, other, exclude=''):
        '''Merges new (other) atributes into existing (self) object
        @param other: Another Feature object whose attributes will be added to
        selfs attributes
        @param exclude: Comma separated string of attribute names to skip
        @return: Feature
        '''
        for key in other.__dict__.keys():
            if key not in exclude.split(','):
                setattr(self, key, getattr(other, key))
        return self

    #---------------------------------
    def setRequestId(self, requestId):
        '''Set meta requestid variable on Feature object
        @param requestId: User generated variable attatched to and identifying
        AIMS individual requests. Integer type restricted at meta setter
        @type requestId: Integer
        '''
        self.setMeta()
        self.meta.requestId = requestId

    def getRequestId(self):
        return self.meta.requestId if hasattr(self, 'meta') else None

    def setErrors(self, errors):
        '''Set meta error variable on Feature object
        @param errors: Error string typically set from HTTP error message
        returned at API interface
        @type errors: String
        '''
        self.setMeta()
        self.meta.errors = errors

    def getErrors(self):
        return self.meta.errors if hasattr(self, 'meta') else None

    #object hash of attributes for page comparison
    def getHash(self):
        '''Generates unique hash values for Feature objects.
        Hashes are calculated by reading all attributes excluding the ref, meta
        and position attributes (per HASH_EXCLUDES). Numeric values are converted
        to string and unicode values are encoded. The resulting string attributes
        are append reduced and their md5 calculated. The hexdigest of this hash
        is returned
        @return: 32 digit hexdigest representing hash code
        '''
        #discard all list/nested attributes? This should be okay since we capture
        #the version addess|changeId in the top level
        #NOTE(review): long/basestring/bare reduce are Python-2-only constructs
        s0 = [
            getattr(self, z) for z in self.__dict__.keys()
            if z not in HASH_EXCLUDES
        ]
        s1 = [str(z) for z in s0 if isinstance(z, (int, float, long, complex))]
        s2 = [
            z.encode('utf8') for z in s0
            if isinstance(z, (basestring)) and z not in s1
        ]
        #return reduce(lambda x,y: x.update(y), s1+s2,hashlib.md5()) #reduce wont recognise haslib objs
        self.setMeta()
        self.meta.hash = hashlib.md5(reduce(lambda x, y: x + y,
                                            s1 + s2)).hexdigest()
        return self.meta.hash

    @staticmethod
    def clone(a, b=None):
        '''Clones attributes of A to B and instantiates B (as type A) if not provided
        @param a: Feature object to-be cloned
        @type a: Feature
        @param b: Feature object being overwritten (optional)
        @type b: Feature
        @return: Manual deep copy of Feature object
        '''
        #duplicates only attributes set in source object
        from FeatureFactory import FeatureFactory
        if not b:
            b = FeatureFactory.getInstance(a.type).get()
        for attr in a.__dict__.keys():
            setattr(b, attr, getattr(a, attr))
        return b

    @staticmethod
    def compare(a, b):
        '''Compares supplied features with each other using computed hash values
        @param a: One of the features being compared
        @type a: Feature
        @param b: The other feature being compared
        @type b: Feature
        '''
        #TODO implement an ordering criteria for sorting
        return a.getHash() == b.getHash()