def _process_certificate(self, cert):
    """
    Process a client certificate and map it to a local user.

    Extracts the NIA from the 5th component of the certificate subject
    (format assumed: a component whose value contains 'NIA<number>' —
    TODO confirm against the issuing CA), then looks up the matching
    UserProfile/UserData rows.

    Returns a dict: {'success': True, 'user': <UserData>} on success,
    or {'success': False, 'message': <spanish error text>} otherwise.
    """
    invalid_user = {
        'success': False,
        'message': 'Este certificado digital no tiene un usuario asociado.'
    }

    try:
        # Parse and validate the NIA in one guarded step: the original
        # converted to int() outside the try, so a non-numeric NIA
        # raised an uncaught ValueError.
        nia = cert.get_subject().get_components()[4][1].split(
            'NIA')[1].lstrip()
        nia = int(nia)
    except (IndexError, ValueError, AttributeError, TypeError):
        # Narrowed from a bare `except:` — only parsing failures mean
        # "invalid certificate"; anything else should propagate.
        return {
            'success': False,
            'message': 'El certificado no es válido.'
        }

    store = Store(Database().get_database())

    result = store.find(UserProfile, UserProfile.nia == nia).one()
    if not result:
        return invalid_user

    user = store.find(UserData, UserData.id == result.user_id).one()
    if not user:
        return invalid_user

    return {'success': True, 'user': user}
def perform_data_update(dbfile):
    """
    Run the data-update phase on the given database file.

    Aborts with an Exception if any currently-enabled language is no
    longer in LANGUAGES_SUPPORTED_CODES; otherwise delegates to
    db_perform_data_update() and commits.

    The store is always closed, and rolled back on any failure
    (the original leaked the store on the unsupported-language path).
    """
    store = Store(create_database(GLSettings.make_db_uri(dbfile)))

    try:
        enabled_languages = [
            lang.name for lang in store.find(l10n.EnabledLanguage)
        ]

        removed_languages = sorted(
            set(enabled_languages) - set(LANGUAGES_SUPPORTED_CODES))
        if removed_languages:
            raise Exception(
                "FATAL: cannot complete the upgrade because the support for some of the enabled languages is currently incomplete (%s)\n"
                "Read about how to handle this condition at: https://github.com/globaleaks/GlobaLeaks/wiki/Upgrade-Guide#lang-drop"
                % ', '.join(removed_languages))

        db_perform_data_update(store)
        store.commit()
    except:
        # Bare except kept on purpose: roll back on *any* failure
        # (including BaseException) before re-raising.
        store.rollback()
        raise
    finally:
        store.close()
class Storm(object): def __init__(self,db="sqlite"): uname="root" passw="" if db=="postgres": passw="root" elif db=="sqlite": expr="sqlite:" if db!="sqlite": expr="{db}://{usern}:{passw}@localhost/test".format(db=db,usern=uname,passw=passw) self.database = create_database(expr) self.store = Store(self.database) #self.store.execute("DROP TABLE users") self.store.execute("CREATE TABLE users (id INTEGER PRIMARY KEY, login VARCHAR(8), userid INTEGER, projid INTEGER)") def insert(self): users=(User(id=userid,login=unicode(who), userid=userid, projid=randint(1,5)) for who, userid in randName()) map(self.store.add,users) def update(self): fr=randint(1,5) to=rand_generator(randint(1,5),fr) q=self.store.find(User, User.projid == fr) c=q.count() q.set(projid=to) return fr,to,c def delete(self): fr=randint(1,5) q=self.store.find(User, User.projid == fr) c=q.count() q.remove() return fr,c def drop(self): pass def dbDump(self): for i in self.store.find(User): print i
def main(): db = create_database("sqlite:laps.sqlite") store = Store(db) racers = store.find(Racer) print 'Categoria,Número,Nome,L1,L2,L3,L4,L5,L6,L7,L8,Total' for r in racers: data = [r.category.name, r.number, r.name] #print r.number, r.name for i, lap in enumerate(list(r.get_laps()), 1): assert i == lap.lap_number #print ' ', i, lap.lap_number, lap.lap_time, lap #data.append(str(lap.lap_time)) data.append(lap.lap_time.seconds) data.extend([0] * (11 - len(data))) data.append(r.total_time) print ','.join(str(i) for i in data)
def test_json_property(self):
    """The JSON property is encoded as JSON"""
    class TestModel(object):
        __storm_table__ = "json_test"
        id = Int(primary=True)
        json = JSON()

    payload = {"a": 3, "b": "foo", "c": None}
    encoded = json.dumps(payload).decode("utf-8")

    # Insert the raw JSON text straight through the DB-API connection.
    connection = self.database.connect()
    connection.execute("INSERT INTO json_test (json) VALUES (?)",
                       (encoded, ))
    connection.commit()

    # Read it back through Storm: the JSON object is decoded to python.
    store = Store(self.database)
    obj = store.find(TestModel).one()
    store.close()

    self.assertEqual(payload, obj.json)
def perform_data_update(dbfile):
    """
    Run the data-update phase on the given database file.

    Aborts with an Exception if any currently-enabled language is no
    longer in LANGUAGES_SUPPORTED_CODES; otherwise delegates to
    db_perform_data_update() and commits.

    The store is always closed, and rolled back on any failure
    (the original leaked the store on the unsupported-language path).
    """
    store = Store(create_database(GLSettings.make_db_uri(dbfile)))

    try:
        enabled_languages = [lang.name for lang in store.find(l10n.EnabledLanguage)]

        removed_languages = sorted(set(enabled_languages) - set(LANGUAGES_SUPPORTED_CODES))
        if removed_languages:
            raise Exception("FATAL: cannot complete the upgrade because the support for some of the enabled languages is currently incomplete (%s)\n"
                            "Read about how to handle this condition at: https://github.com/globaleaks/GlobaLeaks/wiki/Upgrade-Guide#lang-drop"
                            % ', '.join(removed_languages))

        db_perform_data_update(store)
        store.commit()
    except:
        # Bare except kept on purpose: roll back on *any* failure
        # (including BaseException) before re-raising.
        store.rollback()
        raise
    finally:
        store.close()
def perform_schema_migration(version):
    """
    Migrate the sqlite DB schema step by step, from `version` up to
    DATABASE_VERSION, running each update_N migration module in a temp
    directory and verifying row counts after every step.

    @param version: the schema version of the existing database file.
    @return: None. Exits the process if `version` is older than
             FIRST_DATABASE_VERSION_SUPPORTED; raises on any migration
             or integrity-check failure.
    """
    # NOTE(review): these two lists are populated below but never read
    # in this function — possibly leftovers; confirm against callers.
    to_delete_on_fail = []
    to_delete_on_success = []

    if version < FIRST_DATABASE_VERSION_SUPPORTED:
        GLSettings.print_msg(
            "Migrations from DB version lower than %d are no longer supported!"
            % FIRST_DATABASE_VERSION_SUPPORTED)
        quit()

    tmpdir = os.path.abspath(os.path.join(GLSettings.db_path, 'tmp'))
    orig_db_file = os.path.abspath(
        os.path.join(GLSettings.db_path, 'glbackend-%d.db' % version))
    final_db_file = os.path.abspath(
        os.path.join(GLSettings.db_path, 'glbackend-%d.db' % DATABASE_VERSION))

    # Work on a copy of the original DB inside a fresh temp directory.
    shutil.rmtree(tmpdir, True)
    os.mkdir(tmpdir)
    shutil.copy2(orig_db_file, tmpdir)

    new_db_file = None

    try:
        # One iteration per schema step: version -> version + 1.
        while version < DATABASE_VERSION:
            old_db_file = os.path.abspath(
                os.path.join(tmpdir, 'glbackend-%d.db' % version))
            new_db_file = os.path.abspath(
                os.path.join(tmpdir, 'glbackend-%d.db' % (version + 1)))

            GLSettings.db_file = new_db_file
            GLSettings.enable_input_length_checks = False

            to_delete_on_fail.append(new_db_file)
            to_delete_on_success.append(old_db_file)

            GLSettings.print_msg("Updating DB from version %d to version %d" %
                                 (version, version + 1))

            store_old = Store(create_database('sqlite:' + old_db_file))
            store_new = Store(create_database('sqlite:' + new_db_file))

            # Here is instanced the migration script
            MigrationModule = importlib.import_module(
                "globaleaks.db.migrations.update_%d" % (version + 1))
            migration_script = MigrationModule.MigrationScript(
                migration_mapping, version, store_old, store_new)

            GLSettings.print_msg("Migrating table:")

            try:
                try:
                    migration_script.prologue()
                except Exception as exception:
                    GLSettings.print_msg(
                        "Failure while executing migration prologue: %s" %
                        exception)
                    raise exception

                for model_name, _ in migration_mapping.iteritems():
                    # Only migrate tables present in both the old and
                    # the new schema.
                    if migration_script.model_from[
                            model_name] is not None and migration_script.model_to[
                                model_name] is not None:
                        try:
                            migration_script.migrate_model(model_name)

                            # Commit at every table migration in order to be able to detect
                            # the precise migration that may fail.
                            migration_script.commit()
                        except Exception as exception:
                            GLSettings.print_msg(
                                "Failure while migrating table %s: %s " %
                                (model_name, exception))
                            raise exception

                try:
                    migration_script.epilogue()
                    migration_script.commit()
                except Exception as exception:
                    GLSettings.print_msg(
                        "Failure while executing migration epilogue: %s " %
                        exception)
                    raise exception
            finally:
                # the database should be always closed before leaving the application
                # in order to not keep leaking journal files.
                migration_script.close()

            GLSettings.print_msg("Migration stats:")

            # we open a new db in order to verify integrity of the generated file
            store_verify = Store(
                create_database(GLSettings.make_db_uri(new_db_file)))

            for model_name, _ in migration_mapping.iteritems():
                if model_name == 'ApplicationData':
                    continue

                if migration_script.model_from[
                        model_name] is not None and migration_script.model_to[
                            model_name] is not None:
                    # Row count after migration must match what the
                    # migration script says it wrote, unless the script
                    # explicitly tolerates a mismatch for this table.
                    count = store_verify.find(
                        migration_script.model_to[model_name]).count()
                    if migration_script.entries_count[model_name] != count:
                        if migration_script.fail_on_count_mismatch[model_name]:
                            raise AssertionError("Integrity check failed on count equality for table %s: %d != %d" % \
                                                 (model_name, count, migration_script.entries_count[model_name]))
                        else:
                            GLSettings.print_msg(" * %s table migrated (entries count changed from %d to %d)" % \
                                                 (model_name, migration_script.entries_count[model_name], count))
                    else:
                        GLSettings.print_msg(" * %s table migrated (%d entry(s))" % \
                                             (model_name, migration_script.entries_count[model_name]))

            version += 1

            store_verify.close()

        # Run the (non-schema) data-update phase on the final file.
        perform_data_update(new_db_file)
    except Exception as exception:
        # simply propagate the exception
        raise exception
    else:
        # in case of success first copy the new migrated db, then as last action delete the original db file
        shutil.copy(new_db_file, final_db_file)
        security.overwrite_and_remove(orig_db_file)
    finally:
        # Always cleanup the temporary directory used for the migration
        for f in os.listdir(tmpdir):
            tmp_db_file = os.path.join(tmpdir, f)
            security.overwrite_and_remove(tmp_db_file)
        shutil.rmtree(tmpdir)
class TSExperimentSvcAgent(TSLocalAgent):
    """
    Experiment-service agent: tracks connected 'load' agents, persists
    their host/workload metadata via a Storm store, and serves
    experiment-profile CRUD calls over the TS JSON protocol.
    """
    agentId = expsvcAgentId
    uuid = expsvcAgentUUID
    agentType = expsvcAgentType

    def __init__(self, server, connString):
        # connString: Storm database URI for the experiment DB.
        TSLocalAgent.__init__(self, server)

        self.rootAgent = server.localAgents[0]
        self.userAgent = server.localAgents[1]

        self.database = create_database(connString)
        self.dbStore = Store(self.database)

        # Get notified when 'load' agents connect/disconnect.
        self.server.listenerAgents.append(AgentListener('load',
                                                        self.onAgentRegister,
                                                        self.onAgentDisconnect))

        # agentUuid (str) -> LoadAgentInfo for currently-connected agents.
        self.loadAgents = {}

        self.resourceManager = ResourceManager(self.dbStore)

    @inlineCallbacks
    def registerNewAgent(self, client, agent):
        """Create and persist an Agent row from the remote host info."""
        hostInfo = yield agent.getHostInfo()

        agentObj = Agent()
        agentObj.uuid = uuid.UUID(client.agentUuid)
        agentObj.agentType = 'load'
        # FIXME: Sometimes domainname passed with hostname
        agentObj.hostname = unicode(hostInfo.hostname)
        agentObj.domainname = unicode(hostInfo.domainname)
        agentObj.osname = hostInfo.osname
        agentObj.release = hostInfo.release
        agentObj.machineArch = hostInfo.machineArch
        agentObj.numCPUs = hostInfo.numCPUs
        agentObj.numCores = hostInfo.numCores
        agentObj.memTotal = hostInfo.memTotal
        agentObj.lastOnline = datetime.now()

        yield self.dbStore.add(agentObj)
        yield self.dbStore.commit()

        returnValue(agentObj)

    @inlineCallbacks
    def onAgentRegister(self, client):
        """Handle a connecting 'load' agent: find-or-create its DB row,
        cache it in self.loadAgents, and kick off workload/resource sync."""
        if client.agentType == 'load':
            if client.agentUuid in self.loadAgents:
                raise JSONTS.Error(JSONTS.AE_INVALID_STATE,
                                   "Loader agent with uuid '%s' already registered" % client.agentUuid)

            agentId = client.getId()
            agent = self.createRemoteAgent(agentId, LoadAgent)

            agentSet = yield self.dbStore.find(Agent,
                                               Agent.uuid == uuid.UUID(client.agentUuid))
            agentObj = yield agentSet.one()

            if agentObj is None:
                agentObj = yield self.registerNewAgent(client, agent)
            else:
                # Update last online timestamp
                agentObj.lastOnline = datetime.now()
                yield self.dbStore.add(agentObj)
                yield self.dbStore.commit()

            agentInfo = LoadAgentInfo(agent, agentObj)
            self.loadAgents[client.agentUuid] = agentInfo

            print "Registered agent %s with uuid '%s'" % (agentObj.hostname, client.agentUuid)

            # Defer the slower sync work to the reactor loop.
            reactor.callLater(0.0, self.fetchWorkloadTypes, agent, agentObj)
            reactor.callLater(0.1, self.resourceManager.registerLoadAgent, agent, agentObj)

    def onAgentDisconnect(self, client):
        # NOTE(review): this method contains `yield` but is NOT decorated
        # with @inlineCallbacks — calling it just creates a generator and
        # the body never runs. Also `agentInfo` is referenced on the first
        # line before it is assigned on the second. Both look like real
        # bugs; fix by decorating and reordering the two statements.
        if client.agentType == 'load':
            yield self.resourceManager.unregisterLoadAgent(agentInfo.agentObj)
            agentInfo = self.loadAgents[client.agentUuid]

            print 'Disconnected agent %s' % agentInfo.agentObj.hostname

            del self.loadAgents[client.agentUuid]

    @inlineCallbacks
    def fetchWorkloadTypes(self, agent, agentObj):
        """Sync the agent's advertised workload types/params into the DB."""
        wltypeList = yield agent.getWorkloadTypes()
        wltSet = yield self.dbStore.find(WorkloadType,
                                         WorkloadType.agent == agentObj)

        for wltypeName, wltype in wltypeList.iteritems():
            wltObj = wltSet.find(WorkloadType.name == wltypeName)
            wltObj = wltObj.any()

            if wltObj is None:
                # First time we see this workload type for this agent.
                wltObj = WorkloadType()
                wltObj.agent = agentObj
                wltObj.name = wltypeName
                wltObj.module = wltype.module
                wltObj.modulePath = wltype.path
                wltObj.classList = ','.join(wltype.wlclass)
                yield self.dbStore.add(wltObj)

            paramSet = yield self.dbStore.find(WorkloadParam,
                                               WorkloadParam.workloadType == wltObj)

            # Update parameter list
            for paramObj in paramSet:
                if paramObj.name not in wltype.params:
                    paramObj.remove()
                    continue

                # NOTE(review): writes `paramObj.data` while new rows below
                # use `paramObj.paramData`, and serializes the whole
                # `wltype.params` dict rather than the single parameter —
                # looks inconsistent; confirm intended attribute/value.
                paramObj.data = wltype.params.serialize()

                # Remove serialized object from params array
                del wltype.params[paramObj.name]

                yield self.dbStore.add(paramObj)

            # Anything left in wltype.params is a brand-new parameter.
            for paramName, param in wltype.params.iteritems():
                paramObj = WorkloadParam()
                paramObj.name = paramName
                paramObj.workloadType = wltObj
                paramObj.paramData = TSWorkloadParameter.serialize(param)
                yield self.dbStore.add(paramObj)

        yield self.dbStore.commit()

    @TSMethodImpl(ExpSvcAgent.listAgents)
    @inlineCallbacks
    def listAgents(self, context):
        """Return {agentUuid: TSExpSvcAgentDescriptor} for all known agents."""
        agentsList = {}
        agentSet = yield self.dbStore.find(Agent)

        for agentObj in agentSet:
            # TODO: should filter agents according to users ACL
            agentUuid = str(agentObj.uuid)

            descriptor = TSExpSvcAgentDescriptor()
            descriptor.agentId = agentObj.id
            descriptor.lastOnline = datetimeToTSTime(agentObj.lastOnline)
            # Online = currently connected to this service.
            descriptor.isOnline = agentUuid in self.loadAgents

            for field in ('hostname', 'domainname', 'osname', 'release',
                          'machineArch', 'numCPUs', 'numCores', 'memTotal'):
                setattr(descriptor, field, getattr(agentObj, field))

            agentsList[agentUuid] = descriptor

        returnValue(agentsList)

    @TSMethodImpl(ExpSvcAgent.getWorkloadTypes)
    @inlineCallbacks
    def getWorkloadTypes(self, context, agentId):
        """Return {name: TSWorkloadType} for the given agent id."""
        wltSet = yield self.dbStore.find(WorkloadType,
                                        WorkloadType.aid == agentId)

        # Pre-fetch all params for this agent's workload types via a join.
        paramsQuery = (WorkloadParam,
                       Join(WorkloadType,
                            And(WorkloadType.aid == agentId,
                                WorkloadParam.wltid == WorkloadType.id)))
        paramsGlobalSet = yield self.dbStore.using(*paramsQuery).find(WorkloadParam)

        wltypeList = {}

        for wltObj in wltSet:
            paramsSet = yield paramsGlobalSet.find(WorkloadParam.workloadType == wltObj)

            wltype = TSWorkloadType()
            wltype.module = wltObj.module
            wltype.path = wltObj.modulePath
            wltype.wlclass = wltObj.classList.split(',')
            wltype.params = {}

            for paramObj in paramsSet:
                param = TSWorkloadParameter.deserialize(paramObj.paramData)
                wltype.params[paramObj.name] = param

            wltypeList[wltObj.name] = wltype

        returnValue(wltypeList)

    @TSMethodImpl(ExpSvcAgent.getAgentResources)
    @inlineCallbacks
    def getAgentResources(self, context, agentId):
        """Delegate resource lookup to the resource manager."""
        resourceInfo = yield self.resourceManager.getAgentResources(agentId)
        returnValue(resourceInfo)

    @inlineCallbacks
    def _getProfileObj(self, context, profileName, profile):
        """Resolve (ExperimentProfile-or-None, userId) for the caller.

        Admin/master callers may address any user's profile (userId
        taken from the request, defaulting to 0); other callers are
        restricted to their own userId."""
        client = context.client
        if client.auth == TSServerClient.AUTH_MASTER or \
                client.auth == TSServerClient.AUTH_ADMIN:
            userId = profile.userId if profile.userId is not None else 0
        else:
            agentId = client.getId()
            userId = self.userAgent.agentUsers[agentId]

        profileSet = yield self.dbStore.find(ExperimentProfile,
                                             And(ExperimentProfile.name == unicode(profileName),
                                                 ExperimentProfile.userId == userId))

        returnValue((profileSet.one(), userId))

    @TSMethodImpl(ExpSvcAgent.listProfiles)
    @inlineCallbacks
    def listProfiles(self, context):
        '''If context is of administrative rights (master.key or admin),
        list all experiments, or select only owned experiment'''
        client = context.client

        # TODO: Support for experiment sharing

        profiles = {}

        if client.auth == TSServerClient.AUTH_MASTER or \
                client.auth == TSServerClient.AUTH_ADMIN:
            profileSet = yield self.dbStore.find(ExperimentProfile)
        else:
            agentId = client.getId()
            userId = self.userAgent.agentUsers[agentId]
            profileSet = yield self.dbStore.find(ExperimentProfile,
                                                 ExperimentProfile.userId == userId)

        for profileObj in profileSet:
            profiles[profileObj.name] = _profileObjToTSO(profileObj,
                                                         TSExperimentProfileInfo)

        returnValue(profiles)

    @TSMethodImpl(ExpSvcAgent.getProfile)
    @inlineCallbacks
    def getProfile(self, context, profileName, profile):
        """Return the full profile (with threadpools and workloads)."""
        profileObj, _ = yield self._getProfileObj(context, profileName, profile)

        fullProfile = _profileObjToTSO(profileObj, TSExperimentProfile)
        fullProfile.threadpools = {}
        fullProfile.workloads = {}

        threadpoolSet = yield self.dbStore.find(ExperimentThreadPool,
                                                ExperimentThreadPool.profile == profileObj)
        for threadpoolObj in threadpoolSet:
            threadpool = TSExperimentThreadPool()
            threadpool.agentId = threadpoolObj.aid
            threadpool.numWorkers = threadpoolObj.numWorkers
            fullProfile.threadpools[threadpoolObj.name] = threadpool

        workloadSet = yield self.dbStore.find(ExperimentWorkload,
                                              ExperimentWorkload.profile == profileObj)
        for workloadObj in workloadSet:
            workload = TSExperimentWorkload()
            # Dangling references are mapped to -1 / '' placeholders.
            workload.agentId = workloadObj.threadpool.aid if workloadObj.threadpool is not None else -1
            workload.workloadType = workloadObj.workloadType.name if workloadObj.workloadType is not None else ''
            workload.threadpool = workloadObj.threadpool.name if workloadObj.threadpool is not None else ''
            workload.params = workloadObj.params
            fullProfile.workloads[workloadObj.name] = workload

        returnValue(fullProfile)

    @TSMethodImpl(ExpSvcAgent.configureProfile)
    @inlineCallbacks
    def configureProfile(self, context, profileName, profile):
        """Create or update a profile: upsert its threadpools, then its
        workloads, and commit everything at the end."""
        profileObj, userId = yield self._getProfileObj(context, profileName, profile)
        newProfile = False
        threadpools = {}

        if profileObj is None:
            profileObj = ExperimentProfile()
            profileObj.name = unicode(profileName)
            profileObj.userId = userId
            profileObj.creationDate = datetime.now()
            newProfile = True

        profileObj.description = unicode(profile.description)
        yield self.dbStore.add(profileObj)

        if not newProfile:
            threadpoolSet = yield self.dbStore.find(ExperimentThreadPool,
                                                    ExperimentThreadPool.profile == profileObj)

            # Update or remove existing threadpools
            for threadpoolObj in threadpoolSet:
                if threadpoolObj.name in profile.threadpools:
                    threadpool = profile.threadpools[threadpoolObj.name]

                    threadpoolObj.aid = threadpool.agentId
                    threadpoolObj.numWorkers = threadpool.numWorkers

                    # Consumed: drop it so only new pools remain below.
                    del profile.threadpools[threadpoolObj.name]

                    yield self.dbStore.add(threadpoolObj)
                    threadpools[threadpoolObj.name] = threadpoolObj
                else:
                    yield self.dbStore.remove(threadpoolObj)

        # Add new threadpools
        for threadpoolName, threadpool in profile.threadpools.iteritems():
            threadpoolObj = ExperimentThreadPool()
            threadpoolObj.name = unicode(threadpoolName)
            threadpoolObj.aid = threadpool.agentId
            threadpoolObj.profile = profileObj
            threadpoolObj.numWorkers = threadpool.numWorkers

            threadpools[threadpoolName] = threadpoolObj

            yield self.dbStore.add(threadpoolObj)

        @inlineCallbacks
        def _setWorkloadType(workload, workloadObj):
            # Resolve workload-type name -> WorkloadType row id.
            if workload.workloadType is not None and workload.agentId is not None:
                workloadTypeSet = yield self.dbStore.find(WorkloadType,
                                                          And(WorkloadType.aid == workload.agentId,
                                                              WorkloadType.name == unicode(workload.workloadType)))
                workloadTypeObj = workloadTypeSet.one()
                workloadObj.wltid = workloadTypeObj.id
            else:
                workloadObj.wltid = None

        def _setThreadpool(workload, workloadObj):
            # Link the workload to the threadpool upserted above (if any).
            workloadObj.tpid = threadpools[workloadObj.name].id \
                if workload.threadpool is not None \
                else None

        if not newProfile:
            workloadSet = yield self.dbStore.find(ExperimentWorkload,
                                                  ExperimentWorkload.profile == profileObj)
            # Update or remove existing workloads.
            for workloadObj in workloadSet:
                if workloadObj.name in profile.workloads:
                    workload = profile.workloads[workloadObj.name]

                    _setThreadpool(workload, workloadObj)
                    yield _setWorkloadType(workload, workloadObj)
                    workloadObj.params = workload.params

                    del profile.workloads[workloadObj.name]

                    yield self.dbStore.add(workloadObj)
                else:
                    yield self.dbStore.remove(workloadObj)

        # Add new workloads.
        for workloadName, workload in profile.workloads.iteritems():
            workloadObj = ExperimentWorkload()
            workloadObj.name = unicode(workloadName)
            workloadObj.profile = profileObj

            _setThreadpool(workload, workloadObj)
            yield _setWorkloadType(workload, workloadObj)
            workloadObj.params = workload.params

            # TODO: implement workload steps
            workloadObj.stepsId = None

            yield self.dbStore.add(workloadObj)

        yield self.dbStore.commit()
class Form(GladeDelegate):
    """
    Main race-timing window (kiwi/GTK): manages racers, categories and
    lap logging backed by a Storm sqlite database (laps.sqlite).
    """
    # Widgets proxied to the Racer being edited.
    widgets = ['name', 'number', 'category']
    # Widgets proxied to the Race itself.
    race_widgets = ['start_hour', 'time_elapsed']

    def __init__(self):
        GladeDelegate.__init__(self, gladefile="interface.ui",
                               delete_handler=self.quit_if_last)
        self.proxy = None
        self.db = create_database("sqlite:laps.sqlite")
        self.store = Store(self.db)
        self.race = self._check_race()
        self.race_proxy = self.add_proxy(self.race, self.race_widgets)
        self.register_validate_function(self._validation_changed)
        self._check_categories()
        self.setup_widgets()

    def _validation_changed(self, valid):
        # Only allow saving when the proxied form fields validate.
        self.save_button.set_sensitive(valid)

    def _check_race(self):
        """Return the existing Race, creating a default one if absent."""
        race = self.store.find(Race).one()
        if race:
            return race
        race = Race()
        race.name = u'First race ever'
        self.store.add(race)
        self.store.commit()
        return race

    def _check_categories(self):
        """Seed the race's categories from CATEGORIES on first run."""
        categories = self.race.get_categories()
        if not categories.is_empty():
            return
        for short, name in CATEGORIES.items():
            cat = Category()
            cat.race = self.race
            cat.short_name = short
            cat.name = name
            cat.total_laps = CATEGORIES_LAPS[short]
            self.store.add(cat)
        self.store.commit()

    def setup_widgets(self):
        """One-time widget configuration: fonts, column layouts, and
        initial population of the racer/log/category lists."""
        self.save_button.set_sensitive(False)
        self.racer_field.set_sensitive(False)
        # Before the race starts only the Start button is active;
        # afterwards only the lap-number entry is.
        if not self.race.start_time:
            self.lap_number.set_sensitive(False)
        else:
            self.start_button.set_sensitive(False)

        large = pango.FontDescription('Lucida Sans 32')
        self.lap_number.modify_font(large)
        self.time_elapsed.modify_font(large)
        self.start_hour.modify_font(large)
        self.hpaned1.set_position(550)

        # Categores
        options = sorted([(c.name, c.id) for c in self.race.get_categories()])
        self.category.prefill(options)

        # Lists
        self.racers.set_columns([
            Column('name', title="Nome", data_type=str, sorted=True),
            Column('number', title='Número', data_type=int),
            Column('completed_laps', title='Completas', data_type=int),
            Column('total_time', title='Tempo', data_type=str),
            Column('category_str', title="Categoria", data_type=str)
            ])
        self.categories.set_columns([
            Column('short_name', title="Cat", data_type=str, sorted=True),
            Column('name', title="Category", data_type=str, expand=True),
            Column('total_racers', title="Corredores", data_type=str),
            Column('completed_laps', title="Comp", data_type=int),
            Column('total_laps', title="Total", data_type=int)
            ])
        self.log.set_columns([
            Column('id', data_type=int, visible=False),
            Column('number', title="#", data_type=int),
            Column('name', title="Nome", expand=True, data_type=str),
            Column('event_time', title="Hora", data_type=datetime.time),
            Column('lap_time', title="Tempo Volta", data_type=str),
            Column('lap_number', title="Volta N", data_type=int),
            Column('category', title="Cat", data_type=str),
            Column('remaining_laps', title="Falta", data_type=int),
            ])
        for racer in self.store.find(Racer):
            self.racers.append(racer)
        self.log.extend(self.store.find(RacerLap))
        self.categories.extend(self.store.find(Category))

        # timer
        gobject.timeout_add(1000, self._update_timer)

        # Always show bottom of the log list.
        self.log.get_treeview().connect('size-allocate', self._treeview_changed)

        # Show finishing categories in bold
        self.categories.set_cell_data_func(self._on_categories__cell_data_func)
        self.log.set_cell_data_func(self._on_log__cell_data_func)
        self.racers.set_cell_data_func(self._on_racers__cell_data_func)

    def _on_categories__cell_data_func(self, column, renderer, category, text):
        # Highlight categories on their last lap (yellow) or finished (green).
        if not isinstance(renderer, gtk.CellRendererText):
            return text
        completed_laps = category.completed_laps
        last_lap = completed_laps == (category.total_laps - 1)
        finished = completed_laps == category.total_laps
        renderer.set_property('weight-set', last_lap or finished)
        renderer.set_property('background-set', last_lap or finished)
        if last_lap or finished:
            renderer.set_property('weight', pango.WEIGHT_BOLD)
            if last_lap:
                renderer.set_property('background', 'yellow')
            elif finished:
                renderer.set_property('background', 'green')
        return text

    def _on_log__cell_data_func(self, column, renderer, racer_lap, text):
        # Same highlighting as categories, keyed on the lap's remaining count.
        if not isinstance(renderer, gtk.CellRendererText):
            return text
        remaining_laps = racer_lap.remaining_laps
        last_lap = remaining_laps == 1
        finished = remaining_laps == 0
        renderer.set_property('weight-set', last_lap or finished)
        renderer.set_property('background-set', last_lap or finished)
        if last_lap or finished:
            renderer.set_property('weight', pango.WEIGHT_BOLD)
            if last_lap:
                renderer.set_property('background', 'yellow')
            elif finished:
                renderer.set_property('background', 'green')
        return text

    def _on_racers__cell_data_func(self, column, renderer, racer, text):
        # Highlight racers on their last/finished lap; reset styling first
        # because rows without laps must render plain.
        if not isinstance(renderer, gtk.CellRendererText):
            return text
        renderer.set_property('weight-set', False)
        renderer.set_property('background-set', False)
        if not isinstance(racer, Racer):
            return text
        racer_lap = racer.last_lap
        if not racer_lap:
            return text
        remaining_laps = racer_lap.remaining_laps
        last_lap = remaining_laps == 1
        finished = remaining_laps == 0
        renderer.set_property('weight-set', last_lap or finished)
        renderer.set_property('background-set', last_lap or finished)
        if last_lap or finished:
            renderer.set_property('weight', pango.WEIGHT_BOLD)
            if last_lap:
                renderer.set_property('background', 'yellow')
            elif finished:
                renderer.set_property('background', 'green')
        return text

    def _treeview_changed(self, widget, event):
        # Keep the log scrolled to its newest entry.
        adj = self.log.get_scrolled_window().get_vadjustment()
        adj.set_value(adj.upper - adj.page_size)

    def _update_timer(self):
        # Refresh the elapsed-time display; True keeps the timeout alive.
        self.race_proxy.update('time_elapsed')
        return True

    #
    # Public API
    #

    def add_lap(self, number):
        """Record one lap for the racer with the given bib number."""
        racer = self.store.find(Racer, number=number).one()
        if not racer:
            print 'recer not found', number
            return
        if racer.is_finished:
            print 'racer finished', number
            return
        print 'add_lap', number
        lap = racer.add_lap()
        self.log.append(lap)
        # We need to recalculate the number of laps
        racer.category.update()
        racer.update()
        # Update the categories list.
        self.categories.refresh(racer.category)
        self.store.flush()
        self.store._connection.commit()

    def edit_racer(self, racer):
        """Load the given racer into the edit form."""
        self._current_model = racer
        if not self.proxy:
            self.proxy = self.add_proxy(racer, self.widgets)
        self.proxy.set_model(racer)
        self.save_button.set_sensitive(True)
        self.new_button.set_sensitive(False)
        self.racer_field.set_sensitive(True)
        self.name.grab_focus()

    def save_racer(self):
        """Persist the racer currently in the edit form and reset it."""
        racer = self._current_model
        self.store.add(racer)
        if self._is_new:
            self.racers.append(racer, select=True)
        self.proxy.set_model(None)
        self.racer_field.set_sensitive(False)
        self.save_button.set_sensitive(False)
        self.new_button.set_sensitive(True)
        self.racers.refresh()
        self.store.commit()
        self._current_model = None

    #
    # Callbacks
    #

    def on_start_button__clicked(self, button):
        # Idempotent: ignore clicks once the race has started.
        if self.race.start_time:
            return
        self.race.start()
        self.race_proxy.update('start_hour')
        self.lap_number.set_sensitive(True)
        self.start_button.set_sensitive(False)
        self.store.commit()

    def on_lap_number__activate(self, widget):
        # Silently ignore non-numeric entry.
        try:
            number = int(widget.get_text())
        except:
            return
        self.add_lap(number)
        widget.set_text('')

    def on_racers__row_activated(self, widget, row):
        self._is_new = False
        if isinstance(row, Racer):
            self.edit_racer(row)

    def on_new_button__clicked(self, button):
        self._is_new = True
        racer = Racer()
        racer.category = self.category.get_selected()
        racer.race = self.race
        self.edit_racer(racer)

    def on_save_button__clicked(self, button):
        self.save_racer()

    def on_number__validate(self, widget, value):
        # Bib numbers must be unique within the race (excluding self).
        query = And(Racer.race == self.race,
                    Racer.id != self._current_model.id,
                    Racer.number == value)
        if self.store.find(Racer, query).any():
            return ValidationError('Número já utilizado')

    def on_remove_log_button__clicked(self, button):
        # Confirm, then delete the selected lap and recompute totals.
        parent = self.get_toplevel().get_toplevel()
        response = yesno('Deseja remover?', parent=parent,
                         default=False,
                         buttons=((gtk.STOCK_YES, True),
                                  (gtk.STOCK_NO, False)))
        if not response:
            return
        log = self.log.get_selected()
        racer = log.racer
        self.log.remove(log)
        self.store.remove(log)
        self.store.commit()
        # We need to recalculate the number of laps
        racer.category.update()
        racer.update()
        self.categories.refresh(racer.category)
        self.lap_number.grab_focus()
def perform_schema_migration(version):
    """
    Migrate the sqlite DB schema step by step, from `version` up to
    DATABASE_VERSION, running each update_N migration module in a temp
    directory and verifying row counts after every step.

    @param version: the schema version of the existing database file.
    @return: None. Exits the process if `version` is older than
             FIRST_DATABASE_VERSION_SUPPORTED; raises on any migration
             or integrity-check failure.
    """
    # NOTE(review): these two lists are populated below but never read
    # in this function — possibly leftovers; confirm against callers.
    to_delete_on_fail = []
    to_delete_on_success = []

    if version < FIRST_DATABASE_VERSION_SUPPORTED:
        GLSettings.print_msg("Migrations from DB version lower than %d are no longer supported!" % FIRST_DATABASE_VERSION_SUPPORTED)
        quit()

    tmpdir = os.path.abspath(os.path.join(GLSettings.db_path, 'tmp'))
    orig_db_file = os.path.abspath(os.path.join(GLSettings.db_path, 'glbackend-%d.db' % version))
    final_db_file = os.path.abspath(os.path.join(GLSettings.db_path, 'glbackend-%d.db' % DATABASE_VERSION))

    # Work on a copy of the original DB inside a fresh temp directory.
    shutil.rmtree(tmpdir, True)
    os.mkdir(tmpdir)
    shutil.copy2(orig_db_file, tmpdir)

    new_db_file = None

    try:
        # One iteration per schema step: version -> version + 1.
        while version < DATABASE_VERSION:
            old_db_file = os.path.abspath(os.path.join(tmpdir, 'glbackend-%d.db' % version))
            new_db_file = os.path.abspath(os.path.join(tmpdir, 'glbackend-%d.db' % (version + 1)))

            GLSettings.db_file = new_db_file
            GLSettings.enable_input_length_checks = False

            to_delete_on_fail.append(new_db_file)
            to_delete_on_success.append(old_db_file)

            GLSettings.print_msg("Updating DB from version %d to version %d" % (version, version + 1))

            store_old = Store(create_database('sqlite:' + old_db_file))
            store_new = Store(create_database('sqlite:' + new_db_file))

            # Here is instanced the migration script
            MigrationModule = importlib.import_module("globaleaks.db.migrations.update_%d" % (version + 1))
            migration_script = MigrationModule.MigrationScript(migration_mapping, version, store_old, store_new)

            GLSettings.print_msg("Migrating table:")

            try:
                try:
                    migration_script.prologue()
                except Exception as exception:
                    GLSettings.print_msg("Failure while executing migration prologue: %s" % exception)
                    raise exception

                for model_name, _ in migration_mapping.iteritems():
                    # Only migrate tables present in both the old and
                    # the new schema.
                    if migration_script.model_from[model_name] is not None and migration_script.model_to[model_name] is not None:
                        try:
                            migration_script.migrate_model(model_name)

                            # Commit at every table migration in order to be able to detect
                            # the precise migration that may fail.
                            migration_script.commit()
                        except Exception as exception:
                            GLSettings.print_msg("Failure while migrating table %s: %s " % (model_name, exception))
                            raise exception

                try:
                    migration_script.epilogue()
                    migration_script.commit()
                except Exception as exception:
                    GLSettings.print_msg("Failure while executing migration epilogue: %s " % exception)
                    raise exception
            finally:
                # the database should be always closed before leaving the application
                # in order to not keep leaking journal files.
                migration_script.close()

            GLSettings.print_msg("Migration stats:")

            # we open a new db in order to verify integrity of the generated file
            store_verify = Store(create_database(GLSettings.make_db_uri(new_db_file)))

            for model_name, _ in migration_mapping.iteritems():
                if model_name == 'ApplicationData':
                    continue

                if migration_script.model_from[model_name] is not None and migration_script.model_to[model_name] is not None:
                    # Row count after migration must match what the
                    # migration script says it wrote, unless the script
                    # explicitly tolerates a mismatch for this table.
                    count = store_verify.find(migration_script.model_to[model_name]).count()
                    if migration_script.entries_count[model_name] != count:
                        if migration_script.fail_on_count_mismatch[model_name]:
                            raise AssertionError("Integrity check failed on count equality for table %s: %d != %d" % \
                                                 (model_name, count, migration_script.entries_count[model_name]))
                        else:
                            GLSettings.print_msg(" * %s table migrated (entries count changed from %d to %d)" % \
                                                 (model_name, migration_script.entries_count[model_name], count))
                    else:
                        GLSettings.print_msg(" * %s table migrated (%d entry(s))" % \
                                             (model_name, migration_script.entries_count[model_name]))

            version += 1

            store_verify.close()

        # Run the (non-schema) data-update phase on the final file.
        perform_data_update(new_db_file)
    except Exception as exception:
        # NOTE(review): print_msg is called with two positional args here,
        # unlike every other call site which passes one formatted string —
        # confirm print_msg accepts varargs or change to
        # "[FATAL]: %s" % exception.
        GLSettings.print_msg("[FATAL]: ", exception)
        raise exception
    else:
        # in case of success first copy the new migrated db, then as last action delete the original db file
        shutil.copy(new_db_file, final_db_file)
        security.overwrite_and_remove(orig_db_file)
    finally:
        # Always cleanup the temporary directory used for the migration
        for f in os.listdir(tmpdir):
            tmp_db_file = os.path.join(tmpdir, f)
            security.overwrite_and_remove(tmp_db_file)
        shutil.rmtree(tmpdir)
class Form(GladeDelegate): widgets = ['name', 'number', 'category'] race_widgets = ['start_hour', 'time_elapsed'] def __init__(self): GladeDelegate.__init__(self, gladefile="interface.ui", delete_handler=self.quit_if_last) self.proxy = None self.db = create_database("sqlite:laps.sqlite") self.store = Store(self.db) self.race = self._check_race() self.race_proxy = self.add_proxy(self.race, self.race_widgets) self.register_validate_function(self._validation_changed) self._check_categories() self.setup_widgets() def _validation_changed(self, valid): self.save_button.set_sensitive(valid) def _check_race(self): race = self.store.find(Race).one() if race: return race race = Race() race.name = u'First race ever' self.store.add(race) self.store.commit() return race def _check_categories(self): categories = self.race.get_categories() if not categories.is_empty(): return for short, name in CATEGORIES.items(): cat = Category() cat.race = self.race cat.short_name = short cat.name = name cat.total_laps = CATEGORIES_LAPS[short] self.store.add(cat) #for i in range(200): # racer = Racer() # racer.name = unicode(i) # racer.number = i # racer.race = self.race # racer.category = cat # self.store.add(racer) self.store.commit() def setup_widgets(self): self.save_button.set_sensitive(False) self.racer_field.set_sensitive(False) if not self.race.start_time: self.lap_number.set_sensitive(False) else: self.start_button.set_sensitive(False) large = pango.FontDescription('Lucida Sans 32') self.lap_number.modify_font(large) self.time_elapsed.modify_font(large) self.start_hour.modify_font(large) self.hpaned1.set_position(550) # Categores options = sorted([(c.name, c.id) for c in self.race.get_categories()]) self.category.prefill(options) # Lists self.racers.set_columns([ Column('name', title="Nome", data_type=str, sorted=True), Column('number', title='Número', data_type=int), Column('completed_laps', title='Completas', data_type=int), Column('total_time', title='Tempo', data_type=str), 
Column('category_str', title="Categoria", data_type=str) ]) self.categories.set_columns([ Column('short_name', title="Cat", data_type=str, sorted=True), Column('name', title="Category", data_type=str, expand=True), Column('total_racers', title="Corredores", data_type=str), Column('completed_laps', title="Comp", data_type=int), Column('total_laps', title="Total", data_type=int)]) self.log.set_columns([Column('id', data_type=int, visible=False), Column('number', title="#", data_type=int), Column('name', title="Nome", expand=True, data_type=str), Column('event_time', title="Hora", data_type=datetime.time), Column('lap_time', title="Tempo Volta", data_type=str), Column('lap_number', title="Volta N", data_type=int), Column('category', title="Cat", data_type=str), Column('remaining_laps', title="Falta", data_type=int), ]) for racer in self.store.find(Racer): self.racers.append(racer) self.log.extend(self.store.find(RacerLap)) self.categories.extend(self.store.find(Category)) # timer gobject.timeout_add(1000, self._update_timer) # Always show bottom of the log list. 
self.log.get_treeview().connect('size-allocate', self._treeview_changed) # Show finishing categories in bold self.categories.set_cell_data_func(self._on_categories__cell_data_func) self.log.set_cell_data_func(self._on_log__cell_data_func) self.racers.set_cell_data_func(self._on_racers__cell_data_func) def _on_categories__cell_data_func(self, column, renderer, category, text): if not isinstance(renderer, gtk.CellRendererText): return text completed_laps = category.completed_laps last_lap = completed_laps == (category.total_laps - 1) finished = completed_laps == category.total_laps renderer.set_property('weight-set', last_lap or finished) renderer.set_property('background-set', last_lap or finished) if last_lap or finished: renderer.set_property('weight', pango.WEIGHT_BOLD) if last_lap: renderer.set_property('background', 'yellow') elif finished: renderer.set_property('background', 'green') return text def _on_log__cell_data_func(self, column, renderer, racer_lap, text): if not isinstance(renderer, gtk.CellRendererText): return text remaining_laps = racer_lap.remaining_laps last_lap = remaining_laps == 1 finished = remaining_laps == 0 renderer.set_property('weight-set', last_lap or finished) renderer.set_property('background-set', last_lap or finished) if last_lap or finished: renderer.set_property('weight', pango.WEIGHT_BOLD) if last_lap: renderer.set_property('background', 'yellow') elif finished: renderer.set_property('background', 'green') return text def _on_racers__cell_data_func(self, column, renderer, racer, text): if not isinstance(renderer, gtk.CellRendererText): return text renderer.set_property('weight-set', False) renderer.set_property('background-set', False) if not isinstance(racer, Racer): return text racer_lap = racer.last_lap if not racer_lap: return text remaining_laps = racer_lap.remaining_laps last_lap = remaining_laps == 1 finished = remaining_laps == 0 renderer.set_property('weight-set', last_lap or finished) 
renderer.set_property('background-set', last_lap or finished) if last_lap or finished: renderer.set_property('weight', pango.WEIGHT_BOLD) if last_lap: renderer.set_property('background', 'yellow') elif finished: renderer.set_property('background', 'green') return text def _treeview_changed(self, widget, event): adj = self.log.get_scrolled_window().get_vadjustment() adj.set_value(adj.upper - adj.page_size) def _update_timer(self): self.race_proxy.update('time_elapsed') return True # # Public API # def add_lap(self, number): racer = self.store.find(Racer, number=number).one() if not racer: print 'recer not found', number return if racer.is_finished: print 'racer finished', number return print 'add_lap', number lap = racer.add_lap() self.log.append(lap) # We need to recalculate the number of laps racer.category.update() racer.update() # Update the categories list. self.categories.refresh(racer.category) self.store.flush() self.store._connection.commit() def edit_racer(self, racer): self._current_model = racer if not self.proxy: self.proxy = self.add_proxy(racer, self.widgets) self.proxy.set_model(racer) self.save_button.set_sensitive(True) self.new_button.set_sensitive(False) self.racer_field.set_sensitive(True) self.name.grab_focus() def save_racer(self): racer = self._current_model self.store.add(racer) if self._is_new: self.racers.append(racer, select=True) self.proxy.set_model(None) self.racer_field.set_sensitive(False) self.save_button.set_sensitive(False) self.new_button.set_sensitive(True) self.racers.refresh() self.store.commit() self._current_model = None # # Callbacks # def on_start_button__clicked(self, button): if self.race.start_time: return self.race.start() self.race_proxy.update('start_hour') self.lap_number.set_sensitive(True) self.start_button.set_sensitive(False) self.store.commit() def on_lap_number__activate(self, widget): try: number = int(widget.get_text()) except: return self.add_lap(number) widget.set_text('') def 
on_racers__row_activated(self, widget, row): self._is_new = False if isinstance(row, Racer): self.edit_racer(row) def on_new_button__clicked(self, button): self._is_new = True racer = Racer() racer.category = self.category.get_selected() racer.race = self.race self.edit_racer(racer) def on_save_button__clicked(self, button): self.save_racer() def on_number__validate(self, widget, value): query = And(Racer.race == self.race, Racer.id != self._current_model.id, Racer.number == value) if self.store.find(Racer, query).any(): return ValidationError('Número já utilizado') def on_remove_log_button__clicked(self, button): parent = self.get_toplevel().get_toplevel() response = yesno('Deseja remover?', parent=parent, default=False, buttons=((gtk.STOCK_YES, True), (gtk.STOCK_NO, False))) if not response: return log = self.log.get_selected() racer = log.racer self.log.remove(log) self.store.remove(log) self.store.commit() # We need to recalculate the number of laps racer.category.update() racer.update() self.categories.refresh(racer.category) self.lap_number.grab_focus()
class TSExperimentSvcAgent(TSLocalAgent): agentId = expsvcAgentId uuid = expsvcAgentUUID agentType = expsvcAgentType def __init__(self, server, connString): TSLocalAgent.__init__(self, server) self.rootAgent = server.localAgents[0] self.userAgent = server.localAgents[1] self.database = create_database(connString) self.dbStore = Store(self.database) self.server.listenerAgents.append( AgentListener('load', self.onAgentRegister, self.onAgentDisconnect)) self.loadAgents = {} self.resourceManager = ResourceManager(self.dbStore) @inlineCallbacks def registerNewAgent(self, client, agent): hostInfo = yield agent.getHostInfo() agentObj = Agent() agentObj.uuid = uuid.UUID(client.agentUuid) agentObj.agentType = 'load' # FIXME: Sometimes domainname passed with hostname agentObj.hostname = unicode(hostInfo.hostname) agentObj.domainname = unicode(hostInfo.domainname) agentObj.osname = hostInfo.osname agentObj.release = hostInfo.release agentObj.machineArch = hostInfo.machineArch agentObj.numCPUs = hostInfo.numCPUs agentObj.numCores = hostInfo.numCores agentObj.memTotal = hostInfo.memTotal agentObj.lastOnline = datetime.now() yield self.dbStore.add(agentObj) yield self.dbStore.commit() returnValue(agentObj) @inlineCallbacks def onAgentRegister(self, client): if client.agentType == 'load': if client.agentUuid in self.loadAgents: raise JSONTS.Error( JSONTS.AE_INVALID_STATE, "Loader agent with uuid '%s' already registered" % client.agentUuid) agentId = client.getId() agent = self.createRemoteAgent(agentId, LoadAgent) agentSet = yield self.dbStore.find( Agent, Agent.uuid == uuid.UUID(client.agentUuid)) agentObj = yield agentSet.one() if agentObj is None: agentObj = yield self.registerNewAgent(client, agent) else: # Update last online timestamp agentObj.lastOnline = datetime.now() yield self.dbStore.add(agentObj) yield self.dbStore.commit() agentInfo = LoadAgentInfo(agent, agentObj) self.loadAgents[client.agentUuid] = agentInfo print "Registered agent %s with uuid '%s'" % 
(agentObj.hostname, client.agentUuid) reactor.callLater(0.0, self.fetchWorkloadTypes, agent, agentObj) reactor.callLater(0.1, self.resourceManager.registerLoadAgent, agent, agentObj) def onAgentDisconnect(self, client): if client.agentType == 'load': yield self.resourceManager.unregisterLoadAgent(agentInfo.agentObj) agentInfo = self.loadAgents[client.agentUuid] print 'Disconnected agent %s' % agentInfo.agentObj.hostname del self.loadAgents[client.agentUuid] @inlineCallbacks def fetchWorkloadTypes(self, agent, agentObj): wltypeList = yield agent.getWorkloadTypes() wltSet = yield self.dbStore.find(WorkloadType, WorkloadType.agent == agentObj) for wltypeName, wltype in wltypeList.iteritems(): wltObj = wltSet.find(WorkloadType.name == wltypeName) wltObj = wltObj.any() if wltObj is None: wltObj = WorkloadType() wltObj.agent = agentObj wltObj.name = wltypeName wltObj.module = wltype.module wltObj.modulePath = wltype.path wltObj.classList = ','.join(wltype.wlclass) yield self.dbStore.add(wltObj) paramSet = yield self.dbStore.find( WorkloadParam, WorkloadParam.workloadType == wltObj) # Update parameter list for paramObj in paramSet: if paramObj.name not in wltype.params: paramObj.remove() continue paramObj.data = wltype.params.serialize() # Remove serialized object from params array del wltype.params[paramObj.name] yield self.dbStore.add(paramObj) for paramName, param in wltype.params.iteritems(): paramObj = WorkloadParam() paramObj.name = paramName paramObj.workloadType = wltObj paramObj.paramData = TSWorkloadParameter.serialize(param) yield self.dbStore.add(paramObj) yield self.dbStore.commit() @TSMethodImpl(ExpSvcAgent.listAgents) @inlineCallbacks def listAgents(self, context): agentsList = {} agentSet = yield self.dbStore.find(Agent) for agentObj in agentSet: # TODO: should filter agents according to users ACL agentUuid = str(agentObj.uuid) descriptor = TSExpSvcAgentDescriptor() descriptor.agentId = agentObj.id descriptor.lastOnline = 
datetimeToTSTime(agentObj.lastOnline) descriptor.isOnline = agentUuid in self.loadAgents for field in ('hostname', 'domainname', 'osname', 'release', 'machineArch', 'numCPUs', 'numCores', 'memTotal'): setattr(descriptor, field, getattr(agentObj, field)) agentsList[agentUuid] = descriptor returnValue(agentsList) @TSMethodImpl(ExpSvcAgent.getWorkloadTypes) @inlineCallbacks def getWorkloadTypes(self, context, agentId): wltSet = yield self.dbStore.find(WorkloadType, WorkloadType.aid == agentId) paramsQuery = (WorkloadParam, Join( WorkloadType, And(WorkloadType.aid == agentId, WorkloadParam.wltid == WorkloadType.id))) paramsGlobalSet = yield self.dbStore.using( *paramsQuery).find(WorkloadParam) wltypeList = {} for wltObj in wltSet: paramsSet = yield paramsGlobalSet.find( WorkloadParam.workloadType == wltObj) wltype = TSWorkloadType() wltype.module = wltObj.module wltype.path = wltObj.modulePath wltype.wlclass = wltObj.classList.split(',') wltype.params = {} for paramObj in paramsSet: param = TSWorkloadParameter.deserialize(paramObj.paramData) wltype.params[paramObj.name] = param wltypeList[wltObj.name] = wltype returnValue(wltypeList) @TSMethodImpl(ExpSvcAgent.getAgentResources) @inlineCallbacks def getAgentResources(self, context, agentId): resourceInfo = yield self.resourceManager.getAgentResources(agentId) returnValue(resourceInfo) @inlineCallbacks def _getProfileObj(self, context, profileName, profile): client = context.client if client.auth == TSServerClient.AUTH_MASTER or \ client.auth == TSServerClient.AUTH_ADMIN: userId = profile.userId if profile.userId is not None else 0 else: agentId = client.getId() userId = self.userAgent.agentUsers[agentId] profileSet = yield self.dbStore.find( ExperimentProfile, And(ExperimentProfile.name == unicode(profileName), ExperimentProfile.userId == userId)) returnValue((profileSet.one(), userId)) @TSMethodImpl(ExpSvcAgent.listProfiles) @inlineCallbacks def listProfiles(self, context): '''If context is of administrative rights 
(master.key or admin), list all experiments, or select only owned experiment''' client = context.client # TODO: Support for experiment sharing profiles = {} if client.auth == TSServerClient.AUTH_MASTER or \ client.auth == TSServerClient.AUTH_ADMIN: profileSet = yield self.dbStore.find(ExperimentProfile) else: agentId = client.getId() userId = self.userAgent.agentUsers[agentId] profileSet = yield self.dbStore.find( ExperimentProfile, ExperimentProfile.userId == userId) for profileObj in profileSet: profiles[profileObj.name] = _profileObjToTSO( profileObj, TSExperimentProfileInfo) returnValue(profiles) @TSMethodImpl(ExpSvcAgent.getProfile) @inlineCallbacks def getProfile(self, context, profileName, profile): profileObj, _ = yield self._getProfileObj(context, profileName, profile) fullProfile = _profileObjToTSO(profileObj, TSExperimentProfile) fullProfile.threadpools = {} fullProfile.workloads = {} threadpoolSet = yield self.dbStore.find( ExperimentThreadPool, ExperimentThreadPool.profile == profileObj) for threadpoolObj in threadpoolSet: threadpool = TSExperimentThreadPool() threadpool.agentId = threadpoolObj.aid threadpool.numWorkers = threadpoolObj.numWorkers fullProfile.threadpools[threadpoolObj.name] = threadpool workloadSet = yield self.dbStore.find( ExperimentWorkload, ExperimentWorkload.profile == profileObj) for workloadObj in workloadSet: workload = TSExperimentWorkload() workload.agentId = workloadObj.threadpool.aid if workloadObj.threadpool is not None else -1 workload.workloadType = workloadObj.workloadType.name if workloadObj.workloadType is not None else '' workload.threadpool = workloadObj.threadpool.name if workloadObj.threadpool is not None else '' workload.params = workloadObj.params fullProfile.workloads[workloadObj.name] = workload returnValue(fullProfile) @TSMethodImpl(ExpSvcAgent.configureProfile) @inlineCallbacks def configureProfile(self, context, profileName, profile): profileObj, userId = yield self._getProfileObj(context, profileName, 
profile) newProfile = False threadpools = {} if profileObj is None: profileObj = ExperimentProfile() profileObj.name = unicode(profileName) profileObj.userId = userId profileObj.creationDate = datetime.now() newProfile = True profileObj.description = unicode(profile.description) yield self.dbStore.add(profileObj) if not newProfile: threadpoolSet = yield self.dbStore.find( ExperimentThreadPool, ExperimentThreadPool.profile == profileObj) # Update or remove existing threadpools for threadpoolObj in threadpoolSet: if threadpoolObj.name in profile.threadpools: threadpool = profile.threadpools[threadpoolObj.name] threadpoolObj.aid = threadpool.agentId threadpoolObj.numWorkers = threadpool.numWorkers del profile.threadpools[threadpoolObj.name] yield self.dbStore.add(threadpoolObj) threadpools[threadpoolObj.name] = threadpoolObj else: yield self.dbStore.remove(threadpoolObj) # Add new threadpools for threadpoolName, threadpool in profile.threadpools.iteritems(): threadpoolObj = ExperimentThreadPool() threadpoolObj.name = unicode(threadpoolName) threadpoolObj.aid = threadpool.agentId threadpoolObj.profile = profileObj threadpoolObj.numWorkers = threadpool.numWorkers threadpools[threadpoolName] = threadpoolObj yield self.dbStore.add(threadpoolObj) @inlineCallbacks def _setWorkloadType(workload, workloadObj): if workload.workloadType is not None and workload.agentId is not None: workloadTypeSet = yield self.dbStore.find( WorkloadType, And(WorkloadType.aid == workload.agentId, WorkloadType.name == unicode(workload.workloadType))) workloadTypeObj = workloadTypeSet.one() workloadObj.wltid = workloadTypeObj.id else: workloadObj.wltid = None def _setThreadpool(workload, workloadObj): workloadObj.tpid = threadpools[workloadObj.name].id \ if workload.threadpool is not None \ else None if not newProfile: workloadSet = yield self.dbStore.find( ExperimentWorkload, ExperimentWorkload.profile == profileObj) for workloadObj in workloadSet: if workloadObj.name in profile.workloads: 
workload = profile.workloads[workloadObj.name] _setThreadpool(workload, workloadObj) yield _setWorkloadType(workload, workloadObj) workloadObj.params = workload.params del profile.workloads[workloadObj.name] yield self.dbStore.add(workloadObj) else: yield self.dbStore.remove(workloadObj) for workloadName, workload in profile.workloads.iteritems(): workloadObj = ExperimentWorkload() workloadObj.name = unicode(workloadName) workloadObj.profile = profileObj _setThreadpool(workload, workloadObj) yield _setWorkloadType(workload, workloadObj) workloadObj.params = workload.params # TODO: implement workload steps workloadObj.stepsId = None yield self.dbStore.add(workloadObj) yield self.dbStore.commit()
class TSUserAgent(TSLocalAgent):
    """User-authentication agent backed by a Storm database."""
    agentId = userAgentId
    uuid = userAgentUUID
    agentType = userAgentType

    def __init__(self, server, connString):
        TSLocalAgent.__init__(self, server)

        self.client.getId()

        self.logger = logging.getLogger('UserAgent')

        self.rootAgent = server.localAgents[0]
        # Maps client agent id -> authenticated user's DB id.
        self.agentUsers = {}
        self.authServices = {'local': LocalAuth()}

        self.database = create_database(connString)
        self.dbStore = Store(self.database)

        self.server.listenerFlows.append(
            Flow(dstAgentId=userAgentId, command='authUser'))

    @TSMethodImpl(UserAgent.authUser)
    def authUser(self, context, **kw):
        @inlineCallbacks
        def implementation(context, userName, userPassword):
            userSet = yield self.dbStore.find(User,
                                              User.name == str(userName))
            user = yield userSet.one()

            self.logger.info('Authorizing user %s', userName)

            if user is None:
                self.logger.warning('Error authorizing user: no such user: %s',
                                    userName)
                raise UserAuthError('No such user: %s' % userName)

            authMethod = self.authServices[user.authService]
            if authMethod.authentificate(user, userPassword):
                agentId = context.client.getId()
                self.agentUsers[agentId] = user.id

                roles = yield user.roles
                role = self._setupRoles(context.client, roles)

                userDescr = TSUserDescriptor()
                userDescr.name = user.gecosName
                userDescr.role = role
                returnValue(userDescr)

        return implementation(context, **kw)

    def _setupRoles(self, client, roles):
        """Authorize *client* with the strongest of its roles and return it.

        BUGFIX: the original final else unconditionally reset maxRole to
        AUTH_USER, downgrading an admin/operator found earlier in the list
        (e.g. roles ['admin', 'user'] ended up as AUTH_USER). Roles are now
        only ever promoted, never downgraded.
        """
        # First pass - identify maximum role
        maxRole = TSServerClient.AUTH_NONE
        for role in roles:
            if role.role == 'admin':
                maxRole = TSServerClient.AUTH_ADMIN
            elif role.role == 'operator' and \
                    maxRole != TSServerClient.AUTH_ADMIN:
                maxRole = TSServerClient.AUTH_OPERATOR
            elif maxRole == TSServerClient.AUTH_NONE:
                maxRole = TSServerClient.AUTH_USER

        client.authorize(maxRole)

        if maxRole != TSServerClient.AUTH_ADMIN:
            # TODO: For user/operator need to set ACLs
            pass

        return maxRole

    def onDisconnect(self):
        self.dbStore.close()
class TSUserAgent(TSLocalAgent):
    """User-authentication agent backed by a Storm database."""
    agentId = userAgentId
    uuid = userAgentUUID
    agentType = userAgentType

    def __init__(self, server, connString):
        TSLocalAgent.__init__(self, server)

        self.client.getId()

        self.logger = logging.getLogger('UserAgent')

        self.rootAgent = server.localAgents[0]
        # Maps client agent id -> authenticated user's DB id.
        self.agentUsers = {}
        self.authServices = {'local': LocalAuth()}

        self.database = create_database(connString)
        self.dbStore = Store(self.database)

        self.server.listenerFlows.append(Flow(dstAgentId=userAgentId,
                                              command='authUser'))

    @TSMethodImpl(UserAgent.authUser)
    def authUser(self, context, **kw):
        @inlineCallbacks
        def implementation(context, userName, userPassword):
            userSet = yield self.dbStore.find(User,
                                              User.name == str(userName))
            user = yield userSet.one()

            self.logger.info('Authorizing user %s', userName)

            if user is None:
                self.logger.warning('Error authorizing user: no such user: %s',
                                    userName)
                raise UserAuthError('No such user: %s' % userName)

            authMethod = self.authServices[user.authService]
            if authMethod.authentificate(user, userPassword):
                agentId = context.client.getId()
                self.agentUsers[agentId] = user.id

                roles = yield user.roles
                role = self._setupRoles(context.client, roles)

                userDescr = TSUserDescriptor()
                userDescr.name = user.gecosName
                userDescr.role = role
                returnValue(userDescr)

        return implementation(context, **kw)

    def _setupRoles(self, client, roles):
        """Authorize *client* with the strongest of its roles and return it.

        BUGFIX: the original final else unconditionally reset maxRole to
        AUTH_USER, downgrading an admin/operator found earlier in the list
        (e.g. roles ['admin', 'user'] ended up as AUTH_USER). Roles are now
        only ever promoted, never downgraded.
        """
        # First pass - identify maximum role
        maxRole = TSServerClient.AUTH_NONE
        for role in roles:
            if role.role == 'admin':
                maxRole = TSServerClient.AUTH_ADMIN
            elif role.role == 'operator' and \
                    maxRole != TSServerClient.AUTH_ADMIN:
                maxRole = TSServerClient.AUTH_OPERATOR
            elif maxRole == TSServerClient.AUTH_NONE:
                maxRole = TSServerClient.AUTH_USER

        client.authorize(maxRole)

        if maxRole != TSServerClient.AUTH_ADMIN:
            # TODO: For user/operator need to set ACLs
            pass

        return maxRole

    def onDisconnect(self):
        self.dbStore.close()
# Demo script fragment: populate Things grouped by Kind and query them back.
# (red_rose, flowers, store, Thing and Kind are defined earlier in the script.)

# Attach the already-created rose to the 'Flowers' kind.
red_rose.kind = flowers
store.add(red_rose)

violet = Thing()
violet.name = u'Violet'
violet.kind = flowers
store.add(violet)

# A second kind with one member.
vases = Kind()
vases.name = u"Vases"
store.add(vases)

amphora = Thing()
amphora.name= u'Amphora'
amphora.kind = vases;
store.add(amphora)

store.commit()

# Join Kind and Thing on the foreign key and filter by kind name;
# each result row is a (Kind, Thing) tuple.
all_flowers = store.find((Kind, Thing),
                         Thing.kind_id == Kind.id,
                         Kind.name == u'Flowers')
print [(kind.name, thing.name) for kind, thing in all_flowers]
all_vases = store.find((Kind, Thing),
                       Thing.kind_id == Kind.id,
                       Kind.name == u'Vases')
print [(kind.name, thing.name) for kind, thing in all_vases]