def add_lap(self):
    """Record a new lap for this racer in its current race.

    The lap is stamped with the current wall-clock time and added to
    the racer's store; the new ``RacerLap`` is returned.
    """
    new_lap = RacerLap()
    new_lap.race = self.race
    new_lap.racer = self
    new_lap.event_time = datetime.datetime.now()
    Store.of(self).add(new_lap)
    return new_lap
def _remove_permission(permission):
    """Remove *permission* from its store.

    A ``None`` argument means the permission has already been removed,
    so it is silently ignored.
    """
    if permission is not None:
        Store.of(permission).remove(permission)
def test_BranchJobPruner(self):
    # Garbo should remove jobs completed over 30 days ago.
    switch_dbuser('testadmin')
    store = IMasterStore(Job)

    db_branch = self.factory.makeAnyBranch()
    db_branch.branch_format = BranchFormat.BZR_BRANCH_5
    db_branch.repository_format = RepositoryFormat.BZR_KNIT_1
    Store.of(db_branch).flush()
    branch_job = BranchUpgradeJob.create(
        db_branch, self.factory.makePerson())
    # Age the job so it falls past the pruner's cutoff.
    branch_job.job.date_finished = THIRTY_DAYS_AGO

    # Sanity check: the job row exists before the pruner runs.
    self.assertEqual(
        store.find(
            BranchJob,
            BranchJob.branch == db_branch.id).count(),
        1)

    self.runDaily()

    # After the daily garbo run the old job is gone.
    switch_dbuser('testadmin')
    self.assertEqual(
        store.find(
            BranchJob,
            BranchJob.branch == db_branch.id).count(),
        0)
def create(self, name, uri=None):
    """Create a new store called C{name}.

    @param uri: Optionally, the URI to use.
    @raises ZStormError: Raised if C{uri} is None and no default URI
        exists for C{name}.  Also raised if a store with C{name}
        already exists.
    """
    if uri is None:
        # No explicit URI: fall back to the default database
        # registered under this name.
        database = self._default_databases.get(name)
        if database is None:
            raise ZStormError("Store named '%s' not found" % name)
    else:
        database = self._get_database(uri)
    if name is not None and self._named.get(name) is not None:
        raise ZStormError("Store named '%s' already exists" % name)
    store = Store(database)
    # Attach the synchronizer to the store object so they share a
    # lifetime.
    store.__synchronizer = StoreSynchronizer(store)
    self._stores[id(store)] = store
    if name is not None:
        self._named[name] = store
    self._name_index[store] = name
    return store
def _process_certificate(self, cert):
    """Process a client certificate and resolve it to a user.

    Extracts the NIA number from the certificate subject, then looks
    up the matching UserProfile/UserData rows.  Returns a dict with
    'success' plus either 'user' or an error 'message'.
    """
    try:
        # The NIA is expected after the literal 'NIA' in the 5th
        # subject component.  Parsing to int here (instead of later)
        # means a non-numeric NIA is reported as an invalid
        # certificate rather than raising ValueError; the previous
        # bare `except:` also swallowed KeyboardInterrupt/SystemExit.
        nia = int(cert.get_subject().get_components()[4][1].split(
            'NIA')[1].lstrip())
    except (AttributeError, IndexError, TypeError, ValueError):
        return {
            'success': False,
            'message': 'El certificado no es válido.'
        }

    store = Store(Database().get_database())

    result = store.find(UserProfile, UserProfile.nia == nia).one()
    if not result:
        return {
            'success': False,
            'message':
                'Este certificado digital no tiene un usuario asociado.'
        }

    user = store.find(UserData, UserData.id == result.user_id).one()
    if not user:
        return {
            'success': False,
            'message':
                'Este certificado digital no tiene un usuario asociado.'
        }

    return {'success': True, 'user': user}
def _createPreviewDiff(self, line_count=0, added=None, removed=None,
                       conflicts=None, diffstat=None):
    # Login an admin to avoid the launchpad.Edit requirements.
    login("*****@*****.**")
    # Create a dummy preview diff, and make sure the branches have the
    # correct last scanned ids to ensure that the new diff is not stale.
    bmp = self.factory.makeBranchMergeProposal()
    if line_count:
        content = "".join(unified_diff("", "random content"))
    else:
        content = ""
    preview = bmp.updatePreviewDiff(content, u"rev-a", u"rev-b",
                                    conflicts=conflicts)
    bmp.source_branch.last_scanned_id = preview.source_revision_id
    bmp.target_branch.last_scanned_id = preview.target_revision_id
    # Update the values directly sidestepping the security.
    naked_diff = removeSecurityProxy(preview)
    naked_diff.diff_lines_count = line_count
    naked_diff.added_lines_count = added
    naked_diff.removed_lines_count = removed
    naked_diff.diffstat = diffstat
    # In order to get the canonical url of the librarian file, we need
    # to commit.
    # transaction.commit()
    # Make sure that the preview diff is in the db for the test.
    # Storm bug: 324724
    Store.of(bmp).flush()
    return preview
def cleanUp(self): """See `IBuildFarmJob`.""" # This class is not itself database-backed. But it delegates to # one that is. We can't call its SQLObject destroySelf method # though, because then the BuildQueue and the BranchJob would # both try to delete the attached Job. Store.of(self.context).remove(self.context)
def test_preloads_irc_nicks_and_preferredemail(self): """Test that IRC nicks and preferred email addresses are preloaded.""" # Create three people with IRC nicks, and one without. people = [] for num in range(3): person = self.factory.makePerson(displayname='foobar %d' % num) getUtility(IIrcIDSet).new(person, 'launchpad', person.name) people.append(person) people.append(self.factory.makePerson(displayname='foobar 4')) # Remember the current values for checking later, and throw out # the cache. expected_nicks = dict( (person.id, list(person.ircnicknames)) for person in people) expected_emails = dict( (person.id, person.preferredemail) for person in people) Store.of(people[0]).invalidate() results = list(self.searchVocabulary(None, u'foobar')) with StormStatementRecorder() as recorder: self.assertEquals(4, len(results)) for person in results: self.assertEqual( expected_nicks[person.id], person.ircnicknames) self.assertEqual( expected_emails[person.id], person.preferredemail) self.assertThat(recorder, HasQueryCount(Equals(0)))
def cleanUp(self):
    """See `IBuildFarmJob`.

    Classes that derive from BuildFarmJobOld need to clean up
    after themselves correctly.
    """
    # This object is its own database row; remove it directly.
    Store.of(self).remove(self)
def test_implements_interface(self):
    # The implementation implements the interface correctly.
    dsd_comment = self.factory.makeDistroSeriesDifferenceComment()
    # Flush the store to ensure db constraints are triggered.
    Store.of(dsd_comment).flush()
    verifyObject(IDistroSeriesDifferenceComment, dsd_comment)
def create(self, name, uri=None):
    """Create a new store called C{name}.

    @param uri: Optionally, the URI to use.
    @raises ZStormError: Raised if C{uri} is None and no default URI
        exists for C{name}.  Also raised if a store with C{name}
        already exists.
    """
    if uri is None:
        # No explicit URI: fall back to the default database
        # registered under this name.
        database = self._default_databases.get(name)
        if database is None:
            raise ZStormError("Store named '%s' not found" % name)
    else:
        database = self._get_database(uri)
    if name is not None and self._named.get(name) is not None:
        raise ZStormError("Store named '%s' already exists" % name)
    store = Store(database)
    # Mark the store for transaction registration; two-phase commit
    # is looked up per-name from the defaults (False when unset).
    store._register_for_txn = True
    store._tpc = self._default_tpcs.get(name, False)
    store._event.hook(
        "register-transaction", register_store_with_transaction,
        weakref.ref(self))
    self._stores[id(store)] = store
    if name is not None:
        self._named[name] = store
    self._name_index[store] = name
    return store
def _test_AnswerContactPruner(self, status, interval, expected_count=0):
    # Garbo should remove answer contacts for accounts with given
    # 'status' which was set more than 'interval' days ago.
    switch_dbuser('testadmin')
    store = IMasterStore(AnswerContact)

    person = self.factory.makePerson()
    person.addLanguage(getUtility(ILanguageSet)['en'])
    question = self.factory.makeQuestion()
    with person_logged_in(question.owner):
        question.target.addAnswerContact(person, person)
    Store.of(question).flush()

    # Sanity check: the answer contact exists before pruning.
    self.assertEqual(
        store.find(
            AnswerContact,
            AnswerContact.person == person.id).count(),
        1)

    account = person.account
    account.status = status
    # We flush because a trigger sets the date_status_set and we need to
    # modify it ourselves.
    Store.of(account).flush()
    if interval is not None:
        account.date_status_set = interval

    self.runDaily()

    switch_dbuser('testadmin')
    self.assertEqual(
        store.find(
            AnswerContact,
            AnswerContact.person == person.id).count(),
        expected_count)
def as_subsonic_child(self, user):
    """Build the Subsonic 'child' dict that describes this folder for
    the given user, including per-user star/rating data when present."""
    store = Store.of(self)

    info = {
        'id': str(self.id),
        'isDir': True,
        'title': self.name,
        'album': self.name,
        'created': self.created.isoformat()
    }
    if not self.root:
        info['parent'] = str(self.parent_id)
        info['artist'] = self.parent.name
    if self.has_cover_art:
        info['coverArt'] = str(self.id)

    # Per-user star and rating rows are keyed on (user, folder).
    starred = store.get(StarredFolder, (user.id, self.id))
    if starred:
        info['starred'] = starred.date.isoformat()

    rating = store.get(RatingFolder, (user.id, self.id))
    if rating:
        info['userRating'] = rating.rating

    avg_rating = store.find(
        RatingFolder,
        RatingFolder.rated_id == self.id).avg(RatingFolder.rating)
    if avg_rating:
        info['averageRating'] = avg_rating

    return info
def test_search_query_count(self):
    # Verify query count.
    Store.of(self.milestone).flush()
    with StormStatementRecorder() as recorder:
        list(self.bugtask_set.search(self.params))
    # 1 query for the tasks, 1 query for the product (target) eager
    # loading.
    # NOTE(review): the comment above accounts for 2 queries but the
    # assertion allows 4 -- confirm which figure is intended.
    self.assertThat(recorder, HasQueryCount(Equals(4)))
def test_getForDifference(self):
    # The utility can get comments by id.
    dsd_comment = self.factory.makeDistroSeriesDifferenceComment()
    # Flush so the comment has a database id to look up.
    Store.of(dsd_comment).flush()
    self.assertEqual(
        dsd_comment,
        get_comment_source().getForDifference(
            dsd_comment.distro_series_difference, dsd_comment.id))
def test_unmask_right_message(self):
    # Unmasking picks the right message, and doesn't try to violate
    # the unique constraint on is_current_upstream.
    inactive = self.factory.makeSuggestion(self.pofile, self.potmsgset)
    remove_translations(ids=[self.ubuntu.id])
    # Reload the upstream message so its flags reflect the removal.
    Store.of(self.upstream).autoreload()
    self.assertTrue(self.upstream.is_current_ubuntu)
    self.assertFalse(inactive.is_current_ubuntu)
def test_acquireRevisionAuthors(self):
    # AcquireRevisionAuthors creates new authors only if none exists
    # with that name.
    author1 = self.revision_set.acquireRevisionAuthors(['name1'])['name1']
    self.assertEqual(author1.name, 'name1')
    # Flush so the first author is visible to the second lookup.
    Store.of(author1).flush()
    author2 = self.revision_set.acquireRevisionAuthors(['name1'])['name1']
    self.assertEqual(
        removeSecurityProxy(author1).id, removeSecurityProxy(author2).id)
def test_product_affiliation_query_count(self):
    # Only 2 queries are expected, selects from:
    # - Product, Person
    # NOTE(review): the comment above says 2 queries but the recorder
    # asserts 4 -- confirm which is intended.
    person = self.factory.makePerson()
    product = self.factory.makeProduct(owner=person, name='pting')
    # Drop cached objects so the recorder sees every query issued.
    Store.of(product).invalidate()
    with StormStatementRecorder() as recorder:
        IHasAffiliation(product).getAffiliationBadges([person])
    self.assertThat(recorder, HasQueryCount(Equals(4)))
def new(self, bug, datechanged, person, whatchanged,
        oldvalue=None, newvalue=None, message=None):
    """See IBugActivitySet."""
    # Persist a new activity record and flush immediately so the
    # caller sees the row in the database.
    record = BugActivity(
        bug=bug,
        datechanged=datechanged,
        person=person,
        whatchanged=whatchanged,
        oldvalue=oldvalue,
        newvalue=newvalue,
        message=message)
    Store.of(record).flush()
    return record
def test_derivatives_email(self):
    # Make sure the package_derivatives_email column stores data
    # correctly.
    email = "*****@*****.**"
    distro = self.factory.makeDistribution()
    with person_logged_in(distro.owner):
        distro.package_derivatives_email = email
    # Flush so the value round-trips through the database.
    Store.of(distro).flush()
    self.assertEqual(email, distro.package_derivatives_email)
def test_search_query_count(self):
    # Verify query count.
    # 1. Query all the distroseries to determine the distro's
    #    currentseries.
    # 2. Query the bugtasks.
    Store.of(self.milestone).flush()
    with StormStatementRecorder() as recorder:
        list(self.bugtask_set.search(self.params))
    self.assertThat(recorder, HasQueryCount(Equals(4)))
def test_permissions(self):
    # The branch scanner creates TranslationTemplatesBuilds.  It has
    # the database privileges it needs for that.
    branch = self.factory.makeBranch()
    switch_dbuser("branchscanner")
    build = getUtility(ITranslationTemplatesBuildSource).create(branch)
    # Writing the new objects to the database violates no access
    # restrictions.
    Store.of(build).flush()
def test_deleteIncludesResult(self):
    """Ensure deleting CodeImport objects deletes associated results."""
    code_import_result = self.factory.makeCodeImportResult()
    code_import_result_id = code_import_result.id
    CodeImportSet().delete(code_import_result.code_import)
    # CodeImportResult.get should not raise anything.
    # But since it populates the object cache, we must invalidate it.
    Store.of(code_import_result).invalidate(code_import_result)
    self.assertRaises(
        SQLObjectNotFound, CodeImportResult.get, code_import_result_id)
def linkBranch(self, branch, registrant):
    # Return the existing link if this branch is already linked.
    branch_link = self.getBranchLink(branch)
    if branch_link is not None:
        return branch_link
    branch_link = SpecificationBranch(
        specification=self, branch=branch, registrant=registrant)
    Store.of(self).flush()
    # Drop the cached linked_branches so it is recomputed with the
    # new link included.
    del get_property_cache(self).linked_branches
    notify(ObjectCreatedEvent(branch_link))
    return branch_link
def test_upload_log_url(self):
    # The url of the upload log file is determined by the PackageBuild.
    Store.of(self.package_build).flush()
    self.package_build.storeUploadLog("Some content")

    log_url = self.package_build.upload_log_url
    self.failUnlessEqual(
        'http://launchpad.dev/~joe/'
        '+archive/ppa/+recipebuild/%d/+files/upload_%d_log.txt' % (
            self.package_build.id, self.package_build.id),
        log_url)
def __eq__(self, other):
    """Database-identity equality: two objects are equal when they are
    the same concrete type and reference the same row (same ``id``).

    For objects of a different type this returns ``NotImplemented``
    (rather than ``False``) so Python can try the reflected
    comparison; the eventual fallback to identity preserves the old
    ``False`` result for unrelated types.
    """
    if type(self) is not type(other):
        return NotImplemented

    from stoqlib.lib.environment import is_developer_mode
    if is_developer_mode():
        # Check this only in developer mode to get as many potential
        # errors as possible.
        assert Store.of(self) is Store.of(other)

    return self.id == other.id
def test_distro_affiliation_query_count(self):
    # Only 2 business queries are expected, selects from:
    # - Distribution, Person
    # plus an additional query to create a PublisherConfig record.
    person = self.factory.makePerson()
    distro = self.factory.makeDistribution(owner=person, name='pting')
    # Drop cached objects so the recorder sees every query issued.
    Store.of(distro).invalidate()
    with StormStatementRecorder() as recorder:
        IHasAffiliation(distro).getAffiliationBadges([person])
    self.assertThat(recorder, HasQueryCount(Equals(3)))
def createProductMilestone(
        self, milestone_name, product_name, date_expected):
    """Create a milestone in the trunk series of a product."""
    trunk = getUtility(IProductSet)[product_name].getSeries('trunk')
    milestone = trunk.newMilestone(
        name=milestone_name, dateexpected=date_expected)
    # Flush so the milestone is visible in the database to the caller.
    Store.of(milestone).flush()
    return milestone
def createUserDB(connString):
    '''Creates user database and default admin account with password admin'''
    from tsload.user.localauth import LocalAuth

    database = create_database(connString)
    store = Store(database)

    # Create the schema for both tables.
    TableSchema(database, User).create(store)
    TableSchema(database, Role).create(store)

    localAuth = LocalAuth()

    # Default administrator account.  The password is deliberately
    # 'admin'; it is expected to be changed after first login.
    admin = User()
    admin.name = 'admin'
    admin.gecosName = u'TSLoad Administrator'
    admin.authService = 'local'

    localAuth.changePassword(admin, 'admin')

    store.add(admin)

    adminRole = Role()
    adminRole.user = admin
    adminRole.role = 'admin'

    store.add(adminRole)

    store.commit()
    store.close()
def addNotificationRecipient(self, notification, person):
    # Manually insert BugNotificationRecipient for
    # construct_email_notifications to work.
    # Not sure why using SQLObject constructor doesn't work (it
    # tries to insert a row with only the ID which fails).
    # The %-interpolation is safe here: sqlvalues() escapes every
    # value before it reaches the SQL string.
    Store.of(notification).execute("""
        INSERT INTO BugNotificationRecipient
            (bug_notification, person, reason_header, reason_body)
        VALUES (%s, %s, %s, %s)""" % sqlvalues(
            notification, person,
            u'reason header', u'reason body'))
def delete(cls, id, store=None):
    # Deprecated: callers should use store.remove() directly.
    warnings.warn("use store.remove()", DeprecationWarning, stacklevel=2)
    # NOTE(review): despite the default, `store` must actually be
    # provided -- store.get() raises AttributeError when it is None.
    obj = store.get(cls, id)
    Store.of(obj).remove(obj)
def unlinkMilestone(self, milestone):
    """See `ISpecificationWorkItemSet`."""
    # Clear the milestone reference on every work item pointing at
    # the given milestone, as a single bulk UPDATE.
    store = Store.of(milestone)
    work_items = store.find(
        SpecificationWorkItem, milestone_id=milestone.id)
    work_items.set(milestone_id=None)
def test_create(self):
    # A newly created object belongs to the fixture's store and gets
    # an integer primary key assigned.
    person = self.Person(name="John Joe")
    self.assertTrue(Store.of(person) is self.store)
    self.assertEquals(type(person.id), int)
    self.assertEquals(person.name, "John Joe")
def destroySelf(self):
    # Delete this row via its owning store.
    Store.of(self).remove(self)
def delete(self):
    """See `IBugSubscriptionFilter`."""
    # Bulk-delete dependent rows first, then remove this filter row
    # itself from its store.
    BugSubscriptionFilter.deleteMultiple([self.id])
    Store.of(self).remove(self)
def bugtasks(self):
    """Bug tasks linked to this watch, newest first, via shortlist
    (warns past 10 results, hard limit 100)."""
    result = Store.of(self).find(
        BugTask, BugTask.bugwatch == self.id).order_by(
            Desc(BugTask.datecreated))
    return shortlist(result, 10, 100)
def getByBuildFarmJob(cls, build_farm_job):
    """See `ISpecificBuildFarmJobSource`."""
    # At most one row matches a given BuildFarmJob, so .one() is safe.
    return Store.of(build_farm_job).find(
        cls, build_farm_job_id=build_farm_job.id).one()
def perform_schema_migration(version):
    """Migrate the database from `version` up to DATABASE_VERSION.

    Each step copies the previous DB into a temp directory, runs the
    per-version migration script, verifies row counts, and only on
    full success replaces the original database file.

    @param version: the schema version of the existing database.
    @return: None.  Calls quit() for unsupported versions; raises on
        migration or integrity-check failure.
    """
    to_delete_on_fail = []
    to_delete_on_success = []

    if version < FIRST_DATABASE_VERSION_SUPPORTED:
        log.msg("Migrations from DB version lower than %d are no longer supported!"
                % FIRST_DATABASE_VERSION_SUPPORTED)
        quit()

    tmpdir = os.path.abspath(os.path.join(GLSettings.db_path, 'tmp'))
    orig_db_file = os.path.abspath(
        os.path.join(GLSettings.db_path, 'glbackend-%d.db' % version))
    final_db_file = os.path.abspath(
        os.path.join(GLSettings.db_path, 'glbackend-%d.db' % DATABASE_VERSION))

    # Work on a copy in a fresh temp directory; the original DB is
    # untouched until the whole chain of migrations succeeds.
    shutil.rmtree(tmpdir, True)
    os.mkdir(tmpdir)
    shutil.copy2(orig_db_file, tmpdir)

    new_db_file = None

    try:
        while version < DATABASE_VERSION:
            old_db_file = os.path.abspath(
                os.path.join(tmpdir, 'glbackend-%d.db' % version))
            new_db_file = os.path.abspath(
                os.path.join(tmpdir, 'glbackend-%d.db' % (version + 1)))

            GLSettings.db_file = new_db_file
            GLSettings.enable_input_length_checks = False

            to_delete_on_fail.append(new_db_file)
            to_delete_on_success.append(old_db_file)

            log.msg("Updating DB from version %d to version %d" %
                    (version, version + 1))

            store_old = Store(create_database('sqlite:' + old_db_file))
            store_new = Store(create_database('sqlite:' + new_db_file))

            # Here is instanced the migration script
            MigrationModule = importlib.import_module(
                "globaleaks.db.migrations.update_%d" % (version + 1))
            migration_script = MigrationModule.MigrationScript(
                migration_mapping, version, store_old, store_new)

            log.msg("Migrating table:")

            try:
                try:
                    migration_script.prologue()
                except Exception as exception:
                    log.err("Failure while executing migration prologue: %s"
                            % exception)
                    raise exception

                for model_name, _ in migration_mapping.iteritems():
                    if migration_script.model_from[model_name] is not None and \
                            migration_script.model_to[model_name] is not None:
                        try:
                            migration_script.migrate_model(model_name)

                            # Commit at every table migration in order
                            # to be able to detect the precise
                            # migration that may fail.
                            migration_script.commit()
                        except Exception as exception:
                            log.err("Failure while migrating table %s: %s " %
                                    (model_name, exception))
                            raise exception

                try:
                    migration_script.epilogue()
                    migration_script.commit()
                except Exception as exception:
                    log.err("Failure while executing migration epilogue: %s " %
                            exception)
                    raise exception

            finally:
                # the database should be always closed before leaving the
                # application in order to not keep leaking journal files.
                migration_script.close()

            log.msg("Migration stats:")

            # we open a new db in order to verify integrity of the
            # generated file
            store_verify = Store(
                create_database(GLSettings.make_db_uri(new_db_file)))

            for model_name, _ in migration_mapping.iteritems():
                if model_name == 'ApplicationData':
                    continue

                if migration_script.model_from[model_name] is not None and \
                        migration_script.model_to[model_name] is not None:
                    count = store_verify.find(
                        migration_script.model_to[model_name]).count()
                    if migration_script.entries_count[model_name] != count:
                        if migration_script.fail_on_count_mismatch[model_name]:
                            raise AssertionError("Integrity check failed on count equality for table %s: %d != %d" % \
                                (model_name, count,
                                 migration_script.entries_count[model_name]))
                        else:
                            log.msg(" * %s table migrated (entries count changed from %d to %d)" % \
                                (model_name,
                                 migration_script.entries_count[model_name],
                                 count))
                    else:
                        log.msg(" * %s table migrated (%d entry(s))" % \
                            (model_name,
                             migration_script.entries_count[model_name]))

            version += 1

            store_verify.close()

        perform_data_update(new_db_file)
    except Exception:
        raise
    else:
        # in case of success first copy the new migrated db, then as
        # last action delete the original db file
        shutil.copy(new_db_file, final_db_file)
        security.overwrite_and_remove(orig_db_file)
    finally:
        # Always cleanup the temporary directory used for the migration
        for f in os.listdir(tmpdir):
            tmp_db_file = os.path.join(tmpdir, f)
            security.overwrite_and_remove(tmp_db_file)
        shutil.rmtree(tmpdir)
def addFile(self, name, size, file, contentType, expires=None,
            debugID=None, allow_zero_length=False):
    """Add a file to the librarian.

    :param name: Name to store the file as
    :param size: Size of the file
    :param file: File-like object with the content in it
    :param contentType: mime-type, e.g. text/plain
    :param expires: Expiry time of file.  See LibrarianGarbageCollection.
        Set to None to only expire when it is no longer referenced.
    :param debugID: Optional.  If set, causes extra logging for this
        request on the server, which will be marked with the value given.
    :param allow_zero_length: If True permit zero length files.
    :returns: aliasID as an integer
    :raises UploadFailed: If the server rejects the upload for some
        reason.
    """
    if file is None:
        raise TypeError('Bad File Descriptor: %s' % repr(file))
    if allow_zero_length:
        min_size = -1
    else:
        min_size = 0
    if size <= min_size:
        raise UploadFailed('Invalid length: %d' % size)

    if isinstance(name, unicode):
        name = name.encode('utf-8')

    # Import in this method to avoid a circular import
    from lp.services.librarian.model import LibraryFileContent
    from lp.services.librarian.model import LibraryFileAlias

    self._connect()
    try:
        # Get the name of the database the client is using, so that
        # the server can check that the client is using the same
        # database as the server.
        store = IMasterStore(LibraryFileAlias)
        databaseName = self._getDatabaseName(store)

        # Generate new content and alias IDs.
        # (we'll create rows with these IDs later, but not yet)
        contentID = store.execute(
            "SELECT nextval('libraryfilecontent_id_seq')").get_one()[0]
        aliasID = store.execute(
            "SELECT nextval('libraryfilealias_id_seq')").get_one()[0]

        # Send command
        self._sendLine('STORE %d %s' % (size, name))

        # Send headers
        self._sendHeader('Database-Name', databaseName)
        self._sendHeader('File-Content-ID', contentID)
        self._sendHeader('File-Alias-ID', aliasID)

        if debugID is not None:
            self._sendHeader('Debug-ID', debugID)

        # Send blank line.  Do not check for a response from the
        # server when no data will be sent.  Otherwise _checkError()
        # might consume the "200" response which is supposed to be
        # read below in this method.
        self._sendLine('', check_for_error_responses=(size > 0))

        # Prepare to the upload the file
        md5_digester = hashlib.md5()
        sha1_digester = hashlib.sha1()
        sha256_digester = hashlib.sha256()
        bytesWritten = 0

        # Read in and upload the file 64kb at a time, by using the
        # two-arg form of iter (see
        # /usr/share/doc/python/html/library/functions.html#iter).
        for chunk in iter(lambda: file.read(1024 * 64), ''):
            self.state.f.write(chunk)
            bytesWritten += len(chunk)
            md5_digester.update(chunk)
            sha1_digester.update(chunk)
            sha256_digester.update(chunk)

        assert bytesWritten == size, (
            'size is %d, but %d were read from the file' % (
                size, bytesWritten))
        self.state.f.flush()

        # Read response
        response = self.state.f.readline().strip()
        if response != '200':
            raise UploadFailed('Server said: ' + response)

        # Add rows to DB
        content = LibraryFileContent(
            id=contentID, filesize=size,
            sha256=sha256_digester.hexdigest(),
            sha1=sha1_digester.hexdigest(),
            md5=md5_digester.hexdigest())
        LibraryFileAlias(
            id=aliasID, content=content, filename=name.decode('UTF-8'),
            mimetype=contentType, expires=expires,
            restricted=self.restricted)

        Store.of(content).flush()

        assert isinstance(aliasID, (int, long)), \
            "aliasID %r not an integer" % (aliasID, )
        return aliasID
    finally:
        self._close()
def all_specifications(self):
    """Every specification attached to this product series."""
    store = Store.of(self)
    return store.find(
        Specification, Specification.productseriesID == self.id)
def productserieslanguages(self):
    """See `IProductSeries`."""
    store = Store.of(self)

    english = getUtility(ILaunchpadCelebrities).english

    results = []
    if self.potemplate_count == 1:
        # If there is only one POTemplate in a ProductSeries, fetch
        # Languages and corresponding POFiles with one query, along
        # with their stats, and put them into ProductSeriesLanguage
        # objects.
        origin = [Language, POFile, POTemplate]
        query = store.using(*origin).find(
            (Language, POFile),
            POFile.language == Language.id,
            Language.visible == True,
            POFile.potemplate == POTemplate.id,
            POTemplate.productseries == self,
            POTemplate.iscurrent == True,
            Language.id != english.id)

        ordered_results = query.order_by(['Language.englishname'])

        for language, pofile in ordered_results:
            psl = ProductSeriesLanguage(self, language, pofile=pofile)
            total = pofile.potemplate.messageCount()
            imported = pofile.currentCount()
            changed = pofile.updatesCount()
            rosetta = pofile.rosettaCount()
            unreviewed = pofile.unreviewedCount()
            # Translated = upstream-imported plus rosetta-provided;
            # "new" are rosetta translations not changing an import.
            translated = imported + rosetta
            new = rosetta - changed
            psl.setCounts(total, translated, new, changed, unreviewed)
            psl.last_changed_date = pofile.date_changed
            results.append(psl)
    else:
        # If there is more than one template, do a single
        # query to count total messages in all templates.
        query = store.find(
            Sum(POTemplate.messagecount),
            POTemplate.productseries == self,
            POTemplate.iscurrent == True)
        total, = query
        # And another query to fetch all Languages with translations
        # in this ProductSeries, along with their cumulative stats
        # for imported, changed, rosetta-provided and unreviewed
        # translations.
        query = store.find(
            (Language,
             Sum(POFile.currentcount),
             Sum(POFile.updatescount),
             Sum(POFile.rosettacount),
             Sum(POFile.unreviewed_count),
             Max(POFile.date_changed)),
            POFile.language == Language.id,
            Language.visible == True,
            POFile.potemplate == POTemplate.id,
            POTemplate.productseries == self,
            POTemplate.iscurrent == True,
            Language.id != english.id).group_by(Language)

        ordered_results = query.order_by(['Language.englishname'])

        for (language, imported, changed, rosetta, unreviewed,
             last_changed) in ordered_results:
            psl = ProductSeriesLanguage(self, language)
            translated = imported + rosetta
            new = rosetta - changed
            psl.setCounts(total, translated, new, changed, unreviewed)
            psl.last_changed_date = last_changed
            results.append(psl)

    return results
def setUp(self):
    super(TestBinaryAndSourcePackageNameVocabulary, self).setUp()
    self.vocabulary = BinaryAndSourcePackageNameVocabulary()
    # Create a source package name and fetch its row in the combined
    # binary-and-source name view for use by the tests.
    spn = self.factory.makeSourcePackageName(name='bedbugs')
    self.bspn = Store.of(spn).find(
        BinaryAndSourcePackageName, name=spn.name).one()
class TSExperimentSvcAgent(TSLocalAgent): agentId = expsvcAgentId uuid = expsvcAgentUUID agentType = expsvcAgentType def __init__(self, server, connString): TSLocalAgent.__init__(self, server) self.rootAgent = server.localAgents[0] self.userAgent = server.localAgents[1] self.database = create_database(connString) self.dbStore = Store(self.database) self.server.listenerAgents.append( AgentListener('load', self.onAgentRegister, self.onAgentDisconnect)) self.loadAgents = {} self.resourceManager = ResourceManager(self.dbStore) @inlineCallbacks def registerNewAgent(self, client, agent): hostInfo = yield agent.getHostInfo() agentObj = Agent() agentObj.uuid = uuid.UUID(client.agentUuid) agentObj.agentType = 'load' # FIXME: Sometimes domainname passed with hostname agentObj.hostname = unicode(hostInfo.hostname) agentObj.domainname = unicode(hostInfo.domainname) agentObj.osname = hostInfo.osname agentObj.release = hostInfo.release agentObj.machineArch = hostInfo.machineArch agentObj.numCPUs = hostInfo.numCPUs agentObj.numCores = hostInfo.numCores agentObj.memTotal = hostInfo.memTotal agentObj.lastOnline = datetime.now() yield self.dbStore.add(agentObj) yield self.dbStore.commit() returnValue(agentObj) @inlineCallbacks def onAgentRegister(self, client): if client.agentType == 'load': if client.agentUuid in self.loadAgents: raise JSONTS.Error( JSONTS.AE_INVALID_STATE, "Loader agent with uuid '%s' already registered" % client.agentUuid) agentId = client.getId() agent = self.createRemoteAgent(agentId, LoadAgent) agentSet = yield self.dbStore.find( Agent, Agent.uuid == uuid.UUID(client.agentUuid)) agentObj = yield agentSet.one() if agentObj is None: agentObj = yield self.registerNewAgent(client, agent) else: # Update last online timestamp agentObj.lastOnline = datetime.now() yield self.dbStore.add(agentObj) yield self.dbStore.commit() agentInfo = LoadAgentInfo(agent, agentObj) self.loadAgents[client.agentUuid] = agentInfo print "Registered agent %s with uuid '%s'" % 
(agentObj.hostname, client.agentUuid) reactor.callLater(0.0, self.fetchWorkloadTypes, agent, agentObj) reactor.callLater(0.1, self.resourceManager.registerLoadAgent, agent, agentObj) def onAgentDisconnect(self, client): if client.agentType == 'load': yield self.resourceManager.unregisterLoadAgent(agentInfo.agentObj) agentInfo = self.loadAgents[client.agentUuid] print 'Disconnected agent %s' % agentInfo.agentObj.hostname del self.loadAgents[client.agentUuid] @inlineCallbacks def fetchWorkloadTypes(self, agent, agentObj): wltypeList = yield agent.getWorkloadTypes() wltSet = yield self.dbStore.find(WorkloadType, WorkloadType.agent == agentObj) for wltypeName, wltype in wltypeList.iteritems(): wltObj = wltSet.find(WorkloadType.name == wltypeName) wltObj = wltObj.any() if wltObj is None: wltObj = WorkloadType() wltObj.agent = agentObj wltObj.name = wltypeName wltObj.module = wltype.module wltObj.modulePath = wltype.path wltObj.classList = ','.join(wltype.wlclass) yield self.dbStore.add(wltObj) paramSet = yield self.dbStore.find( WorkloadParam, WorkloadParam.workloadType == wltObj) # Update parameter list for paramObj in paramSet: if paramObj.name not in wltype.params: paramObj.remove() continue paramObj.data = wltype.params.serialize() # Remove serialized object from params array del wltype.params[paramObj.name] yield self.dbStore.add(paramObj) for paramName, param in wltype.params.iteritems(): paramObj = WorkloadParam() paramObj.name = paramName paramObj.workloadType = wltObj paramObj.paramData = TSWorkloadParameter.serialize(param) yield self.dbStore.add(paramObj) yield self.dbStore.commit() @TSMethodImpl(ExpSvcAgent.listAgents) @inlineCallbacks def listAgents(self, context): agentsList = {} agentSet = yield self.dbStore.find(Agent) for agentObj in agentSet: # TODO: should filter agents according to users ACL agentUuid = str(agentObj.uuid) descriptor = TSExpSvcAgentDescriptor() descriptor.agentId = agentObj.id descriptor.lastOnline = 
# --- tail of an agent-listing method whose start lies outside this chunk;
# reproduced as-is (it copies per-agent host attributes into a descriptor).
datetimeToTSTime(agentObj.lastOnline)
descriptor.isOnline = agentUuid in self.loadAgents
for field in ('hostname', 'domainname', 'osname', 'release',
              'machineArch', 'numCPUs', 'numCores', 'memTotal'):
    setattr(descriptor, field, getattr(agentObj, field))
agentsList[agentUuid] = descriptor
returnValue(agentsList)

@TSMethodImpl(ExpSvcAgent.getWorkloadTypes)
@inlineCallbacks
def getWorkloadTypes(self, context, agentId):
    """Return {workload type name: TSWorkloadType} for one agent.

    Fetches all workload types registered for ``agentId`` and all of
    their parameters in a single joined query, then filters the
    parameter set per workload type while building the result.
    """
    wltSet = yield self.dbStore.find(WorkloadType,
                                     WorkloadType.aid == agentId)
    # One joined query covering every parameter of every workload type
    # of this agent; narrowed per-type inside the loop below.
    paramsQuery = (WorkloadParam,
                   Join(WorkloadType,
                        And(WorkloadType.aid == agentId,
                            WorkloadParam.wltid == WorkloadType.id)))
    paramsGlobalSet = yield self.dbStore.using(
        *paramsQuery).find(WorkloadParam)
    wltypeList = {}
    for wltObj in wltSet:
        paramsSet = yield paramsGlobalSet.find(
            WorkloadParam.workloadType == wltObj)
        wltype = TSWorkloadType()
        wltype.module = wltObj.module
        wltype.path = wltObj.modulePath
        # classList is persisted as a comma-separated string.
        wltype.wlclass = wltObj.classList.split(',')
        wltype.params = {}
        for paramObj in paramsSet:
            param = TSWorkloadParameter.deserialize(paramObj.paramData)
            wltype.params[paramObj.name] = param
        wltypeList[wltObj.name] = wltype
    returnValue(wltypeList)

@TSMethodImpl(ExpSvcAgent.getAgentResources)
@inlineCallbacks
def getAgentResources(self, context, agentId):
    """Thin proxy: resources known to the resource manager for ``agentId``."""
    resourceInfo = yield self.resourceManager.getAgentResources(agentId)
    returnValue(resourceInfo)

@inlineCallbacks
def _getProfileObj(self, context, profileName, profile):
    """Look up one ExperimentProfile row by name for the calling user.

    Admin/master clients may address any user's profiles (via
    ``profile.userId``, defaulting to 0); other clients are restricted
    to the user bound to their agent id.

    Returns a ``(profileObj_or_None, userId)`` tuple.
    """
    client = context.client
    if client.auth == TSServerClient.AUTH_MASTER or \
            client.auth == TSServerClient.AUTH_ADMIN:
        userId = profile.userId if profile.userId is not None else 0
    else:
        agentId = client.getId()
        userId = self.userAgent.agentUsers[agentId]
    profileSet = yield self.dbStore.find(
        ExperimentProfile,
        And(ExperimentProfile.name == unicode(profileName),
            ExperimentProfile.userId == userId))
    returnValue((profileSet.one(), userId))

@TSMethodImpl(ExpSvcAgent.listProfiles)
@inlineCallbacks
def listProfiles(self, context):
    """List experiment profiles visible to the calling client.

    Clients with administrative rights (master key or admin) see every
    profile; other clients see only profiles owned by their user.
    """
    client = context.client
    # TODO: Support for experiment sharing
    profiles = {}
    if client.auth == TSServerClient.AUTH_MASTER or \
            client.auth == TSServerClient.AUTH_ADMIN:
        profileSet = yield self.dbStore.find(ExperimentProfile)
    else:
        agentId = client.getId()
        userId = self.userAgent.agentUsers[agentId]
        profileSet = yield self.dbStore.find(
            ExperimentProfile, ExperimentProfile.userId == userId)
    for profileObj in profileSet:
        profiles[profileObj.name] = _profileObjToTSO(
            profileObj, TSExperimentProfileInfo)
    returnValue(profiles)

@TSMethodImpl(ExpSvcAgent.getProfile)
@inlineCallbacks
def getProfile(self, context, profileName, profile):
    """Return a full TSExperimentProfile with threadpools and workloads.

    NOTE(review): if no such profile exists, _getProfileObj() returns
    None and the conversion below will fail -- confirm callers only
    request existing profiles.
    """
    profileObj, _ = yield self._getProfileObj(context, profileName, profile)
    fullProfile = _profileObjToTSO(profileObj, TSExperimentProfile)
    fullProfile.threadpools = {}
    fullProfile.workloads = {}
    threadpoolSet = yield self.dbStore.find(
        ExperimentThreadPool, ExperimentThreadPool.profile == profileObj)
    for threadpoolObj in threadpoolSet:
        threadpool = TSExperimentThreadPool()
        threadpool.agentId = threadpoolObj.aid
        threadpool.numWorkers = threadpoolObj.numWorkers
        fullProfile.threadpools[threadpoolObj.name] = threadpool
    workloadSet = yield self.dbStore.find(
        ExperimentWorkload, ExperimentWorkload.profile == profileObj)
    for workloadObj in workloadSet:
        workload = TSExperimentWorkload()
        # Optional references are flattened to sentinels (-1 / '').
        workload.agentId = workloadObj.threadpool.aid if workloadObj.threadpool is not None else -1
        workload.workloadType = workloadObj.workloadType.name if workloadObj.workloadType is not None else ''
        workload.threadpool = workloadObj.threadpool.name if workloadObj.threadpool is not None else ''
        workload.params = workloadObj.params
        fullProfile.workloads[workloadObj.name] = workload
    returnValue(fullProfile)

@TSMethodImpl(ExpSvcAgent.configureProfile)
@inlineCallbacks
def configureProfile(self, context, profileName, profile):
    """Create or update a profile plus its threadpools/workloads, then
    commit.

    For an existing profile, rows present in the DB but absent from
    ``profile`` are removed; matching rows are updated in place; the
    remainder of ``profile.threadpools`` / ``profile.workloads`` is
    inserted as new rows.  Note: this consumes (deletes entries from)
    the caller-supplied ``profile`` mappings.
    """
    profileObj, userId = yield self._getProfileObj(context, profileName,
                                                   profile)
    newProfile = False
    threadpools = {}
    if profileObj is None:
        # No such profile yet -- create it.
        profileObj = ExperimentProfile()
        profileObj.name = unicode(profileName)
        profileObj.userId = userId
        profileObj.creationDate = datetime.now()
        newProfile = True
    profileObj.description = unicode(profile.description)
    yield self.dbStore.add(profileObj)
    if not newProfile:
        threadpoolSet = yield self.dbStore.find(
            ExperimentThreadPool,
            ExperimentThreadPool.profile == profileObj)
        # Update or remove existing threadpools
        for threadpoolObj in threadpoolSet:
            if threadpoolObj.name in profile.threadpools:
                threadpool = profile.threadpools[threadpoolObj.name]
                threadpoolObj.aid = threadpool.agentId
                threadpoolObj.numWorkers = threadpool.numWorkers
                del profile.threadpools[threadpoolObj.name]
                yield self.dbStore.add(threadpoolObj)
                threadpools[threadpoolObj.name] = threadpoolObj
            else:
                yield self.dbStore.remove(threadpoolObj)
    # Add new threadpools
    for threadpoolName, threadpool in profile.threadpools.iteritems():
        threadpoolObj = ExperimentThreadPool()
        threadpoolObj.name = unicode(threadpoolName)
        threadpoolObj.aid = threadpool.agentId
        threadpoolObj.profile = profileObj
        threadpoolObj.numWorkers = threadpool.numWorkers
        threadpools[threadpoolName] = threadpoolObj
        yield self.dbStore.add(threadpoolObj)

    @inlineCallbacks
    def _setWorkloadType(workload, workloadObj):
        # Resolve the (agent id, type name) pair to a WorkloadType row id.
        if workload.workloadType is not None and workload.agentId is not None:
            workloadTypeSet = yield self.dbStore.find(
                WorkloadType,
                And(WorkloadType.aid == workload.agentId,
                    WorkloadType.name == unicode(workload.workloadType)))
            workloadTypeObj = workloadTypeSet.one()
            workloadObj.wltid = workloadTypeObj.id
        else:
            workloadObj.wltid = None

    def _setThreadpool(workload, workloadObj):
        # NOTE(review): `threadpools` is keyed by *threadpool* names, but
        # this indexes it with the workload's own name -- looks like it
        # should be threadpools[workload.threadpool]; verify.  Also, a
        # freshly added (not yet flushed) threadpool may not have its
        # .id assigned here -- confirm the store assigns ids on add.
        workloadObj.tpid = threadpools[workloadObj.name].id \
            if workload.threadpool is not None \
            else None

    if not newProfile:
        workloadSet = yield self.dbStore.find(
            ExperimentWorkload,
            ExperimentWorkload.profile == profileObj)
        # Update or remove existing workloads
        for workloadObj in workloadSet:
            if workloadObj.name in profile.workloads:
                workload = profile.workloads[workloadObj.name]
                _setThreadpool(workload, workloadObj)
                yield _setWorkloadType(workload, workloadObj)
                workloadObj.params = workload.params
                del profile.workloads[workloadObj.name]
                yield self.dbStore.add(workloadObj)
            else:
                yield self.dbStore.remove(workloadObj)
    # Add new workloads (whatever remains in profile.workloads)
    for workloadName, workload in profile.workloads.iteritems():
        workloadObj = ExperimentWorkload()
        workloadObj.name = unicode(workloadName)
        workloadObj.profile = profileObj
        _setThreadpool(workload, workloadObj)
        yield _setWorkloadType(workload, workloadObj)
        workloadObj.params = workload.params
        # TODO: implement workload steps
        workloadObj.stepsId = None
        yield self.dbStore.add(workloadObj)
    yield self.dbStore.commit()
def link(self, local, remote, setting=False):
    """Link objects to represent their relation.

    @param local: Object representing the I{local} side of the reference.

    @param remote: Object representing the I{remote} side of the reference,
        or the actual value to be set as the local key.

    @param setting: Pass true when the relationship is being newly created.
    """
    local_info = get_obj_info(local)
    try:
        remote_info = get_obj_info(remote)
    except ClassInfoError:
        # Must be a plain key. Just set it.
        # XXX I guess this is broken if self.on_remote is True.
        local_variables = self.get_local_variables(local)
        if type(remote) is not tuple:
            remote = (remote, )
        assert len(remote) == len(local_variables)
        for variable, value in zip(local_variables, remote):
            variable.set(value)
        return

    local_store = Store.of(local)
    remote_store = Store.of(remote)

    if setting:
        # Reconcile the two objects' stores: propagate one into the
        # other when possible, defer via "added" hooks when neither is
        # in a store yet, and refuse cross-store links outright.
        if local_store is None:
            if remote_store is None:
                local_info.event.hook("added", self._add_all, local_info)
                remote_info.event.hook("added", self._add_all, local_info)
            else:
                remote_store.add(local)
                local_store = remote_store
        elif remote_store is None:
            local_store.add(remote)
        elif local_store is not remote_store:
            raise WrongStoreError(
                "%r and %r cannot be linked because they "
                "are in different stores." % (local, remote))

    # In cases below, we maintain a reference to the remote object
    # to make sure it won't get deallocated while the link is active.
    relation_data = local_info.get(self)
    if self.many:
        if relation_data is None:
            relation_data = local_info[self] = {
                "remote": {remote_info: remote}}
        else:
            relation_data["remote"][remote_info] = remote
    else:
        if relation_data is None:
            relation_data = local_info[self] = {"remote": remote}
        else:
            old_remote = relation_data.get("remote")
            if old_remote is not None:
                # A one-to-one slot is being replaced: break the old link.
                self.unlink(local_info, get_obj_info(old_remote))
            relation_data["remote"] = remote

    if setting:
        local_vars = local_info.variables
        remote_vars = remote_info.variables
        pairs = zip(self._get_local_columns(local.__class__),
                    self.remote_key)
        if self.on_remote:
            # Copy local key values onto the remote object; undefined
            # locals are marked pending until flush assigns them.
            local_has_changed = False
            for local_column, remote_column in pairs:
                local_var = local_vars[local_column]
                if not local_var.is_defined():
                    remote_vars[remote_column].set(PendingReferenceValue)
                else:
                    remote_vars[remote_column].set(local_var.get())
                if local_var.has_changed():
                    local_has_changed = True
            if local_has_changed:
                # Local must be flushed before remote so the key exists.
                self._add_flush_order(local_info, remote_info)
            local_info.event.hook("changed", self._track_local_changes,
                                  remote_info)
            local_info.event.hook("flushed", self._break_on_local_flushed,
                                  remote_info)
            #local_info.event.hook("removed", self._break_on_local_removed,
            #                      remote_info)
            remote_info.event.hook("removed", self._break_on_remote_removed,
                                   weakref.ref(local_info))
        else:
            # Mirror image of the branch above: copy remote key values
            # onto the local object.
            remote_has_changed = False
            for local_column, remote_column in pairs:
                remote_var = remote_vars[remote_column]
                if not remote_var.is_defined():
                    local_vars[local_column].set(PendingReferenceValue)
                else:
                    local_vars[local_column].set(remote_var.get())
                if remote_var.has_changed():
                    remote_has_changed = True
            if remote_has_changed:
                self._add_flush_order(local_info, remote_info,
                                      remote_first=True)
            remote_info.event.hook("changed", self._track_remote_changes,
                                   local_info)
            remote_info.event.hook("flushed", self._break_on_remote_flushed,
                                   local_info)
            #local_info.event.hook("removed", self._break_on_remote_removed,
            #                      local_info)
        local_info.event.hook("changed", self._break_on_local_diverged,
                              remote_info)
    else:
        # Link established from existing values: only watch for the key
        # columns diverging, which breaks the link.
        local_info.event.hook("changed", self._break_on_local_diverged,
                              remote_info)
        remote_info.event.hook("changed", self._break_on_remote_diverged,
                               weakref.ref(local_info))
        if self.on_remote:
            remote_info.event.hook("removed", self._break_on_remote_removed,
                                   weakref.ref(local_info))
def activity(self): store = Store.of(self) return store.find(BugWatchActivity, BugWatchActivity.bug_watch == self).order_by( Desc('activity_date'))
def __init__(self, database): StormStore.__init__(self, database)
def getBugMessages(self, clauses=[]): return Store.of(self).find(BugMessage, BugMessage.bug == self.bug.id, BugMessage.bugwatch == self.id, *clauses)
def _createRepository(self, requester, path, clone_from=None):
    """Create a hosted Git repository at ``path`` for ``requester``.

    Namespace-lookup and creation errors are translated into XML-RPC
    faults.  If the path's source package name does not exist yet it is
    auto-created and the call retries itself once.  If the hosting
    service cannot create the repository on disk, the whole transaction
    is aborted so the database row is not kept.

    NOTE(review): the ``clone_from`` parameter is accepted but never
    used here -- the clone source is derived from the target's default
    repositories instead; confirm this is intended.
    """
    try:
        namespace, repository_name, default_func = (
            self._getGitNamespaceExtras(path, requester))
    except InvalidNamespace:
        raise faults.PermissionDenied(
            "'%s' is not a valid Git repository path." % path)
    except NoSuchPerson as e:
        raise faults.NotFound("User/team '%s' does not exist." % e.name)
    except (NoSuchProduct, InvalidProductName) as e:
        raise faults.NotFound("Project '%s' does not exist." % e.name)
    except NoSuchSourcePackageName as e:
        # Auto-create the missing source package name, then retry once.
        try:
            getUtility(ISourcePackageNameSet).new(e.name)
        except InvalidName:
            raise faults.InvalidSourcePackageName(e.name)
        return self._createRepository(requester, path)
    except NameLookupFailed as e:
        raise faults.NotFound(unicode(e))
    except GitRepositoryCreationForbidden as e:
        raise faults.PermissionDenied(unicode(e))
    try:
        repository = namespace.createRepository(
            GitRepositoryType.HOSTED, requester, repository_name)
    except LaunchpadValidationError as e:
        # Despite the fault name, this just passes through the exception
        # text so there's no need for a new Git-specific fault.
        raise faults.InvalidBranchName(e)
    except GitRepositoryExists as e:
        # We should never get here, as we just tried to translate the
        # path and found nothing (not even an inaccessible private
        # repository). Log an OOPS for investigation.
        # NOTE(review): if _reportError() ever returns instead of
        # raising, `repository` would be unbound below -- confirm it
        # always raises.
        self._reportError(path, e)
    except GitRepositoryCreationException as e:
        raise faults.PermissionDenied(unicode(e))
    try:
        if default_func:
            try:
                default_func(repository)
            except Unauthorized:
                raise faults.PermissionDenied(
                    "You cannot set the default Git repository for '%s'." %
                    path)
        # Flush to make sure that repository.id is populated.
        Store.of(repository).flush()
        assert repository.id is not None
        # If repository has target_default, clone from default.
        target_path = None
        try:
            # Prefer the target-wide default; fall back to the owner's
            # default for this target.  Either must be visible to the
            # requester to be used as a clone source.
            default = self.repository_set.getDefaultRepository(
                repository.target)
            if default is not None and default.visibleByUser(requester):
                target_path = default.getInternalPath()
            else:
                default = self.repository_set.getDefaultRepositoryForOwner(
                    repository.owner, repository.target)
                if (default is not None and
                        default.visibleByUser(requester)):
                    target_path = default.getInternalPath()
        except GitTargetError:
            # Ignore Personal repositories.
            pass
        hosting_path = repository.getInternalPath()
        try:
            getUtility(IGitHostingClient).create(
                hosting_path, clone_from=target_path)
        except GitRepositoryCreationFault as e:
            # The hosting service failed. Log an OOPS for investigation.
            self._reportError(path, e, hosting_path=hosting_path)
    except Exception:
        # We don't want to keep the repository we created.
        transaction.abort()
        raise
def binary_builds(self): """See `ISourcePackageRecipeBuild`.""" return Store.of(self).find( BinaryPackageBuild, BinaryPackageBuild.source_package_release == SourcePackageRelease.id, SourcePackageRelease.source_package_recipe_build == self.id)
def get_store(): return Store(create_database(Settings.db_uri))
def source_package_release(self): """See `ISourcePackageRecipeBuild`.""" return Store.of(self).find(SourcePackageRelease, source_package_recipe_build=self).one()
def package_diffs(self): return list( Store.of(self).find(PackageDiff, to_source=self).order_by( Desc(PackageDiff.date_requested)))
class TSUserAgent(TSLocalAgent): agentId = userAgentId uuid = userAgentUUID agentType = userAgentType def __init__(self, server, connString): TSLocalAgent.__init__(self, server) self.client.getId() self.logger = logging.getLogger('UserAgent') self.rootAgent = server.localAgents[0] self.agentUsers = {} self.authServices = {'local': LocalAuth()} self.database = create_database(connString) self.dbStore = Store(self.database) self.server.listenerFlows.append( Flow(dstAgentId=userAgentId, command='authUser')) @TSMethodImpl(UserAgent.authUser) def authUser(self, context, **kw): @inlineCallbacks def implementation(context, userName, userPassword): userSet = yield self.dbStore.find(User, User.name == str(userName)) user = yield userSet.one() self.logger.info('Authorizing user %s', userName) if user is None: self.logger.warning('Error authorizing user: no such user: %s', userName) raise UserAuthError('No such user: %s' % userName) authMethod = self.authServices[user.authService] if authMethod.authentificate(user, userPassword): agentId = context.client.getId() self.agentUsers[agentId] = user.id roles = yield user.roles role = self._setupRoles(context.client, roles) userDescr = TSUserDescriptor() userDescr.name = user.gecosName userDescr.role = role returnValue(userDescr) return implementation(context, **kw) def _setupRoles(self, client, roles): # First pass - identify maximum role maxRole = TSServerClient.AUTH_NONE for role in roles: if role.role == 'admin': maxRole = TSServerClient.AUTH_ADMIN elif role.role == 'operator' and maxRole != TSServerClient.AUTH_ADMIN: maxRole = TSServerClient.AUTH_OPERATOR else: maxRole = TSServerClient.AUTH_USER client.authorize(maxRole) if maxRole != TSServerClient.AUTH_ADMIN: # TODO: For user/operator need to set ACLs pass return maxRole def onDisconnect(self): self.dbStore.close()
def get_store(self): """Generate a instance of the Store object""" return Store(create_database(self.uri_string))
def setUp(self): super(BlockAccessTest, self).setUp() database = SQLite(URI("sqlite:")) self.store = Store(database)
def merge_people(from_person, to_person, reviewer, delete=False):
    """Helper for merge and delete methods.

    Folds ``from_person``'s database references into ``to_person`` via
    direct SQL, marks ``from_person`` as merged, renames it with a
    ``-merged`` suffix, and (unless ``delete``) notifies ``to_person``.

    The statement order below is load-bearing: each table is either
    handled by a dedicated helper (and then added to ``skip``) or
    caught by the generic reference-update loop near the end.
    """
    # since we are doing direct SQL manipulation, make sure all
    # changes have been flushed to the database
    store = Store.of(from_person)
    store.flush()
    # Sanity checks: person/team mismatches, missing reviewer, live
    # PPAs, private branches and remaining email addresses all block
    # the merge.
    if (from_person.is_team and not to_person.is_team
            or not from_person.is_team and to_person.is_team):
        raise AssertionError("Users cannot be merged with teams.")
    if from_person.is_team and reviewer is None:
        raise AssertionError("Team merged require a reviewer.")
    if getUtility(IArchiveSet).getPPAOwnedByPerson(
            from_person, statuses=[ArchiveStatus.ACTIVE,
                                   ArchiveStatus.DELETING]) is not None:
        raise AssertionError(
            'from_person has a ppa in ACTIVE or DELETING status')
    from_person_branches = getUtility(IAllBranches).ownedBy(from_person)
    if not from_person_branches.isPrivate().is_empty():
        raise AssertionError('from_person has private branches.')
    if from_person.is_team:
        _purgeUnmergableTeamArtifacts(from_person, to_person, reviewer)
    if not getUtility(
            IEmailAddressSet).getByPerson(from_person).is_empty():
        raise AssertionError('from_person still has email addresses.')

    # Get a database cursor.
    cur = cursor()

    # These table.columns will be skipped by the 'catch all'
    # update performed later
    skip = [
        # The AccessPolicy.person reference is to allow private teams to
        # see their own +junk branches. We don't allow merges for teams who
        # own private branches so we can skip this column.
        ('accesspolicy', 'person'),
        ('teammembership', 'person'),
        ('teammembership', 'team'),
        ('teamparticipation', 'person'),
        ('teamparticipation', 'team'),
        ('personlanguage', 'person'),
        ('person', 'merged'),
        ('personsettings', 'person'),
        ('emailaddress', 'person'),
        # Polls are not carried over when merging teams.
        ('poll', 'team'),
        # We can safely ignore the mailinglist table as there's a sanity
        # check above which prevents teams with associated mailing lists
        # from being merged.
        ('mailinglist', 'team'),
        # I don't think we need to worry about the votecast and vote
        # tables, because a real human should never have two profiles
        # in Launchpad that are active members of a given team and voted
        # in a given poll. -- GuilhermeSalgado 2005-07-07
        # We also can't afford to change poll results after they are
        # closed -- StuartBishop 20060602
        ('votecast', 'person'),
        ('vote', 'person'),
        ('translationrelicensingagreement', 'person'),
        # These are ON DELETE CASCADE and maintained by triggers.
        ('bugsummary', 'viewed_by'),
        ('bugsummaryjournal', 'viewed_by'),
        ('latestpersonsourcepackagereleasecache', 'creator'),
        ('latestpersonsourcepackagereleasecache', 'maintainer'),
        # Obsolete table.
        ('branchmergequeue', 'owner'),
        ]

    references = list(postgresql.listReferences(cur, 'person', 'id'))
    postgresql.check_indirect_references(references)

    # These rows are in a UNIQUE index, and we can only move them
    # to the new Person if there is not already an entry. eg. if
    # the destination and source persons are both subscribed to a bug,
    # we cannot change the source persons subscription. We just leave them
    # as noise for the time being.

    to_id = to_person.id
    from_id = from_person.id

    # Update PersonLocation, which is a Person-decorator table.
    _merge_person_decoration(
        to_person, from_person, skip, 'PersonLocation', 'person',
        ['last_modified_by', ])

    # Update GPGKey. It won't conflict, but our sanity checks don't
    # know that.
    cur.execute(
        'UPDATE GPGKey SET owner=%(to_id)d WHERE owner=%(from_id)d'
        % vars())
    skip.append(('gpgkey', 'owner'))

    _mergeAccessArtifactGrant(cur, from_id, to_id)
    _mergeAccessPolicyGrant(cur, from_id, to_id)
    _mergeGitRuleGrant(cur, from_id, to_id)
    skip.append(('accessartifactgrant', 'grantee'))
    skip.append(('accesspolicygrant', 'grantee'))
    skip.append(('gitrulegrant', 'grantee'))

    # Update the Branches that will not conflict, and fudge the names of
    # ones that *do* conflict.
    _mergeBranches(from_person, to_person)
    skip.append(('branch', 'owner'))

    # Update the GitRepositories that will not conflict, and fudge the names
    # of ones that *do* conflict.
    _mergeGitRepositories(from_person, to_person)
    skip.append(('gitrepository', 'owner'))

    _mergeSourcePackageRecipes(from_person, to_person)
    skip.append(('sourcepackagerecipe', 'owner'))

    _mergeMailingListSubscriptions(cur, from_id, to_id)
    skip.append(('mailinglistsubscription', 'person'))

    _mergeBranchSubscription(cur, from_id, to_id)
    skip.append(('branchsubscription', 'person'))

    _mergeGitSubscription(cur, from_id, to_id)
    skip.append(('gitsubscription', 'person'))

    _mergeBugAffectsPerson(cur, from_id, to_id)
    skip.append(('bugaffectsperson', 'person'))

    _mergeAnswerContact(cur, from_id, to_id)
    skip.append(('answercontact', 'person'))

    _mergeQuestionSubscription(cur, from_id, to_id)
    skip.append(('questionsubscription', 'person'))

    _mergeBugNotificationRecipient(cur, from_id, to_id)
    skip.append(('bugnotificationrecipient', 'person'))

    # We ignore BugSubscriptionFilterMutes.
    skip.append(('bugsubscriptionfiltermute', 'person'))

    # We ignore BugMutes.
    skip.append(('bugmute', 'person'))

    _mergeStructuralSubscriptions(cur, from_id, to_id)
    skip.append(('structuralsubscription', 'subscriber'))

    _mergeSpecificationSubscription(cur, from_id, to_id)
    skip.append(('specificationsubscription', 'person'))

    _mergeSprintAttendance(cur, from_id, to_id)
    skip.append(('sprintattendance', 'attendee'))

    _mergePOExportRequest(cur, from_id, to_id)
    skip.append(('poexportrequest', 'person'))

    _mergeTranslationMessage(cur, from_id, to_id)
    skip.append(('translationmessage', 'submitter'))
    skip.append(('translationmessage', 'reviewer'))

    # Handle the POFileTranslator cache by doing nothing. As it is
    # maintained by triggers, the data migration has already been done
    # for us when we updated the source tables.
    skip.append(('pofiletranslator', 'person'))

    _mergeTranslationImportQueueEntry(cur, from_id, to_id)
    skip.append(('translationimportqueueentry', 'importer'))

    # XXX cprov 2007-02-22 bug=87098:
    # Since we only allow one PPA for each user,
    # we can't reassign the old user archive to the new user.
    # It need to be done manually, probably by reasinning all publications
    # to the old PPA to the new one, performing a careful_publishing on it
    # and removing the old one from disk.
    skip.append(('archive', 'owner'))

    _mergeCodeReviewVote(cur, from_id, to_id)
    skip.append(('codereviewvote', 'reviewer'))

    _mergeKarmaCache(cur, from_id, to_id, from_person.karma)
    skip.append(('karmacache', 'person'))
    skip.append(('karmatotalcache', 'person'))

    _mergeDateCreated(cur, from_id, to_id)

    _mergeLoginTokens(cur, from_id, to_id)
    skip.append(('logintoken', 'requester'))

    _mergeCodeReviewInlineCommentDraft(cur, from_id, to_id)
    skip.append(('codereviewinlinecommentdraft', 'person'))

    _mergeLiveFS(cur, from_person, to_person)
    skip.append(('livefs', 'owner'))

    _mergeSnap(cur, from_person, to_person)
    skip.append(('snap', 'owner'))

    # Sanity check. If we have a reference that participates in a
    # UNIQUE index, it must have already been handled by this point.
    # We can tell this by looking at the skip list.
    for src_tab, src_col, ref_tab, ref_col, updact, delact in references:
        uniques = postgresql.listUniques(cur, src_tab, src_col)
        if len(uniques) > 0 and (src_tab, src_col) not in skip:
            raise NotImplementedError(
                '%s.%s reference to %s.%s is in a UNIQUE index '
                'but has not been handled' % (
                    src_tab, src_col, ref_tab, ref_col))

    # Handle all simple cases
    for src_tab, src_col, ref_tab, ref_col, updact, delact in references:
        if (src_tab, src_col) in skip:
            continue
        cur.execute('UPDATE %s SET %s=%d WHERE %s=%d' % (
            src_tab, src_col, to_person.id, src_col, from_person.id))

    _mergeTeamMembership(cur, from_id, to_id)
    _mergeProposedInvitedTeamMembership(cur, from_id, to_id)

    # Flag the person as merged
    cur.execute('''
        UPDATE Person SET merged=%(to_id)d WHERE id=%(from_id)d
        ''' % vars())

    # Append a -merged suffix to the person's name.
    name = base = "%s-merged" % from_person.name.encode('ascii')
    cur.execute("SELECT id FROM Person WHERE name = %s" % sqlvalues(name))
    i = 1
    # Probe for a free name, appending a counter until no row matches.
    while cur.fetchone():
        name = "%s%d" % (base, i)
        cur.execute("SELECT id FROM Person WHERE name = %s"
                    % sqlvalues(name))
        i += 1
    cur.execute("UPDATE Person SET name = %s WHERE id = %s"
                % sqlvalues(name, from_person))

    # Since we've updated the database behind Storm's back,
    # flush its caches.
    store.invalidate()

    # Move OpenId Identifiers from the merged account to the new
    # account.
    if from_person.account is not None and to_person.account is not None:
        store.execute("""
            UPDATE OpenIdIdentifier SET account=%s WHERE account=%s
            """ % sqlvalues(to_person.accountID, from_person.accountID))

    if delete:
        # We don't notify anyone about deletes.
        return

    # Inform the user of the merge changes.
    if to_person.is_team:
        mail_text = get_email_template(
            'team-merged.txt', app='registry')
        subject = 'Launchpad teams merged'
    else:
        mail_text = get_email_template(
            'person-merged.txt', app='registry')
        subject = 'Launchpad accounts merged'
    mail_text = mail_text % {
        'dupename': from_person.name,
        'person': to_person.name,
        }
    getUtility(IPersonNotificationSet).addNotification(
        to_person, subject, mail_text)
class SQLObjectTest(TestHelper):
    """Exercise Storm's SQLObject compatibility layer.

    setUp() builds an in-memory SQLite database with four fixture tables
    (person, address, phone, person_phone) and defines a per-test
    ``SQLObject`` base class bound to ``self.store``.  The individual
    tests then declare ad-hoc subclasses against those tables to cover
    the SQLObject-style API: get/select/selectBy variants, column types,
    foreign keys, joins, prejoins, and result-set operations.

    NOTE(review): many assertions depend on the exact fixture rows
    inserted in setUp() (two people named 'John Joe'/'John Doe', two
    addresses, three phones) — keep fixtures and tests in sync.
    """

    def setUp(self):
        TestHelper.setUp(self)
        # Allow classes with the same name in different tests to resolve
        # property path strings properly.
        SQLObjectBase._storm_property_registry.clear()

        # Fresh in-memory SQLite store per test.
        self.store = Store(create_database("sqlite:"))

        class SQLObject(SQLObjectBase):
            @staticmethod
            def _get_store():
                # Closure over the test's store so every subclass
                # declared inside a test uses the same database.
                return self.store

        self.SQLObject = SQLObject

        self.store.execute("CREATE TABLE person "
                           "(id INTEGER PRIMARY KEY, name TEXT, age INTEGER,"
                           " ts TIMESTAMP, delta INTERVAL,"
                           " address_id INTEGER)")
        self.store.execute("INSERT INTO person VALUES "
                           "(1, 'John Joe', 20, '2007-02-05 19:53:15',"
                           " '1 day, 12:34:56', 1)")
        self.store.execute("INSERT INTO person VALUES "
                           "(2, 'John Doe', 20, '2007-02-05 20:53:15',"
                           " '42 days 12:34:56.78', 2)")
        self.store.execute("CREATE TABLE address "
                           "(id INTEGER PRIMARY KEY, city TEXT)")
        self.store.execute("INSERT INTO address VALUES (1, 'Curitiba')")
        self.store.execute("INSERT INTO address VALUES (2, 'Sao Carlos')")
        self.store.execute("CREATE TABLE phone "
                           "(id INTEGER PRIMARY KEY, person_id INTEGER,"
                           "number TEXT)")
        self.store.execute("INSERT INTO phone VALUES (1, 2, '1234-5678')")
        self.store.execute("INSERT INTO phone VALUES (2, 1, '8765-4321')")
        self.store.execute("INSERT INTO phone VALUES (3, 2, '8765-5678')")
        self.store.execute("CREATE TABLE person_phone "
                           "(id INTEGER PRIMARY KEY, person_id INTEGER, "
                           "phone_id INTEGER)")
        self.store.execute("INSERT INTO person_phone VALUES (1, 2, 1)")
        self.store.execute("INSERT INTO person_phone VALUES (2, 2, 2)")
        self.store.execute("INSERT INTO person_phone VALUES (3, 1, 1)")

        # Default Person class used by most tests; descending name order
        # makes "John Joe" sort first.
        class Person(self.SQLObject):
            _defaultOrder = "-Person.name"
            name = StringCol()
            age = IntCol()
            ts = UtcDateTimeCol()

        self.Person = Person

    # --- get()/delete() -------------------------------------------------

    def test_get(self):
        person = self.Person.get(2)
        self.assertTrue(person)
        self.assertEquals(person.name, "John Doe")

    def test_get_not_found(self):
        self.assertRaises(SQLObjectNotFound, self.Person.get, 1000)

    def test_get_typecast(self):
        # get() coerces the string id through _idType (int by default).
        person = self.Person.get("2")
        self.assertTrue(person)
        self.assertEquals(person.name, "John Doe")

    def test_destroySelf(self):
        person = self.Person.get(2)
        person.destroySelf()
        self.assertRaises(SQLObjectNotFound, self.Person.get, 2)

    def test_delete(self):
        self.Person.delete(2)
        self.assertRaises(SQLObjectNotFound, self.Person.get, 2)

    # --- class-level customization --------------------------------------

    def test_custom_table_name(self):
        class MyPerson(self.Person):
            _table = "person"

        person = MyPerson.get(2)
        self.assertTrue(person)
        self.assertEquals(person.name, "John Doe")

    def test_custom_id_name(self):
        # _idName/_idType let a non-"id" column act as the primary key.
        class MyPerson(self.SQLObject):
            _defaultOrder = "-Person.name"
            _table = "person"
            _idName = "name"
            _idType = unicode
            age = IntCol()
            ts = UtcDateTimeCol()

        person = MyPerson.get("John Doe")
        self.assertTrue(person)
        self.assertEquals(person.id, "John Doe")

    # --- object creation ------------------------------------------------

    def test_create(self):
        person = self.Person(name="John Joe")
        self.assertTrue(Store.of(person) is self.store)
        self.assertEquals(type(person.id), int)
        self.assertEquals(person.name, "John Joe")

    def test_SO_creating(self):
        # _SO_creating is True only while the constructor runs set().
        test = self

        class Person(self.Person):
            def set(self, **args):
                test.assertEquals(self._SO_creating, True)
                test.assertEquals(args, {"name": "John Joe"})

        person = Person(name="John Joe")
        self.assertEquals(person._SO_creating, False)

    def test_object_not_added_if__create_fails(self):
        # If _create() raises, the object must not end up in the store.
        objects = []

        class Person(self.Person):
            def _create(self, id, **kwargs):
                objects.append(self)
                raise RuntimeError

        self.assertRaises(RuntimeError, Person, name="John Joe")
        self.assertEquals(len(objects), 1)
        person = objects[0]
        self.assertEquals(Store.of(person), None)

    def test_init_hook(self):
        # _init() runs on both creation and retrieval.
        called = []

        class Person(self.Person):
            def _init(self, *args, **kwargs):
                called.append(True)

        person = Person(name="John Joe")
        self.assertEquals(called, [True])
        Person.get(2)
        self.assertEquals(called, [True, True])

    # --- alternate-id lookups -------------------------------------------

    def test_alternateID(self):
        class Person(self.SQLObject):
            name = StringCol(alternateID=True)

        person = Person.byName("John Doe")
        self.assertTrue(person)
        self.assertEquals(person.name, "John Doe")

    def test_alternateMethodName(self):
        class Person(self.SQLObject):
            name = StringCol(alternateMethodName="byFoo")

        person = Person.byFoo("John Doe")
        self.assertTrue(person)
        self.assertEquals(person.name, "John Doe")
        self.assertRaises(SQLObjectNotFound, Person.byFoo, "John None")

    # --- select() variants ----------------------------------------------

    def test_select(self):
        result = self.Person.select("name = 'John Joe'")
        self.assertEquals(result[0].name, "John Joe")

    def test_select_sqlbuilder(self):
        result = self.Person.select(self.Person.q.name == "John Joe")
        self.assertEqual(result[0].name, "John Joe")

    def test_select_orderBy(self):
        result = self.Person.select("name LIKE 'John%'",
                                    orderBy=("name", "id"))
        self.assertEquals(result[0].name, "John Doe")

    def test_select_orderBy_expr(self):
        result = self.Person.select("name LIKE 'John%'",
                                    orderBy=self.Person.name)
        self.assertEquals(result[0].name, "John Doe")

    def test_select_all(self):
        result = self.Person.select()
        self.assertEquals(result[0].name, "John Joe")

    def test_select_empty_string(self):
        # An empty where-clause string behaves like no clause at all.
        result = self.Person.select('')
        self.assertEquals(result[0].name, "John Joe")

    def test_select_limit(self):
        result = self.Person.select(limit=1)
        self.assertEquals(len(list(result)), 1)

    def test_select_negative_offset(self):
        result = self.Person.select(orderBy="name")
        self.assertEquals(result[-1].name, "John Joe")

    def test_select_slice_negative_offset(self):
        result = self.Person.select(orderBy="name")[-1:]
        self.assertEquals(result[0].name, "John Joe")

    def test_select_distinct(self):
        result = self.Person.select("person.name = 'John Joe'",
                                    clauseTables=["phone"],
                                    distinct=True)
        self.assertEquals(len(list(result)), 1)

    def test_select_selectAlso(self):
        # Since John Doe has two phone numbers, this would return him
        # twice without the distinct=True bit.
        result = self.Person.select(
            "person.id = phone.person_id",
            clauseTables=["phone"],
            selectAlso="LOWER(name) AS lower_name",
            orderBy="lower_name",
            distinct=True)
        people = list(result)
        self.assertEquals(len(people), 2)
        self.assertEquals(people[0].name, "John Doe")
        self.assertEquals(people[1].name, "John Joe")

    def test_select_selectAlso_with_prejoin(self):
        class Person(self.Person):
            address = ForeignKey(foreignKey="Address",
                                 dbName="address_id", notNull=True)

        class Address(self.SQLObject):
            city = StringCol()

        result = Person.select(
            prejoins=["address"],
            selectAlso="LOWER(person.name) AS lower_name",
            orderBy="lower_name")
        people = list(result)
        self.assertEquals(len(people), 2)
        self.assertEquals([(person.name, person.address.city)
                           for person in people],
                          [("John Doe", "Sao Carlos"),
                           ("John Joe", "Curitiba")])

    def test_select_clauseTables_simple(self):
        result = self.Person.select("name = 'John Joe'", ["person"])
        self.assertEquals(result[0].name, "John Joe")

    def test_select_clauseTables_implicit_join(self):
        result = self.Person.select("person.name = 'John Joe' and "
                                    "phone.person_id = person.id",
                                    ["person", "phone"])
        self.assertEquals(result[0].name, "John Joe")

    def test_select_clauseTables_no_cls_table(self):
        # The class's own table is added implicitly when missing.
        result = self.Person.select("person.name = 'John Joe' and "
                                    "phone.person_id = person.id",
                                    ["phone"])
        self.assertEquals(result[0].name, "John Joe")

    def test_selectBy(self):
        result = self.Person.selectBy(name="John Joe")
        self.assertEquals(result[0].name, "John Joe")

    def test_selectBy_orderBy(self):
        result = self.Person.selectBy(age=20, orderBy="name")
        self.assertEquals(result[0].name, "John Doe")
        result = self.Person.selectBy(age=20, orderBy="-name")
        self.assertEquals(result[0].name, "John Joe")

    def test_selectOne(self):
        person = self.Person.selectOne("name = 'John Joe'")
        self.assertTrue(person)
        self.assertEquals(person.name, "John Joe")
        nobody = self.Person.selectOne("name = 'John None'")
        self.assertEquals(nobody, None)
        # SQLBuilder style expression:
        person = self.Person.selectOne(self.Person.q.name == "John Joe")
        self.assertNotEqual(person, None)
        self.assertEqual(person.name, "John Joe")

    def test_selectOne_multiple_results(self):
        self.assertRaises(SQLObjectMoreThanOneResultError,
                          self.Person.selectOne)

    def test_selectOne_clauseTables(self):
        person = self.Person.selectOne("person.name = 'John Joe' and "
                                       "phone.person_id = person.id",
                                       ["phone"])
        self.assertEquals(person.name, "John Joe")

    def test_selectOneBy(self):
        person = self.Person.selectOneBy(name="John Joe")
        self.assertTrue(person)
        self.assertEquals(person.name, "John Joe")
        nobody = self.Person.selectOneBy(name="John None")
        self.assertEquals(nobody, None)

    def test_selectOneBy_multiple_results(self):
        self.assertRaises(SQLObjectMoreThanOneResultError,
                          self.Person.selectOneBy)

    def test_selectFirst(self):
        person = self.Person.selectFirst("name LIKE 'John%'", orderBy="name")
        self.assertTrue(person)
        self.assertEquals(person.name, "John Doe")
        person = self.Person.selectFirst("name LIKE 'John%'", orderBy="-name")
        self.assertTrue(person)
        self.assertEquals(person.name, "John Joe")
        nobody = self.Person.selectFirst("name = 'John None'", orderBy="name")
        self.assertEquals(nobody, None)
        # SQLBuilder style expression:
        person = self.Person.selectFirst(LIKE(self.Person.q.name, "John%"),
                                         orderBy="name")
        self.assertNotEqual(person, None)
        self.assertEqual(person.name, "John Doe")

    def test_selectFirst_default_order(self):
        person = self.Person.selectFirst("name LIKE 'John%'")
        self.assertTrue(person)
        self.assertEquals(person.name, "John Joe")

    def test_selectFirst_default_order_list(self):
        class Person(self.Person):
            _defaultOrder = ["name"]

        person = Person.selectFirst("name LIKE 'John%'")
        self.assertTrue(person)
        self.assertEquals(person.name, "John Doe")

    def test_selectFirst_default_order_expr(self):
        class Person(self.Person):
            _defaultOrder = [SQLConstant("name")]

        person = Person.selectFirst("name LIKE 'John%'")
        self.assertTrue(person)
        self.assertEquals(person.name, "John Doe")

    def test_selectFirst_default_order_fully_qualified(self):
        class Person(self.Person):
            _defaultOrder = ["person.name"]

        person = Person.selectFirst("name LIKE 'John%'")
        self.assertTrue(person)
        self.assertEquals(person.name, "John Doe")

    def test_selectFirstBy(self):
        person = self.Person.selectFirstBy(age=20, orderBy="name")
        self.assertTrue(person)
        self.assertEquals(person.name, "John Doe")
        person = self.Person.selectFirstBy(age=20, orderBy="-name")
        self.assertTrue(person)
        self.assertEquals(person.name, "John Joe")
        nobody = self.Person.selectFirstBy(age=1000, orderBy="name")
        self.assertEquals(nobody, None)

    def test_selectFirstBy_default_order(self):
        person = self.Person.selectFirstBy(age=20)
        self.assertTrue(person)
        self.assertEquals(person.name, "John Joe")

    # --- sync behaviour --------------------------------------------------

    def test_syncUpdate(self):
        """syncUpdate() flushes pending changes to the database."""
        person = self.Person.get(id=1)
        person.name = "John Smith"
        person.syncUpdate()
        name = self.store.execute(
            "SELECT name FROM person WHERE id = 1").get_one()[0]
        self.assertEquals(name, "John Smith")

    def test_sync(self):
        """sync() flushes pending changes and invalidates the cache."""
        person = self.Person.get(id=1)
        person.name = "John Smith"
        person.sync()
        name = self.store.execute(
            "SELECT name FROM person WHERE id = 1").get_one()[0]
        self.assertEquals(name, "John Smith")
        # Now make a change behind Storm's back and show that sync()
        # makes the new value from the database visible.
        self.store.execute("UPDATE person SET name = 'Jane Smith' "
                           "WHERE id = 1", noresult=True)
        person.sync()
        self.assertEquals(person.name, "Jane Smith")

    # --- column types ----------------------------------------------------

    def test_col_name(self):
        class Person(self.SQLObject):
            foo = StringCol(dbName="name")

        person = Person.get(2)
        self.assertEquals(person.foo, "John Doe")

        class Person(self.SQLObject):
            foo = StringCol("name")

        person = Person.get(2)
        self.assertEquals(person.foo, "John Doe")

    def test_col_default(self):
        class Person(self.SQLObject):
            name = StringCol(default="Johny")

        person = Person()
        self.assertEquals(person.name, "Johny")

    def test_col_default_factory(self):
        class Person(self.SQLObject):
            name = StringCol(default=lambda: "Johny")

        person = Person()
        self.assertEquals(person.name, "Johny")

    def test_col_not_null(self):
        class Person(self.SQLObject):
            name = StringCol(notNull=True)

        person = Person.get(2)
        self.assertRaises(NoneError, setattr, person, "name", None)

    def test_col_storm_validator(self):
        calls = []

        def validator(obj, attr, value):
            calls.append((obj, attr, value))
            return value

        class Person(self.SQLObject):
            name = StringCol(storm_validator=validator)

        person = Person.get(2)
        person.name = u'foo'
        self.assertEquals(calls, [(person, 'name', u'foo')])

    def test_string_col(self):
        class Person(self.SQLObject):
            name = StringCol()

        person = Person.get(2)
        self.assertEquals(person.name, "John Doe")

    def test_int_col(self):
        class Person(self.SQLObject):
            age = IntCol()

        person = Person.get(2)
        self.assertEquals(person.age, 20)

    def test_bool_col(self):
        class Person(self.SQLObject):
            age = BoolCol()

        person = Person.get(2)
        self.assertEquals(person.age, True)

    def test_float_col(self):
        class Person(self.SQLObject):
            age = FloatCol()

        person = Person.get(2)
        self.assertTrue(abs(person.age - 20.0) < 1e-6)

    def test_utcdatetime_col(self):
        class Person(self.SQLObject):
            ts = UtcDateTimeCol()

        person = Person.get(2)
        self.assertEquals(person.ts,
                          datetime.datetime(2007, 2, 5, 20, 53, 15,
                                            tzinfo=tzutc()))

    def test_date_col(self):
        class Person(self.SQLObject):
            ts = DateCol()

        person = Person.get(2)
        self.assertEquals(person.ts, datetime.date(2007, 2, 5))

    def test_interval_col(self):
        class Person(self.SQLObject):
            delta = IntervalCol()

        person = Person.get(2)
        self.assertEquals(person.delta, datetime.timedelta(42, 45296, 780000))

    # --- foreign keys -----------------------------------------------------

    def test_foreign_key(self):
        class Person(self.Person):
            address = ForeignKey(foreignKey="Address",
                                 dbName="address_id", notNull=True)

        class Address(self.SQLObject):
            city = StringCol()

        person = Person.get(2)
        self.assertEquals(person.addressID, 2)
        self.assertEquals(person.address.city, "Sao Carlos")

    def test_foreign_key_no_dbname(self):
        # Without dbName the column name is taken from the attribute name.
        self.store.execute("CREATE TABLE another_person "
                           "(id INTEGER PRIMARY KEY, name TEXT, age INTEGER,"
                           " ts TIMESTAMP, address INTEGER)")
        self.store.execute("INSERT INTO another_person VALUES "
                           "(2, 'John Doe', 20, '2007-02-05 20:53:15', 2)")

        class AnotherPerson(self.Person):
            address = ForeignKey(foreignKey="Address", notNull=True)

        class Address(self.SQLObject):
            city = StringCol()

        person = AnotherPerson.get(2)
        self.assertEquals(person.addressID, 2)
        self.assertEquals(person.address.city, "Sao Carlos")

    def test_foreign_key_orderBy(self):
        class Person(self.Person):
            _defaultOrder = "address"
            address = ForeignKey(foreignKey="Address",
                                 dbName="address_id", notNull=True)

        class Address(self.SQLObject):
            city = StringCol()

        person = Person.selectFirst()
        self.assertEquals(person.addressID, 1)

    def test_foreign_key_storm_validator(self):
        calls = []

        def validator(obj, attr, value):
            calls.append((obj, attr, value))
            return value

        class Person(self.SQLObject):
            address = ForeignKey(foreignKey="Address", dbName="address_id",
                                 storm_validator=validator)

        class Address(self.SQLObject):
            city = StringCol()

        person = Person.get(2)
        address = Address.get(1)
        person.address = address
        # The validator sees the foreign-key id, not the object.
        self.assertEquals(calls, [(person, 'addressID', 1)])

    # --- joins ------------------------------------------------------------

    def test_multiple_join(self):
        class AnotherPerson(self.Person):
            _table = "person"
            phones = SQLMultipleJoin("Phone", joinColumn="person")

        class Phone(self.SQLObject):
            person = ForeignKey("AnotherPerson", dbName="person_id")
            number = StringCol()

        person = AnotherPerson.get(2)

        # Make sure that the result is wrapped.
        result = person.phones.orderBy("-number")
        self.assertEquals([phone.number for phone in result],
                          ["8765-5678", "1234-5678"])

        # Test add/remove methods.
        number = Phone.selectOneBy(number="1234-5678")
        person.removePhone(number)
        self.assertEquals(sorted(phone.number for phone in person.phones),
                          ["8765-5678"])
        person.addPhone(number)
        self.assertEquals(sorted(phone.number for phone in person.phones),
                          ["1234-5678", "8765-5678"])

    def test_multiple_join_prejoins(self):
        self.store.execute("ALTER TABLE phone ADD COLUMN address_id INT")
        self.store.execute("UPDATE phone SET address_id = 1")
        self.store.execute("UPDATE phone SET address_id = 2 WHERE id = 3")

        class AnotherPerson(self.Person):
            _table = "person"
            phones = SQLMultipleJoin("Phone", joinColumn="person",
                                     orderBy="number", prejoins=["address"])

        class Phone(self.SQLObject):
            person = ForeignKey("AnotherPerson", dbName="person_id")
            address = ForeignKey("Address", dbName="address_id")
            number = StringCol()

        class Address(self.SQLObject):
            city = StringCol()

        person = AnotherPerson.get(2)
        [phone1, phone2] = person.phones

        # Delete addresses behind Storm's back to show that the
        # addresses have been loaded.
        self.store.execute("DELETE FROM address")

        self.assertEquals(phone1.number, "1234-5678")
        self.assertEquals(phone1.address.city, "Curitiba")
        self.assertEquals(phone2.number, "8765-5678")
        self.assertEquals(phone2.address.city, "Sao Carlos")

    def test_related_join(self):
        class AnotherPerson(self.Person):
            _table = "person"
            phones = SQLRelatedJoin("Phone", otherColumn="phone_id",
                                    intermediateTable="PersonPhone",
                                    joinColumn="person_id", orderBy="id")

        class PersonPhone(self.Person):
            person_id = IntCol()
            phone_id = IntCol()

        class Phone(self.SQLObject):
            number = StringCol()

        person = AnotherPerson.get(2)
        self.assertEquals([phone.number for phone in person.phones],
                          ["1234-5678", "8765-4321"])

        # Make sure that the result is wrapped.
        result = person.phones.orderBy("-number")
        self.assertEquals([phone.number for phone in result],
                          ["8765-4321", "1234-5678"])

        # Test add/remove methods.
        number = Phone.selectOneBy(number="1234-5678")
        person.removePhone(number)
        self.assertEquals(sorted(phone.number for phone in person.phones),
                          ["8765-4321"])
        person.addPhone(number)
        self.assertEquals(sorted(phone.number for phone in person.phones),
                          ["1234-5678", "8765-4321"])

    def test_related_join_prejoins(self):
        self.store.execute("ALTER TABLE phone ADD COLUMN address_id INT")
        self.store.execute("UPDATE phone SET address_id = 1")
        self.store.execute("UPDATE phone SET address_id = 2 WHERE id = 2")

        class AnotherPerson(self.Person):
            _table = "person"
            phones = SQLRelatedJoin("Phone", otherColumn="phone_id",
                                    intermediateTable="PersonPhone",
                                    joinColumn="person_id", orderBy="id",
                                    prejoins=["address"])

        class PersonPhone(self.Person):
            person_id = IntCol()
            phone_id = IntCol()

        class Phone(self.SQLObject):
            number = StringCol()
            address = ForeignKey("Address", dbName="address_id")

        class Address(self.SQLObject):
            city = StringCol()

        person = AnotherPerson.get(2)
        [phone1, phone2] = person.phones

        # Delete addresses behind Storm's back to show that the
        # addresses have been loaded.
        self.store.execute("DELETE FROM address")

        self.assertEquals(phone1.number, "1234-5678")
        self.assertEquals(phone1.address.city, "Curitiba")
        self.assertEquals(phone2.number, "8765-4321")
        self.assertEquals(phone2.address.city, "Sao Carlos")

    def test_single_join(self):
        self.store.execute("CREATE TABLE office "
                           "(id INTEGER PRIMARY KEY, phone_id INTEGER,"
                           "name TEXT)")
        self.store.execute("INSERT INTO office VALUES (1, 1, 'An office')")

        class Phone(self.SQLObject):
            office = SingleJoin("Office", joinColumn="phoneID")

        class Office(self.SQLObject):
            phone = ForeignKey(foreignKey="Phone",
                               dbName="phone_id", notNull=True)
            name = StringCol()

        office = Office.get(1)
        self.assertEqual(office.name, "An office")
        phone = Phone.get(1)
        self.assertEqual(phone.office, office)

        # The single join returns None for a phone with no office
        phone = Phone.get(2)
        self.assertEqual(phone.office, None)

    # --- result-set API ---------------------------------------------------

    def test_result_set_orderBy(self):
        result = self.Person.select()
        result = result.orderBy("-name")
        self.assertEquals([person.name for person in result],
                          ["John Joe", "John Doe"])
        result = result.orderBy("name")
        self.assertEquals([person.name for person in result],
                          ["John Doe", "John Joe"])

    def test_result_set_orderBy_fully_qualified(self):
        result = self.Person.select()
        result = result.orderBy("-person.name")
        self.assertEquals([person.name for person in result],
                          ["John Joe", "John Doe"])
        result = result.orderBy("person.name")
        self.assertEquals([person.name for person in result],
                          ["John Doe", "John Joe"])

    def test_result_set_count(self):
        result = self.Person.select()
        self.assertEquals(result.count(), 2)

    def test_result_set_count_limit(self):
        result = self.Person.select(limit=1)
        self.assertEquals(len(list(result)), 1)
        self.assertEquals(result.count(), 1)

    def test_result_set_count_sliced(self):
        result = self.Person.select()
        sliced_result = result[1:]
        self.assertEquals(len(list(sliced_result)), 1)
        self.assertEquals(sliced_result.count(), 1)

    def test_result_set_count_sliced_empty(self):
        result = self.Person.select()
        sliced_result = result[1:1]
        self.assertEquals(len(list(sliced_result)), 0)
        self.assertEquals(sliced_result.count(), 0)

    def test_result_set_count_sliced_empty_zero(self):
        result = self.Person.select()
        sliced_result = result[0:0]
        self.assertEquals(len(list(sliced_result)), 0)
        self.assertEquals(sliced_result.count(), 0)

    def test_result_set_count_sliced_none(self):
        result = self.Person.select()
        sliced_result = result[None:None]
        self.assertEquals(len(list(sliced_result)), 2)
        self.assertEquals(sliced_result.count(), 2)

    def test_result_set_count_sliced_start_none(self):
        result = self.Person.select()
        sliced_result = result[None:1]
        self.assertEquals(len(list(sliced_result)), 1)
        self.assertEquals(sliced_result.count(), 1)

    def test_result_set_count_sliced_end_none(self):
        result = self.Person.select()
        sliced_result = result[1:None]
        self.assertEquals(len(list(sliced_result)), 1)
        self.assertEquals(sliced_result.count(), 1)

    def test_result_set_count_distinct(self):
        result = self.Person.select(
            "person.id = phone.person_id",
            clauseTables=["phone"],
            distinct=True)
        self.assertEquals(result.count(), 2)

    def test_result_set_count_union_distinct(self):
        result1 = self.Person.select("person.id = 1", distinct=True)
        result2 = self.Person.select("person.id = 2", distinct=True)
        self.assertEquals(result1.union(result2).count(), 2)

    def test_result_set_count_with_joins(self):
        result = self.Person.select(
            "person.address_id = address.id",
            clauseTables=["address"])
        self.assertEquals(result.count(), 2)

    def test_result_set__getitem__(self):
        result = self.Person.select()
        self.assertEquals(result[0].name, "John Joe")

    def test_result_set__iter__(self):
        result = self.Person.select()
        self.assertEquals(list(result.__iter__())[0].name, "John Joe")

    def test_result_set__nonzero__(self):
        """
        L{SQLObjectResultSet.__nonzero__} returns C{True} if the result
        set contains results.  If it contains no results, C{False} is
        returned.
        """
        result = self.Person.select()
        self.assertEquals(result.__nonzero__(), True)
        result = self.Person.select(self.Person.q.name == "No Person")
        self.assertEquals(result.__nonzero__(), False)

    def test_result_set_is_empty(self):
        """
        L{SQLObjectResultSet.is_empty} returns C{True} if the result set
        doesn't contain any results.  If it does contain results, C{False}
        is returned.
        """
        result = self.Person.select()
        self.assertEquals(result.is_empty(), False)
        result = self.Person.select(self.Person.q.name == "No Person")
        self.assertEquals(result.is_empty(), True)

    def test_result_set_distinct(self):
        result = self.Person.select("person.name = 'John Joe'",
                                    clauseTables=["phone"])
        self.assertEquals(len(list(result.distinct())), 1)

    def test_result_set_limit(self):
        result = self.Person.select()
        self.assertEquals(len(list(result.limit(1))), 1)

    def test_result_set_union(self):
        result1 = self.Person.selectBy(id=1)
        result2 = self.Person.selectBy(id=2)
        result3 = result1.union(result2, orderBy="name")
        self.assertEquals([person.name for person in result3],
                          ["John Doe", "John Joe"])

    def test_result_set_union_all(self):
        result1 = self.Person.selectBy(id=1)
        result2 = result1.union(result1, unionAll=True)
        self.assertEquals([person.name for person in result2],
                          ["John Joe", "John Joe"])

    def test_result_set_except_(self):
        person = self.Person(id=3, name="John Moe")
        result1 = self.Person.select()
        result2 = self.Person.selectBy(id=2)
        result3 = result1.except_(result2, orderBy="name")
        self.assertEquals([person.name for person in result3],
                          ["John Joe", "John Moe"])

    def test_result_set_intersect(self):
        person = self.Person(id=3, name="John Moe")
        result1 = self.Person.select()
        result2 = self.Person.select(self.Person.id.is_in((2, 3)))
        result3 = result1.intersect(result2, orderBy="name")
        self.assertEquals([person.name for person in result3],
                          ["John Doe", "John Moe"])

    # --- prejoins ---------------------------------------------------------

    def test_result_set_prejoin(self):
        self.store.execute("ALTER TABLE person ADD COLUMN phone_id INTEGER")
        self.store.execute("UPDATE person SET phone_id=1 "
                           "WHERE name='John Doe'")

        class Person(self.Person):
            address = ForeignKey(foreignKey="Address", dbName="address_id")
            phone = ForeignKey(foreignKey="Phone", dbName="phone_id")

        class Address(self.SQLObject):
            city = StringCol()

        class Phone(self.SQLObject):
            number = StringCol()

        result = Person.select("person.name = 'John Doe'")
        result = result.prejoin(["address", "phone"])
        people = list(result)

        # Remove rows behind its back.
        self.store.execute("DELETE FROM address")
        self.store.execute("DELETE FROM phone")

        # They were prefetched, so it should work even then.
        self.assertEquals([person.address.city for person in people],
                          ["Sao Carlos"])
        self.assertEquals([person.phone.number for person in people],
                          ["1234-5678"])

    def test_result_set_prejoin_getitem(self):
        """Ensure that detuplelizing is used on getitem."""
        class Person(self.Person):
            address = ForeignKey(foreignKey="Address", dbName="address_id")

        class Address(self.SQLObject):
            city = StringCol()

        result = Person.select("person.name = 'John Doe'",
                               prejoins=["address"])
        person = result[0]

        # Remove the row behind its back.
        self.store.execute("DELETE FROM address")

        # They were prefetched, so it should work even then.
        self.assertEquals(person.address.city, "Sao Carlos")

    def test_result_set_prejoin_one(self):
        """Ensure that detuplelizing is used on selectOne()."""
        class Person(self.Person):
            address = ForeignKey(foreignKey="Address", dbName="address_id")

        class Address(self.SQLObject):
            city = StringCol()

        person = Person.selectOne("person.name = 'John Doe'",
                                  prejoins=["address"])

        # Remove the row behind its back.
        self.store.execute("DELETE FROM address")

        # They were prefetched, so it should work even then.
        self.assertEquals(person.address.city, "Sao Carlos")

    def test_result_set_prejoin_first(self):
        """Ensure that detuplelizing is used on selectFirst()."""
        class Person(self.Person):
            address = ForeignKey(foreignKey="Address", dbName="address_id")

        class Address(self.SQLObject):
            city = StringCol()

        person = Person.selectFirst("person.name = 'John Doe'",
                                    prejoins=["address"], orderBy="name")

        # Remove the row behind Storm's back.
        self.store.execute("DELETE FROM address")

        # They were prefetched, so it should work even then.
        self.assertEquals(person.address.city, "Sao Carlos")

    def test_result_set_prejoin_by(self):
        """Ensure that prejoins work with selectBy() queries."""
        class Person(self.Person):
            address = ForeignKey(foreignKey="Address", dbName="address_id")

        class Address(self.SQLObject):
            city = StringCol()

        result = Person.selectBy(name="John Doe").prejoin(["address"])
        person = result[0]

        # Remove the row behind Storm's back.
        self.store.execute("DELETE FROM address")

        # They were prefetched, so it should work even then.
        self.assertEquals(person.address.city, "Sao Carlos")

    def test_result_set_prejoin_related(self):
        """Dotted prejoins are used to prejoin through another table."""
        class Phone(self.SQLObject):
            person = ForeignKey(foreignKey="AnotherPerson",
                                dbName="person_id")
            number = StringCol()

        class AnotherPerson(self.Person):
            _table = "person"
            address = ForeignKey(foreignKey="Address", dbName="address_id")

        class Address(self.SQLObject):
            city = StringCol()

        phone = Phone.selectOne("phone.number = '1234-5678'",
                                prejoins=["person.address"])

        # Remove the rows behind Storm's back.
        self.store.execute("DELETE FROM address")
        self.store.execute("DELETE FROM person")

        # They were prefetched, so it should work even then.
        self.assertEquals(phone.person.name, "John Doe")
        self.assertEquals(phone.person.address.city, "Sao Carlos")

    def test_result_set_prejoin_table_twice(self):
        """A single table can be prejoined multiple times."""
        self.store.execute("CREATE TABLE lease "
                           "(id INTEGER PRIMARY KEY,"
                           " landlord_id INTEGER, tenant_id INTEGER)")
        self.store.execute("INSERT INTO lease VALUES (1, 1, 2)")

        class Address(self.SQLObject):
            city = StringCol()

        class AnotherPerson(self.Person):
            _table = "person"
            address = ForeignKey(foreignKey="Address", dbName="address_id")

        class Lease(self.SQLObject):
            landlord = ForeignKey(foreignKey="AnotherPerson",
                                  dbName="landlord_id")
            tenant = ForeignKey(foreignKey="AnotherPerson",
                                dbName="tenant_id")

        lease = Lease.select(prejoins=["landlord", "landlord.address",
                                       "tenant", "tenant.address"])[0]

        # Remove the person rows behind Storm's back.
        self.store.execute("DELETE FROM address")
        self.store.execute("DELETE FROM person")

        self.assertEquals(lease.landlord.name, "John Joe")
        self.assertEquals(lease.landlord.address.city, "Curitiba")
        self.assertEquals(lease.tenant.name, "John Doe")
        self.assertEquals(lease.tenant.address.city, "Sao Carlos")

    def test_result_set_prejoin_count(self):
        """Prejoins do not affect the result of aggregates like COUNT()."""
        class Person(self.Person):
            address = ForeignKey(foreignKey="Address", dbName="address_id")

        class Address(self.SQLObject):
            city = StringCol()

        result = Person.select("name = 'John Doe'", prejoins=["address"])
        self.assertEquals(result.count(), 1)

    def test_result_set_prejoin_mismatch_union(self):
        """Prejoins do not cause UNION incompatibilities. """
        class Person(self.Person):
            address = ForeignKey(foreignKey="Address", dbName="address_id")

        class Address(self.SQLObject):
            city = StringCol()

        # The prejoin should not prevent the union from working.  At
        # the moment this is done by unconditionally stripping the
        # prejoins (which is what our SQLObject patch did), but could
        # be smarter.
        result1 = Person.select("name = 'John Doe'", prejoins=["address"])
        result2 = Person.select("name = 'John Joe'")
        result = result1.union(result2)
        names = sorted(person.name for person in result)
        self.assertEquals(names, ["John Doe", "John Joe"])

    def test_result_set_prejoin_mismatch_except(self):
        """Prejoins do not cause EXCEPT incompatibilities. """
        class Person(self.Person):
            address = ForeignKey(foreignKey="Address", dbName="address_id")

        class Address(self.SQLObject):
            city = StringCol()

        # The prejoin should not prevent the union from working.  At
        # the moment this is done by unconditionally stripping the
        # prejoins (which is what our SQLObject patch did), but could
        # be smarter.
        result1 = Person.select("name = 'John Doe'", prejoins=["address"])
        result2 = Person.select("name = 'John Joe'")
        result = result1.except_(result2)
        names = sorted(person.name for person in result)
        self.assertEquals(names, ["John Doe"])

    def test_result_set_prejoin_mismatch_intersect(self):
        """Prejoins do not cause INTERSECT incompatibilities. """
        class Person(self.Person):
            address = ForeignKey(foreignKey="Address", dbName="address_id")

        class Address(self.SQLObject):
            city = StringCol()

        # The prejoin should not prevent the union from working.  At
        # the moment this is done by unconditionally stripping the
        # prejoins (which is what our SQLObject patch did), but could
        # be smarter.
        result1 = Person.select("name = 'John Doe'", prejoins=["address"])
        result2 = Person.select("name = 'John Doe'")
        result = result1.intersect(result2)
        names = sorted(person.name for person in result)
        self.assertEquals(names, ["John Doe"])

    def test_result_set_prejoinClauseTables(self):
        self.store.execute("ALTER TABLE person ADD COLUMN phone_id INTEGER")
        self.store.execute("UPDATE person SET phone_id=1 "
                           "WHERE name='John Doe'")

        class Person(self.Person):
            address = ForeignKey(foreignKey="AddressClass",
                                 dbName="address_id")
            phone = ForeignKey(foreignKey="PhoneClass", dbName="phone_id")

        # Name the class so that it doesn't match the table name, to ensure
        # that the prejoin is actually using table names, rather than class
        # names.
        class AddressClass(self.SQLObject):
            _table = "address"
            city = StringCol()

        class PhoneClass(self.SQLObject):
            _table = "phone"
            number = StringCol()

        result = Person.select("person.name = 'John Doe' and "
                               "person.phone_id = phone.id and "
                               "person.address_id = address.id",
                               clauseTables=["address", "phone"])
        result = result.prejoinClauseTables(["address", "phone"])
        people = list(result)

        # Remove rows behind its back.
        self.store.execute("DELETE FROM address")
        self.store.execute("DELETE FROM phone")

        # They were prefetched, so it should work even then.
        self.assertEquals([person.address.city for person in people],
                          ["Sao Carlos"])
        self.assertEquals([person.phone.number for person in people],
                          ["1234-5678"])

    # --- aggregates and containment --------------------------------------

    def test_result_set_sum_string(self):
        result = self.Person.select()
        self.assertEquals(result.sum('age'), 40)

    def test_result_set_sum_expr(self):
        result = self.Person.select()
        self.assertEquals(result.sum(self.Person.q.age), 40)

    def test_result_set_contains(self):
        john = self.Person.selectOneBy(name="John Doe")
        self.assertTrue(john in self.Person.select())
        self.assertFalse(john in self.Person.selectBy(name="John Joe"))
        self.assertFalse(john in self.Person.select(
            "Person.name = 'John Joe'"))

    def test_result_set_contains_does_not_use_iter(self):
        """Calling 'item in result_set' does not iterate over the set. """
        def no_iter(self):
            raise RuntimeError

        real_iter = SQLObjectResultSet.__iter__
        SQLObjectResultSet.__iter__ = no_iter
        try:
            john = self.Person.selectOneBy(name="John Doe")
            self.assertTrue(john in self.Person.select())
        finally:
            # Always restore the patched dunder, even on failure.
            SQLObjectResultSet.__iter__ = real_iter

    def test_result_set_contains_wrong_type(self):
        class Address(self.SQLObject):
            city = StringCol()

        address = Address.get(1)
        result_set = self.Person.select()
        self.assertRaises(TypeError, operator.contains, result_set, address)

    def test_result_set_contains_with_prejoins(self):
        class Person(self.Person):
            address = ForeignKey(foreignKey="Address", dbName="address_id")

        class Address(self.SQLObject):
            city = StringCol()

        john = Person.selectOneBy(name="John Doe")
        result_set = Person.select("name = 'John Doe'",
                                   prejoins=["address"])
        self.assertTrue(john in result_set)

    def test_table_dot_q(self):
        # Table.q.fieldname is a syntax used in SQLObject for
        # sqlbuilder expressions.  Storm can use the main properties
        # for this, so the Table.q syntax just returns those
        # properties:
        class Person(self.SQLObject):
            _idName = "name"
            _idType = unicode
            address = ForeignKey(foreignKey="Phone", dbName="address_id",
                                 notNull=True)

        self.assertEquals(id(Person.q.id), id(Person.id))
        self.assertEquals(id(Person.q.address), id(Person.address))
        self.assertEquals(id(Person.q.addressID), id(Person.addressID))

        person = Person.get("John Joe")

        self.assertEquals(id(person.q.id), id(Person.id))
        self.assertEquals(id(person.q.address), id(Person.address))
        self.assertEquals(id(person.q.addressID), id(Person.addressID))

    def test_set(self):
        class Person(self.Person):
            def set(self, **kw):
                kw["id"] += 1
                super(Person, self).set(**kw)

        person = Person(id=3, name="John Moe")
        self.assertEquals(person.id, 4)
        self.assertEquals(person.name, "John Moe")

    def test_CONTAINSSTRING(self):
        expr = CONTAINSSTRING(self.Person.q.name, "Do")
        result = self.Person.select(expr)
        self.assertEquals([person.name for person in result],
                          ["John Doe"])

        # NOTE(review): 'person' here is the variable leaked from the
        # list comprehension above (Python 2 scoping) — i.e. John Doe.
        person.name = "Funny !%_ Name"

        expr = NOT(CONTAINSSTRING(self.Person.q.name, "!%_"))
        result = self.Person.select(expr)
        self.assertEquals([person.name for person in result],
                          ["John Joe"])
def _merge_person_decoration(to_person, from_person, skip, decorator_table,
                             person_pointer_column,
                             additional_person_columns):
    """Merge a table that "decorates" Person.

    Because "person decoration" is becoming more frequent, we create a
    helper function that can be used for tables that decorate person.

    :param to_person: the IPerson that is "real"
    :param from_person: the IPerson that is being merged away
    :param skip: a list of table/column pairs that have been handled
    :param decorator_table: the name of the table that decorated Person
    :param person_pointer_column: the column on decorator_table that
        UNIQUE'ly references Person.id
    :param additional_person_columns: additional columns in the
        decorator_table that also reference Person.id but are not UNIQUE

    A Person decorator is a table that uniquely references Person,
    so that the information in the table "extends" the Person table.
    Because the reference to Person is unique, there can only be one
    row in the decorator table for any given Person. This function
    checks if there is an existing decorator for the to_person, and
    if so, it just leaves any from_person decorator in place as
    "noise". Otherwise, it updates any from_person decorator to
    point to the "to_person". There can also be other columns in the
    decorator which point to Person, these are assumed to be
    non-unique and will be updated to point to the to_person
    regardless.
    """
    # Run the updates on the store that to_person belongs to.
    store = Store.of(to_person)

    # First, update the main UNIQUE pointer row which links the
    # decorator table to Person.  We do not update rows if there are
    # already rows in the table that refer to the to_person: the
    # correlated COUNT subquery makes the UPDATE a no-op in that case,
    # leaving the from_person row behind as harmless "noise".
    # NOTE: table/column names are interpolated directly into the SQL;
    # they come from calling code, not user input — the ids are %d-safe
    # integers.
    store.execute(
        """UPDATE %(decorator)s
        SET %(person_pointer)s=%(to_id)d
        WHERE %(person_pointer)s=%(from_id)d
            AND ( SELECT count(*) FROM %(decorator)s
                WHERE %(person_pointer)s=%(to_id)d ) = 0
        """ % {
            'decorator': decorator_table,
            'person_pointer': person_pointer_column,
            'from_id': from_person.id,
            'to_id': to_person.id})

    # Now, update any additional columns in the table which point to
    # Person.  Since these are assumed to be NOT UNIQUE, we don't
    # have to worry about multiple rows pointing at the to_person.
    for additional_column in additional_person_columns:
        store.execute(
            """UPDATE %(decorator)s
            SET %(column)s=%(to_id)d
            WHERE %(column)s=%(from_id)d
            """ % {
                'decorator': decorator_table,
                'from_id': from_person.id,
                'to_id': to_person.id,
                'column': additional_column})

    # Record the handled table/column pair so the caller's generic
    # merge loop skips it.
    skip.append(
        (decorator_table.lower(), person_pointer_column.lower()))
def store(self): return Store.of(self)
def delete(self): """See `IHWDeviceClass`.""" store = Store.of(self) store.remove(self)