def test_fileStoreFromPath(self):
    """
    Verify that fileStoreFromPath() will return a CommonDataStore if
    the given path contains either "calendars" or "addressbooks"
    sub-directories.  Otherwise it returns None.
    """

    # No child directories
    docRootPath = CachingFilePath(self.mktemp())
    docRootPath.createDirectory()
    step = UpgradeToDatabaseStep.fileStoreFromPath(docRootPath)
    self.assertEquals(step, None)

    # "calendars" child directory exists
    childPath = docRootPath.child("calendars")
    childPath.createDirectory()
    step = UpgradeToDatabaseStep.fileStoreFromPath(docRootPath)
    self.assertTrue(isinstance(step, CommonDataStore))
    childPath.remove()

    # "addressbooks" child directory exists
    childPath = docRootPath.child("addressbooks")
    childPath.createDirectory()
    step = UpgradeToDatabaseStep.fileStoreFromPath(docRootPath)
    self.assertTrue(isinstance(step, CommonDataStore))
    childPath.remove()
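# A minimal sketch of the behavior the test above pins down, assuming
# fileStoreFromPath() is essentially a static factory on
# UpgradeToDatabaseStep.  The CommonDataStore constructor arguments here
# echo the ones used by the migration tests below; the real
# implementation may pass different ones.
def fileStoreFromPath(path):
    # Only treat the path as a file store if it actually holds
    # calendar or addressbook data.
    if path.child("calendars").isdir() or path.child("addressbooks").isdir():
        return CommonDataStore(path, None, None, True, True)
    return None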
def test_copy(self):
    tempDir = FilePath(self.mktemp())
    tempDir.makedirs()
    tempFile1 = tempDir.child("test1")
    tempFile1.touch()
    tempFile2 = tempDir.child("test2")
    tempFile2.touch()

    # Existing store
    store1_user1 = PropertyStore("user01", lambda: tempFile1)
    store1_user2 = PropertyStore("user01", lambda: tempFile1)
    store1_user2._setPerUserUID("user02")

    # New store
    store2_user1 = PropertyStore("user01", lambda: tempFile2)
    store2_user2 = PropertyStore("user01", lambda: tempFile2)
    store2_user2._setPerUserUID("user02")

    # Populate current store with data
    class DummyProperty1(WebDAVTextElement):
        namespace = "http://calendarserver.org/ns/"
        name = "dummy1"

    class DummyProperty2(WebDAVTextElement):
        namespace = "http://calendarserver.org/ns/"
        name = "dummy2"

    class DummyProperty3(WebDAVTextElement):
        namespace = "http://calendarserver.org/ns/"
        name = "dummy3"

    props_user1 = (
        DummyProperty1.fromString("value1-user1"),
        DummyProperty2.fromString("value2-user1"),
    )
    props_user2 = (
        DummyProperty1.fromString("value1-user2"),
        DummyProperty3.fromString("value3-user2"),
    )

    for prop in props_user1:
        store1_user1[PropertyName.fromElement(prop)] = prop
    for prop in props_user2:
        store1_user2[PropertyName.fromElement(prop)] = prop
    store1_user1.flush()
    store1_user2.flush()

    # Do copy and check results
    store2_user1.copyAllProperties(store1_user1)
    store2_user1.flush()

    self.assertEqual(store1_user1.attrs.items(), store2_user1.attrs.items())
    self.assertEqual(store1_user2.attrs.items(), store2_user2.attrs.items())
def _connectorFor_pg8000(dbmodule, **kwargs):
    """
    Turn properties into pg8000 kwargs.
    """
    params = DBAPIParameters(**kwargs)
    dbkwargs = {
        "user": params.user,
        "password": params.password,
        "database": params.database,
    }
    if params.unixsocket:
        dbkwargs["unix_sock"] = params.unixsocket

        # We're using a socket file
        socketFP = CachingFilePath(dbkwargs["unix_sock"])

        if socketFP.isdir():
            # We have been given the directory, not the actual socket file
            socketFP = socketFP.child(
                ".s.PGSQL.{}".format(params.port if params.port else "5432"))
            dbkwargs["unix_sock"] = socketFP.path

        if not socketFP.isSocket():
            raise InternalDataStoreError(
                "No such socket file: {}".format(socketFP.path)
            )
    else:
        dbkwargs["host"] = params.host
        if params.port:
            dbkwargs["port"] = int(params.port)
    return DBAPIConnector(dbmodule, postgresPreflight, **dbkwargs)
def setUp(self):
    tempDir = FilePath(self.mktemp())
    tempDir.makedirs()
    tempFile = tempDir.child("test")
    tempFile.touch()
    self.propertyStore = self.propertyStore1 = PropertyStore(
        "user01", "user01", lambda: tempFile)
    self.propertyStore2 = PropertyStore("user02", "user01", lambda: tempFile)
def buildStore(self, testCase, notifierFactory):
    """
    Do the necessary work to build a store for a particular test case.

    @return: a L{Deferred} which fires with an L{IDataStore}.
    """
    disableMemcacheForTest(testCase)
    dbRoot = CachingFilePath(self.SHARED_DB_PATH)
    attachmentRoot = dbRoot.child("attachments")
    if self.sharedService is None:
        ready = Deferred()

        def getReady(connectionFactory):
            self.makeAndCleanStore(
                testCase, notifierFactory, attachmentRoot
            ).chainDeferred(ready)
            return Service()

        self.sharedService = self.createService(getReady)
        self.sharedService.startService()

        def startStopping():
            log.msg("Starting stopping.")
            self.sharedService.unpauseMonitor()
            return self.sharedService.stopService()

        reactor.addSystemEventTrigger(  # @UndefinedVariable
            "before", "shutdown", startStopping)
        result = ready
    else:
        result = self.makeAndCleanStore(
            testCase, notifierFactory, attachmentRoot
        )

    def cleanUp():
        def stopit():
            self.sharedService.pauseMonitor()
        return deferLater(reactor, 0.1, stopit)

    testCase.addCleanup(cleanUp)
    return result
def _connectorFor_pg8000(dbmodule, **kwargs):
    """
    Turn properties into pg8000 kwargs.
    """
    params = DBAPIParameters(**kwargs)
    dbkwargs = {
        "user": params.user,
        "password": params.password,
        "database": params.database,
    }
    if params.ssl:
        dbkwargs["ssl"] = params.ssl
    if params.unixsocket:
        dbkwargs["unix_sock"] = params.unixsocket

        # We're using a socket file
        socketFP = CachingFilePath(dbkwargs["unix_sock"])

        if socketFP.isdir():
            # We have been given the directory, not the actual socket file
            socketFP = socketFP.child(
                ".s.PGSQL.{}".format(params.port if params.port else "5432"))
            dbkwargs["unix_sock"] = socketFP.path

        if not socketFP.isSocket():
            raise InternalDataStoreError(
                "No such socket file: {}".format(socketFP.path)
            )
    else:
        dbkwargs["host"] = params.host
        if params.port:
            dbkwargs["port"] = int(params.port)
    if "txnTimeoutSeconds" in kwargs:
        dbkwargs["txnTimeoutSeconds"] = kwargs["txnTimeoutSeconds"]
    return DBAPIConnector(dbmodule, pg8000Preflight, **dbkwargs)
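# Hypothetical usage of the connector factory above.  The keyword names
# mirror the DBAPIParameters attributes the code reads; the concrete
# values, and the assumption that the returned DBAPIConnector exposes a
# connect() method, are illustrative only.
import pg8000

connector = _connectorFor_pg8000(
    pg8000,
    user="caldav",
    password="secret",
    database="caldav",
    host="localhost",
    port="5432",
)
connection = connector.connect()  # assumed API; yields a DB-API connection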
@inlineCallbacks
def setUp(self):
    yield super(GroupShareeTestBase, self).setUp()

    accountsFilePath = FilePath(
        os.path.join(os.path.dirname(__file__), "accounts"))
    yield self.buildStoreAndDirectory(
        accounts=accountsFilePath.child("groupAccounts.xml"),
    )
    yield self.populate()

    self.paths = {}
@inlineCallbacks
def doDirectoryTest(self, addedNames, modify=lambda x: None,
                    expectedNames=None):
    """
    Do a test of a L{DAVFile} pointed at a directory, verifying that files
    existing with the given names will be faithfully 'played back' via
    HTML rendering.
    """
    if expectedNames is None:
        expectedNames = addedNames
    fp = FilePath(self.mktemp())
    fp.createDirectory()
    for sampleName in expectedNames:
        fp.child(sampleName).touch()
    df = DAVFile(fp)
    modify(df)
    responseText = (yield df.render(SimpleFakeRequest('/'))).stream.read()
    responseXML = browserHTML2ETree(responseText)
    names = set(
        [element.text.encode("utf-8")
         for element in responseXML.findall(".//a")]
    )
    self.assertEquals(set(expectedNames), names)
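# A hypothetical test built on the doDirectoryTest() helper above: the
# rendered directory listing should play the created file names straight
# back out.  The file names are placeholders.
def test_simpleList(self):
    # Render a directory of three files and expect the same names back.
    return self.doDirectoryTest(["a.txt", "b.html", "c"])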
def buildStore(
    self, testCase, notifierFactory, directoryService=None, homes=None,
    enableJobProcessing=True,
):
    """
    Do the necessary work to build a store for a particular test case.

    @return: a L{Deferred} which fires with an L{IDataStore}.
    """
    disableMemcacheForTest(testCase)
    dbRoot = FilePath(self.sharedDBPath)
    attachmentRoot = dbRoot.child("attachments")
    # The directory will be given to us later via setDirectoryService
    if self.sharedService is None:
        ready = Deferred()

        def getReady(connectionFactory, storageService):
            self.makeAndCleanStore(
                testCase, notifierFactory, directoryService,
                attachmentRoot, enableJobProcessing
            ).chainDeferred(ready)
            return Service()

        self.sharedService = self.createService(getReady)
        self.sharedService.startService()

        def startStopping():
            log.info("Starting stopping.")
            self.sharedService.unpauseMonitor()
            return self.sharedService.stopService()

        reactor.addSystemEventTrigger("before", "shutdown", startStopping)
        result = ready
    else:
        result = self.makeAndCleanStore(
            testCase, notifierFactory, directoryService,
            attachmentRoot, enableJobProcessing
        )

    def cleanUp():
        def stopit():
            self.sharedService.pauseMonitor()
        return deferLater(reactor, 0.1, stopit)

    testCase.addCleanup(cleanUp)
    return result
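# Hedged sketch of how a test case might consume buildStore() above;
# "theStoreBuilder" is assumed to be a module-level instance of the
# builder class (a name that does appear in the migration tests later in
# this collection), and StubNotifierFactory comes from the same test
# utilities.
@inlineCallbacks
def setUp(self):
    self.sqlStore = yield theStoreBuilder.buildStore(
        self, StubNotifierFactory()
    )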
def setUp(self):
    tempDir = FilePath(self.mktemp())
    tempDir.makedirs()
    tempFile = tempDir.child("test")
    tempFile.touch()
    self.propertyStore = PropertyStore("user01", lambda: tempFile)
    self.propertyStore1 = self.propertyStore
    self.propertyStore2 = PropertyStore("user01", lambda: tempFile)
    self.propertyStore2._setPerUserUID("user02")
    self.propertyStore2._setProxyUID("user02")
    self.propertyStore3 = PropertyStore("user01", lambda: tempFile)
    self.propertyStore3._setProxyUID("user03")
    self.propertyStore4 = PropertyStore("user01", lambda: tempFile)
    self.propertyStore4._setPerUserUID("user02")
    self.propertyStore4._setProxyUID("user04")
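# An illustrative test one could add to the fixture above: a property
# written through the user02 view (propertyStore2) stays per-user, so the
# user01 view does not observe it.  DummyProperty follows the pattern of
# the dummy elements in test_copy() earlier; the isolation semantics
# asserted here are an assumption based on _setPerUserUID, not verified
# against the real store.
def test_perUserIsolation(self):
    class DummyProperty(WebDAVTextElement):
        namespace = "http://calendarserver.org/ns/"
        name = "dummy"

    prop = DummyProperty.fromString("user02-only value")
    name = PropertyName.fromElement(prop)
    self.propertyStore2[name] = prop
    self.propertyStore2.flush()
    self.assertTrue(name not in self.propertyStore1)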
def test_triggerGroupCacherUpdate(self):
    """
    Verify triggerGroupCacherUpdate can read a pidfile and send a SIGHUP.
    """
    self.calledArgs = None

    def killMethod(pid, sig):
        self.calledArgs = (pid, sig)

    class StubConfig(object):
        def __init__(self, runRootPath):
            self.RunRoot = runRootPath

    runRootDir = FilePath(self.mktemp())
    runRootDir.createDirectory()
    pidFile = runRootDir.child("groupcacher.pid")
    pidFile.setContent("1234")
    testConfig = StubConfig(runRootDir.path)
    triggerGroupCacherUpdate(testConfig, killMethod=killMethod)
    self.assertEquals(self.calledArgs, (1234, signal.SIGHUP))
    runRootDir.remove()
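# A minimal sketch of the function exercised by the test above, written
# to match the observed contract: read the pid out of RunRoot's
# groupcacher.pid and deliver SIGHUP via the injectable killMethod.  The
# real implementation may add error handling and logging.
import os
import signal

def triggerGroupCacherUpdate(config, killMethod=os.kill):
    pidFilePath = os.path.join(config.RunRoot, "groupcacher.pid")
    if os.path.exists(pidFilePath):
        pid = int(open(pidFilePath).read().strip())
        killMethod(pid, signal.SIGHUP)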
def setUp(self):
    super(DeprovisionTestCase, self).setUp()

    testRootPath = FilePath(__file__).sibling("deprovision")
    template = testRootPath.child("caldavd.plist").getContent()

    newConfig = template % {
        "ServerRoot": os.path.abspath(config.ServerRoot),
    }
    configFilePath = FilePath(
        os.path.join(config.ConfigRoot, "caldavd.plist"))
    configFilePath.setContent(newConfig)

    self.configFileName = configFilePath.path
    config.load(self.configFileName)

    origUsersFile = FilePath(__file__).sibling(
        "deprovision").child("users-groups.xml")
    copyUsersFile = FilePath(config.DataRoot).child("accounts.xml")
    origUsersFile.copyTo(copyUsersFile)

    origResourcesFile = FilePath(__file__).sibling(
        "deprovision").child("resources-locations.xml")
    copyResourcesFile = FilePath(config.DataRoot).child("resources.xml")
    origResourcesFile.copyTo(copyResourcesFile)

    origAugmentFile = FilePath(__file__).sibling(
        "deprovision").child("augments.xml")
    copyAugmentFile = FilePath(config.DataRoot).child("augments.xml")
    origAugmentFile.copyTo(copyAugmentFile)

    self.rootResource = getRootResource(config)
    self.directory = self.rootResource.getDirectory()

    # Make sure trial puts the reactor in the right state, by letting it
    # run one reactor iteration.  (Ignore me, please.)
    d = Deferred()
    reactor.callLater(0, d.callback, True)
    return d
class HomeMigrationTests(TestCase):
    """
    Tests for L{UpgradeToDatabaseStep}.
    """

    @inlineCallbacks
    def setUp(self):
        """
        Set up two stores to migrate between.
        """
        # Add some files to the file store.
        self.filesPath = CachingFilePath(self.mktemp())
        self.filesPath.createDirectory()
        fileStore = self.fileStore = CommonDataStore(
            self.filesPath, {"push": StubNotifierFactory()},
            TestStoreDirectoryService(), True, True
        )
        self.sqlStore = yield theStoreBuilder.buildStore(
            self, StubNotifierFactory()
        )
        self.upgrader = UpgradeToDatabaseStep(self.fileStore, self.sqlStore)

        requirements = CommonTests.requirements
        extras = deriveValue(self, "extraRequirements", lambda t: {})
        requirements = self.mergeRequirements(requirements, extras)

        yield populateCalendarsFrom(requirements, fileStore)
        md5s = CommonTests.md5s
        yield resetCalendarMD5s(md5s, fileStore)
        self.filesPath.child("calendars").child(
            "__uids__").child("ho").child("me").child("home1").child(
            ".some-extra-data").setContent("some extra data")

        requirements = ABCommonTests.requirements
        yield populateAddressBooksFrom(requirements, fileStore)
        md5s = ABCommonTests.md5s
        yield resetAddressBookMD5s(md5s, fileStore)
        self.filesPath.child("addressbooks").child(
            "__uids__").child("ho").child("me").child("home1").child(
            ".some-extra-data").setContent("some extra data")

    def mergeRequirements(self, a, b):
        """
        Merge two requirements dictionaries together, modifying C{a} and
        returning it.

        @param a: Some requirements, in the format of
            L{CommonTests.requirements}.
        @type a: C{dict}

        @param b: Some additional requirements, to be merged into C{a}.
        @type b: C{dict}

        @return: C{a}
        @rtype: C{dict}
        """
        for homeUID in b:
            homereq = a.setdefault(homeUID, {})
            homeExtras = b[homeUID]
            for calendarUID in homeExtras:
                calreq = homereq.setdefault(calendarUID, {})
                calendarExtras = homeExtras[calendarUID]
                calreq.update(calendarExtras)
        return a

    @withSpecialValue(
        "extraRequirements",
        {
            "home1": {
                "calendar_1": {
                    "bogus.ics": (
                        getModule("twistedcaldav").filePath.sibling("zoneinfo")
                        .child("EST.ics").getContent(),
                        CommonTests.metadata1
                    )
                }
            }
        }
    )
    @inlineCallbacks
    def test_unknownTypeNotMigrated(self):
        """
        The only types of calendar objects that should get migrated are
        VEVENTs and VTODOs.  Other component types, such as free-standing
        VTIMEZONEs, don't have a UID and can't be stored properly in the
        database, so they should not be migrated.
        """
        yield self.upgrader.stepWithResult(None)
        txn = self.sqlStore.newTransaction()
        self.addCleanup(txn.commit)
        self.assertIdentical(
            None,
            (yield (yield (yield
                (yield txn.calendarHomeWithUID("home1"))
                .calendarWithName("calendar_1")))
                .calendarObjectWithName("bogus.ics"))
        )

    @inlineCallbacks
    def test_upgradeCalendarHomes(self):
        """
        L{UpgradeToDatabaseService.startService} will do the upgrade, then
        start its dependent service by adding it to its service hierarchy.
        """
        yield self.upgrader.stepWithResult(None)
        txn = self.sqlStore.newTransaction()
        self.addCleanup(txn.commit)
        for uid in CommonTests.requirements:
            if CommonTests.requirements[uid] is not None:
                self.assertNotIdentical(
                    None, (yield txn.calendarHomeWithUID(uid))
                )
        # Successfully migrated calendar homes are deleted
        self.assertFalse(self.filesPath.child("calendars").child(
            "__uids__").child("ho").child("me").child("home1").exists())

        # Want metadata preserved
        home = (yield txn.calendarHomeWithUID("home1"))
        calendar = (yield home.calendarWithName("calendar_1"))
        for name, metadata, md5 in (
            ("1.ics", CommonTests.metadata1, CommonTests.md5Values[0]),
            ("2.ics", CommonTests.metadata2, CommonTests.md5Values[1]),
            ("3.ics", CommonTests.metadata3, CommonTests.md5Values[2]),
        ):
            object = (yield calendar.calendarObjectWithName(name))
            self.assertEquals(object.getMetadata(), metadata)
            self.assertEquals(object.md5(), md5)

    @inlineCallbacks
    def test_upgradeExistingHome(self):
        """
        L{UpgradeToDatabaseService.startService} will skip migrating
        existing homes.
        """
        startTxn = self.sqlStore.newTransaction("populate empty sample")
        yield startTxn.calendarHomeWithUID("home1", create=True)
        yield startTxn.commit()
        yield self.upgrader.stepWithResult(None)
        vrfyTxn = self.sqlStore.newTransaction("verify sample still empty")
        self.addCleanup(vrfyTxn.commit)
        home = yield vrfyTxn.calendarHomeWithUID("home1")
        # The default calendar is still there.
        self.assertNotIdentical(
            None, (yield home.calendarWithName("calendar")))
        # The migrated calendar isn't.
        self.assertIdentical(
            None, (yield home.calendarWithName("calendar_1")))

    @inlineCallbacks
    def test_upgradeAttachments(self):
        """
        L{UpgradeToDatabaseService.startService} upgrades calendar
        attachments as well.
        """
        # Need to tweak config and settings to setup dropbox to work
        self.patch(config, "EnableDropBox", True)
        self.patch(config, "EnableManagedAttachments", False)
        self.sqlStore.enableManagedAttachments = False

        txn = self.sqlStore.newTransaction()
        cs = schema.CALENDARSERVER
        yield Delete(
            From=cs,
            Where=cs.NAME == "MANAGED-ATTACHMENTS"
        ).on(txn)
        yield txn.commit()

        txn = self.fileStore.newTransaction()
        committed = []

        def maybeCommit():
            if not committed:
                committed.append(True)
                return txn.commit()

        self.addCleanup(maybeCommit)

        @inlineCallbacks
        def getSampleObj():
            home = (yield txn.calendarHomeWithUID("home1"))
            calendar = (yield home.calendarWithName("calendar_1"))
            object = (yield calendar.calendarObjectWithName("1.ics"))
            returnValue(object)

        inObject = yield getSampleObj()
        someAttachmentName = "some-attachment"
        someAttachmentType = MimeType.fromString("application/x-custom-type")
        attachment = yield inObject.createAttachmentWithName(
            someAttachmentName,
        )
        transport = attachment.store(someAttachmentType)
        someAttachmentData = "Here is some data for your attachment, enjoy."
        transport.write(someAttachmentData)
        yield transport.loseConnection()
        yield maybeCommit()
        yield self.upgrader.stepWithResult(None)
        committed = []
        txn = self.sqlStore.newTransaction()
        outObject = yield getSampleObj()
        outAttachment = yield outObject.attachmentWithName(someAttachmentName)
        allDone = Deferred()

        class SimpleProto(Protocol):
            data = ''

            def dataReceived(self, data):
                self.data += data

            def connectionLost(self, reason):
                allDone.callback(self.data)

        self.assertEquals(outAttachment.contentType(), someAttachmentType)
        outAttachment.retrieve(SimpleProto())
        allData = yield allDone
        self.assertEquals(allData, someAttachmentData)

    @inlineCallbacks
    def test_upgradeAddressBookHomes(self):
        """
        L{UpgradeToDatabaseService.startService} will do the upgrade, then
        start its dependent service by adding it to its service hierarchy.
        """
        yield self.upgrader.stepWithResult(None)
        txn = self.sqlStore.newTransaction()
        self.addCleanup(txn.commit)
        for uid in ABCommonTests.requirements:
            if ABCommonTests.requirements[uid] is not None:
                self.assertNotIdentical(
                    None, (yield txn.addressbookHomeWithUID(uid))
                )
        # Successfully migrated addressbook homes are deleted
        self.assertFalse(self.filesPath.child("addressbooks").child(
            "__uids__").child("ho").child("me").child("home1").exists())

        # Want metadata preserved
        home = (yield txn.addressbookHomeWithUID("home1"))
        adbk = (yield home.addressbookWithName("addressbook"))
        for name, md5 in (
            ("1.vcf", ABCommonTests.md5Values[0]),
            ("2.vcf", ABCommonTests.md5Values[1]),
            ("3.vcf", ABCommonTests.md5Values[2]),
        ):
            object = (yield adbk.addressbookObjectWithName(name))
            self.assertEquals(object.md5(), md5)

    def test_fileStoreFromPath(self):
        """
        Verify that fileStoreFromPath() will return a CommonDataStore if
        the given path contains either "calendars" or "addressbooks"
        sub-directories.  Otherwise it returns None.
        """

        # No child directories
        docRootPath = CachingFilePath(self.mktemp())
        docRootPath.createDirectory()
        step = UpgradeToDatabaseStep.fileStoreFromPath(docRootPath)
        self.assertEquals(step, None)

        # "calendars" child directory exists
        childPath = docRootPath.child("calendars")
        childPath.createDirectory()
        step = UpgradeToDatabaseStep.fileStoreFromPath(docRootPath)
        self.assertTrue(isinstance(step, CommonDataStore))
        childPath.remove()

        # "addressbooks" child directory exists
        childPath = docRootPath.child("addressbooks")
        childPath.createDirectory()
        step = UpgradeToDatabaseStep.fileStoreFromPath(docRootPath)
        self.assertTrue(isinstance(step, CommonDataStore))
        childPath.remove()
from txdav.common.icommondatastore import ObjectResourceNameAlreadyExistsError
from txcarddav.iaddressbookstore import (
    IAddressBookObject, IAddressBookHome,
    IAddressBook, IAddressBookTransaction
)

from twistedcaldav.vcard import Component as VComponent

from twext.python.filepath import CachingFilePath as FilePath
from twext.web2.dav import davxml
from twext.web2.dav.element.base import WebDAVUnknownElement


storePath = FilePath(__file__).parent().child("addressbook_store")
homeRoot = storePath.child("ho").child("me").child("home1")
adbk1Root = homeRoot.child("addressbook_1")

addressbook1_objectNames = [
    "1.vcf",
    "2.vcf",
    "3.vcf",
]

home1_addressbookNames = [
    "addressbook_1",
    "addressbook_2",
    "addressbook_empty",
]
class HomeMigrationTests(CommonCommonTests, TestCase):
    """
    Tests for L{UpgradeToDatabaseStep}.
    """

    av1 = Component.fromString("""BEGIN:VCALENDAR
VERSION:2.0
CALSCALE:GREGORIAN
PRODID:-//calendarserver.org//Zonal//EN
BEGIN:VAVAILABILITY
ORGANIZER:mailto:[email protected]
UID:[email protected]
DTSTAMP:20061005T133225Z
DTEND:20140101T000000Z
BEGIN:AVAILABLE
UID:[email protected]
DTSTAMP:20061005T133225Z
SUMMARY:Monday to Friday from 9:00 to 17:00
DTSTART:20130101T090000Z
DTEND:20130101T170000Z
RRULE:FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR
END:AVAILABLE
END:VAVAILABILITY
END:VCALENDAR
""")

    @inlineCallbacks
    def setUp(self):
        """
        Set up two stores to migrate between.
        """
        yield super(HomeMigrationTests, self).setUp()
        yield self.buildStoreAndDirectory(extraUids=(
            u"home1",
            u"home2",
            u"home3",
            u"home_defaults",
            u"home_no_splits",
            u"home_splits",
            u"home_splits_shared",
        ))
        self.sqlStore = self.store

        # Add some files to the file store.
        self.filesPath = CachingFilePath(self.mktemp())
        self.filesPath.createDirectory()
        fileStore = self.fileStore = CommonDataStore(
            self.filesPath, {"push": StubNotifierFactory()}, self.directory,
            True, True)
        self.upgrader = UpgradeToDatabaseStep(self.fileStore, self.sqlStore)

        requirements = CommonTests.requirements
        extras = deriveValue(self, "extraRequirements", lambda t: {})
        requirements = self.mergeRequirements(requirements, extras)

        yield populateCalendarsFrom(requirements, fileStore)
        md5s = CommonTests.md5s
        yield resetCalendarMD5s(md5s, fileStore)
        self.filesPath.child("calendars").child("__uids__").child("ho").child(
            "me").child("home1").child(".some-extra-data").setContent(
            "some extra data")

        requirements = ABCommonTests.requirements
        yield populateAddressBooksFrom(requirements, fileStore)
        md5s = ABCommonTests.md5s
        yield resetAddressBookMD5s(md5s, fileStore)
        self.filesPath.child("addressbooks").child("__uids__").child(
            "ho").child("me").child("home1").child(
            ".some-extra-data").setContent("some extra data")

        # Add some properties we want to check get migrated over
        txn = self.fileStore.newTransaction()
        home = yield txn.calendarHomeWithUID("home_defaults")

        cal = yield home.calendarWithName("calendar_1")
        props = cal.properties()
        props[PropertyName.fromElement(
            caldavxml.SupportedCalendarComponentSet
        )] = caldavxml.SupportedCalendarComponentSet(
            caldavxml.CalendarComponent(name="VEVENT"),
            caldavxml.CalendarComponent(name="VTODO"),
        )
        props[PropertyName.fromElement(
            element.ResourceType
        )] = element.ResourceType(
            element.Collection(),
            caldavxml.Calendar(),
        )
        props[PropertyName.fromElement(
            customxml.GETCTag
        )] = customxml.GETCTag.fromString("foobar")

        inbox = yield home.calendarWithName("inbox")
        props = inbox.properties()
        props[PropertyName.fromElement(
            customxml.CalendarAvailability
        )] = customxml.CalendarAvailability.fromString(str(self.av1))
        props[PropertyName.fromElement(
            caldavxml.ScheduleDefaultCalendarURL
        )] = caldavxml.ScheduleDefaultCalendarURL(
            element.HRef.fromString(
                "/calendars/__uids__/home_defaults/calendar_1"),
        )

        yield txn.commit()

    def mergeRequirements(self, a, b):
        """
        Merge two requirements dictionaries together, modifying C{a} and
        returning it.

        @param a: Some requirements, in the format of
            L{CommonTests.requirements}.
        @type a: C{dict}

        @param b: Some additional requirements, to be merged into C{a}.
        @type b: C{dict}

        @return: C{a}
        @rtype: C{dict}
        """
        for homeUID in b:
            homereq = a.setdefault(homeUID, {})
            homeExtras = b[homeUID]
            for calendarUID in homeExtras:
                calreq = homereq.setdefault(calendarUID, {})
                calendarExtras = homeExtras[calendarUID]
                calreq.update(calendarExtras)
        return a

    @withSpecialValue(
        "extraRequirements",
        {
            "home1": {
                "calendar_1": {
                    "bogus.ics": (
                        getModule("twistedcaldav").filePath.sibling("zoneinfo").
                        child("EST.ics").getContent(),
                        CommonTests.metadata1
                    )
                }
            }
        }
    )
    @inlineCallbacks
    def test_unknownTypeNotMigrated(self):
        """
        The only types of calendar objects that should get migrated are
        VEVENTs and VTODOs.  Other component types, such as free-standing
        VTIMEZONEs, don't have a UID and can't be stored properly in the
        database, so they should not be migrated.
        """
        yield self.upgrader.stepWithResult(None)
        txn = self.sqlStore.newTransaction()
        self.addCleanup(txn.commit)
        self.assertIdentical(
            None,
            (yield (yield (yield
                (yield txn.calendarHomeWithUID("home1")).calendarWithName(
                    "calendar_1"))).calendarObjectWithName("bogus.ics"))
        )

    @inlineCallbacks
    def test_upgradeCalendarHomes(self):
        """
        L{UpgradeToDatabaseService.startService} will do the upgrade, then
        start its dependent service by adding it to its service hierarchy.
        """
        # Create a fake directory in the same place as a home, but with a
        # non-existent uid
        fake_dir = self.filesPath.child("calendars").child("__uids__").child(
            "ho").child("me").child("foobar")
        fake_dir.makedirs()

        # Create a fake file in the same place as a home, with a name that
        # matches the hash uid prefix
        fake_file = self.filesPath.child("calendars").child("__uids__").child(
            "ho").child("me").child("home_file")
        fake_file.setContent("")

        yield self.upgrader.stepWithResult(None)
        txn = self.sqlStore.newTransaction()
        self.addCleanup(txn.commit)
        for uid in CommonTests.requirements:
            if CommonTests.requirements[uid] is not None:
                self.assertNotIdentical(
                    None, (yield txn.calendarHomeWithUID(uid)))
        # Successfully migrated calendar homes are deleted
        self.assertFalse(
            self.filesPath.child("calendars").child("__uids__").child(
                "ho").child("me").child("home1").exists())

        # Want metadata preserved
        home = (yield txn.calendarHomeWithUID("home1"))
        calendar = (yield home.calendarWithName("calendar_1"))
        for name, metadata, md5 in (
            ("1.ics", CommonTests.metadata1, CommonTests.md5Values[0]),
            ("2.ics", CommonTests.metadata2, CommonTests.md5Values[1]),
            ("3.ics", CommonTests.metadata3, CommonTests.md5Values[2]),
        ):
            object = (yield calendar.calendarObjectWithName(name))
            self.assertEquals(object.getMetadata(), metadata)
            self.assertEquals(object.md5(), md5)

    @withSpecialValue("extraRequirements", {"nonexistent": {"calendar_1": {}}})
    @inlineCallbacks
    def test_upgradeCalendarHomesMissingDirectoryRecord(self):
        """
        Test an upgrade where a directory record is missing for a home;
        the original home directory will remain on disk.
        """
        yield self.upgrader.stepWithResult(None)
        txn = self.sqlStore.newTransaction()
        self.addCleanup(txn.commit)
        for uid in CommonTests.requirements:
            if CommonTests.requirements[uid] is not None:
                self.assertNotIdentical(
                    None, (yield txn.calendarHomeWithUID(uid)))
        self.assertIdentical(
            None, (yield txn.calendarHomeWithUID(u"nonexistent")))
        # Skipped calendar homes are not deleted
        self.assertTrue(
            self.filesPath.child("calendars").child("__uids__").child(
                "no").child("ne").child("nonexistent").exists())

    @inlineCallbacks
    def test_upgradeExistingHome(self):
        """
        L{UpgradeToDatabaseService.startService} will skip migrating
        existing homes.
        """
        startTxn = self.sqlStore.newTransaction("populate empty sample")
        yield startTxn.calendarHomeWithUID("home1", create=True)
        yield startTxn.commit()
        yield self.upgrader.stepWithResult(None)
        vrfyTxn = self.sqlStore.newTransaction("verify sample still empty")
        self.addCleanup(vrfyTxn.commit)
        home = yield vrfyTxn.calendarHomeWithUID("home1")
        # The default calendar is still there.
        self.assertNotIdentical(
            None, (yield home.calendarWithName("calendar")))
        # The migrated calendar isn't.
        self.assertIdentical(
            None, (yield home.calendarWithName("calendar_1")))

    @inlineCallbacks
    def test_upgradeAttachments(self):
        """
        L{UpgradeToDatabaseService.startService} upgrades calendar
        attachments as well.
        """
        # Need to tweak config and settings to setup dropbox to work
        self.patch(config, "EnableDropBox", True)
        self.patch(config, "EnableManagedAttachments", False)
        self.sqlStore.enableManagedAttachments = False

        txn = self.sqlStore.newTransaction()
        cs = schema.CALENDARSERVER
        yield Delete(From=cs, Where=cs.NAME == "MANAGED-ATTACHMENTS").on(txn)
        yield txn.commit()

        txn = self.fileStore.newTransaction()
        committed = []

        def maybeCommit():
            if not committed:
                committed.append(True)
                return txn.commit()

        self.addCleanup(maybeCommit)

        @inlineCallbacks
        def getSampleObj():
            home = (yield txn.calendarHomeWithUID("home1"))
            calendar = (yield home.calendarWithName("calendar_1"))
            object = (yield calendar.calendarObjectWithName("1.ics"))
            returnValue(object)

        inObject = yield getSampleObj()
        someAttachmentName = "some-attachment"
        someAttachmentType = MimeType.fromString("application/x-custom-type")
        attachment = yield inObject.createAttachmentWithName(
            someAttachmentName,
        )
        transport = attachment.store(someAttachmentType)
        someAttachmentData = "Here is some data for your attachment, enjoy."
        transport.write(someAttachmentData)
        yield transport.loseConnection()
        yield maybeCommit()
        yield self.upgrader.stepWithResult(None)
        committed = []
        txn = self.sqlStore.newTransaction()
        outObject = yield getSampleObj()
        outAttachment = yield outObject.attachmentWithName(someAttachmentName)
        allDone = Deferred()

        class SimpleProto(Protocol):
            data = ''

            def dataReceived(self, data):
                self.data += data

            def connectionLost(self, reason):
                allDone.callback(self.data)

        self.assertEquals(outAttachment.contentType(), someAttachmentType)
        outAttachment.retrieve(SimpleProto())
        allData = yield allDone
        self.assertEquals(allData, someAttachmentData)

    @inlineCallbacks
    def test_upgradeAddressBookHomes(self):
        """
        L{UpgradeToDatabaseService.startService} will do the upgrade, then
        start its dependent service by adding it to its service hierarchy.
        """
        yield self.upgrader.stepWithResult(None)
        txn = self.sqlStore.newTransaction()
        self.addCleanup(txn.commit)
        for uid in ABCommonTests.requirements:
            if ABCommonTests.requirements[uid] is not None:
                self.assertNotIdentical(
                    None, (yield txn.addressbookHomeWithUID(uid)))
        # Successfully migrated addressbook homes are deleted
        self.assertFalse(
            self.filesPath.child("addressbooks").child("__uids__").child(
                "ho").child("me").child("home1").exists())

        # Want metadata preserved
        home = (yield txn.addressbookHomeWithUID("home1"))
        adbk = (yield home.addressbookWithName("addressbook"))
        for name, md5 in (
            ("1.vcf", ABCommonTests.md5Values[0]),
            ("2.vcf", ABCommonTests.md5Values[1]),
            ("3.vcf", ABCommonTests.md5Values[2]),
        ):
            object = (yield adbk.addressbookObjectWithName(name))
            self.assertEquals(object.md5(), md5)

    @inlineCallbacks
    def test_upgradeProperties(self):
        """
        L{UpgradeToDatabaseService.startService} will do the upgrade, then
        start its dependent service by adding it to its service hierarchy.
        """
        yield self.upgrader.stepWithResult(None)
        txn = self.sqlStore.newTransaction()
        self.addCleanup(txn.commit)

        # Want metadata preserved
        home = (yield txn.calendarHomeWithUID("home_defaults"))
        cal = (yield home.calendarWithName("calendar_1"))
        inbox = (yield home.calendarWithName("inbox"))

        # Supported components
        self.assertEqual(cal.getSupportedComponents(), "VEVENT")
        self.assertTrue(cal.properties().get(
            PropertyName.fromElement(
                caldavxml.SupportedCalendarComponentSet)) is None)

        # Resource type removed
        self.assertTrue(cal.properties().get(
            PropertyName.fromElement(element.ResourceType)) is None)

        # Ctag removed
        self.assertTrue(cal.properties().get(
            PropertyName.fromElement(customxml.GETCTag)) is None)

        # Availability
        self.assertEquals(str(home.getAvailability()), str(self.av1))
        self.assertTrue(inbox.properties().get(
            PropertyName.fromElement(
                customxml.CalendarAvailability)) is None)

        # Default calendar
        self.assertTrue(home.isDefaultCalendar(cal))
        self.assertTrue(inbox.properties().get(
            PropertyName.fromElement(
                caldavxml.ScheduleDefaultCalendarURL)) is None)

    def test_fileStoreFromPath(self):
        """
        Verify that fileStoreFromPath() will return a CommonDataStore if
        the given path contains either "calendars" or "addressbooks"
        sub-directories.  Otherwise it returns None.
        """

        # No child directories
        docRootPath = CachingFilePath(self.mktemp())
        docRootPath.createDirectory()
        step = UpgradeToDatabaseStep.fileStoreFromPath(docRootPath)
        self.assertEquals(step, None)

        # "calendars" child directory exists
        childPath = docRootPath.child("calendars")
        childPath.createDirectory()
        step = UpgradeToDatabaseStep.fileStoreFromPath(docRootPath)
        self.assertTrue(isinstance(step, CommonDataStore))
        childPath.remove()

        # "addressbooks" child directory exists
        childPath = docRootPath.child("addressbooks")
        childPath.createDirectory()
        step = UpgradeToDatabaseStep.fileStoreFromPath(docRootPath)
        self.assertTrue(isinstance(step, CommonDataStore))
        childPath.remove()
def getRootResource(config, resources=None):
    """
    Set up directory service and resource hierarchy based on config.
    Return root resource.

    Additional resources can be added to the hierarchy by passing a list of
    tuples containing: path, resource class, __init__ args list, and optional
    authentication scheme ("basic" or "digest").
    """

    # FIXME: this is only here to workaround circular imports
    doBind()

    #
    # Default resource classes
    #
    rootResourceClass = RootResource
    principalResourceClass = DirectoryPrincipalProvisioningResource
    calendarResourceClass = DirectoryCalendarHomeProvisioningResource
    iScheduleResourceClass = IScheduleInboxResource
    timezoneServiceResourceClass = TimezoneServiceResource
    webCalendarResourceClass = WebCalendarResource
    webAdminResourceClass = WebAdminResource
    addressBookResourceClass = DirectoryAddressBookHomeProvisioningResource
    directoryBackedAddressBookResourceClass = DirectoryBackedAddressBookResource

    #
    # Setup the Directory
    #
    directories = []

    directoryClass = namedClass(config.DirectoryService.type)
    log.info("Configuring directory service of type: %s"
             % (config.DirectoryService.type,))

    baseDirectory = directoryClass(config.DirectoryService.params)

    # Wait for the directory to become available
    while not baseDirectory.isAvailable():
        sleep(5)

    directories.append(baseDirectory)

    #
    # Setup the Locations and Resources Service
    #
    if config.ResourceService.Enabled:
        resourceClass = namedClass(config.ResourceService.type)
        log.info("Configuring resource service of type: %s"
                 % (resourceClass,))

        resourceDirectory = resourceClass(config.ResourceService.params)
        resourceDirectory.realmName = baseDirectory.realmName
        directories.append(resourceDirectory)

    #
    # Add sudoers directory
    #
    sudoDirectory = None

    if config.SudoersFile and os.path.exists(config.SudoersFile):
        log.info("Configuring SudoDirectoryService with file: %s"
                 % (config.SudoersFile,))

        sudoDirectory = SudoDirectoryService(config.SudoersFile)
        sudoDirectory.realmName = baseDirectory.realmName

        CalDAVResource.sudoDirectory = sudoDirectory
        directories.insert(0, sudoDirectory)
    else:
        log.info(
            "Not using SudoDirectoryService; file doesn't exist: %s"
            % (config.SudoersFile,)
        )

    #
    # Add wiki directory service
    #
    if config.Authentication.Wiki.Enabled:
        wikiDirectory = WikiDirectoryService()
        wikiDirectory.realmName = baseDirectory.realmName
        directories.append(wikiDirectory)

    #
    # Add internal directory service
    # Right now we only use this for CardDAV
    #
    if config.EnableCardDAV:
        internalDirectory = InternalDirectoryService(baseDirectory.realmName)
        directories.append(internalDirectory)

    directory = AggregateDirectoryService(directories)

    if sudoDirectory:
        directory.userRecordTypes.insert(
            0, SudoDirectoryService.recordType_sudoers)

    #
    # Use system-wide realm on OSX
    #
    try:
        import ServerFoundation
        realmName = ServerFoundation.XSAuthenticator.defaultRealm().encode("utf-8")
        directory.setRealm(realmName)
    except ImportError:
        pass

    #
    # Setup the Augment Service
    #
    augmentClass = namedClass(config.AugmentService.type)
    log.info("Configuring augment service of type: %s" % (augmentClass,))
    try:
        augment.AugmentService = augmentClass(**config.AugmentService.params)
    except IOError:
        log.error("Could not start augment service")
        raise

    #
    # Setup the ProxyDB Service
    #
    proxydbClass = namedClass(config.ProxyDBService.type)
    log.info("Configuring proxydb service of type: %s" % (proxydbClass,))
    try:
        calendaruserproxy.ProxyDBService = proxydbClass(
            **config.ProxyDBService.params)
    except IOError:
        log.error("Could not start proxydb service")
        raise

    #
    # Configure Memcached Client Pool
    #
    memcachepool.installPools(
        config.Memcached.Pools,
        config.Memcached.MaxClients,
    )

    #
    # Configure the Site and Wrappers
    #
    credentialFactories = []

    portal = Portal(auth.DavRealm())

    portal.registerChecker(directory)

    realm = directory.realmName or ""

    log.info("Configuring authentication for realm: %s" % (realm,))

    for scheme, schemeConfig in config.Authentication.iteritems():
        scheme = scheme.lower()

        credFactory = None

        if schemeConfig["Enabled"]:
            log.info("Setting up scheme: %s" % (scheme,))

            if scheme == "kerberos":
                if not NegotiateCredentialFactory:
                    log.info("Kerberos support not available")
                    continue

                try:
                    principal = schemeConfig["ServicePrincipal"]
                    if not principal:
                        credFactory = NegotiateCredentialFactory(
                            type="HTTP",
                            hostname=config.ServerHostName,
                        )
                    else:
                        credFactory = NegotiateCredentialFactory(
                            principal=principal,
                        )
                except ValueError:
                    log.info("Could not start Kerberos")
                    continue

            elif scheme == "digest":
                credFactory = QopDigestCredentialFactory(
                    schemeConfig["Algorithm"],
                    schemeConfig["Qop"],
                    realm,
                )

            elif scheme == "basic":
                credFactory = BasicCredentialFactory(realm)

            elif scheme == "wiki":
                pass

            else:
                log.error("Unknown scheme: %s" % (scheme,))

        if credFactory:
            credentialFactories.append(credFactory)

    #
    # Setup Resource hierarchy
    #
    log.info("Setting up document root at: %s" % (config.DocumentRoot,))
    log.info("Setting up principal collection: %r"
             % (principalResourceClass,))

    principalCollection = principalResourceClass("/principals/", directory)

    #
    # Configure NotifierFactory
    #
    if config.Notifications.Enabled:
        notifierFactory = NotifierFactory(
            config.Notifications.InternalNotificationHost,
            config.Notifications.InternalNotificationPort,
        )
    else:
        notifierFactory = None

    if config.UseDatabase:
        _dbRoot = CachingFilePath(config.DatabaseRoot)
        _postgresService = PostgresService(
            _dbRoot, None, v1_schema, "caldav",
            logFile=config.PostgresLogFile)
        _newStore = CommonSQLDataStore(
            _postgresService.produceConnection, notifierFactory,
            _dbRoot.child("attachments"),
            config.EnableCalDAV, config.EnableCardDAV)
    else:
        _newStore = CommonFileDataStore(
            FilePath(config.DocumentRoot), notifierFactory,
            config.EnableCalDAV, config.EnableCardDAV)

    if config.EnableCalDAV:
        log.info("Setting up calendar collection: %r"
                 % (calendarResourceClass,))
        calendarCollection = calendarResourceClass(
            directory, "/calendars/",
            _newStore,
        )

    if config.EnableCardDAV:
        log.info("Setting up address book collection: %r"
                 % (addressBookResourceClass,))
        addressBookCollection = addressBookResourceClass(
            directory, "/addressbooks/",
            _newStore,
        )

        directoryPath = os.path.join(config.DocumentRoot,
                                     config.DirectoryAddressBook.name)
        if config.DirectoryAddressBook.Enabled and config.EnableSearchAddressBook:
            log.info("Setting up directory address book: %r"
                     % (directoryBackedAddressBookResourceClass,))

            directoryBackedAddressBookCollection = directoryBackedAddressBookResourceClass(
                principalCollections=(principalCollection,)
            )
            addSystemEventTrigger(
                "after", "startup",
                directoryBackedAddressBookCollection.provisionDirectory)
        else:
            # remove /directory from previous runs that may have created it
            try:
                FilePath(directoryPath).remove()
                log.info("Deleted: %s" % directoryPath)
            except (OSError, IOError), e:
                if e.errno != errno.ENOENT:
                    log.error("Could not delete: %s : %r"
                              % (directoryPath, e,))
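# Illustrative shape of the config.Authentication mapping iterated over
# above.  The scheme names and the "Enabled", "Algorithm", "Qop" and
# "ServicePrincipal" keys come straight from the code; the values shown
# are made up for illustration.
config.Authentication = {
    "Basic": {"Enabled": True},
    "Digest": {"Enabled": True, "Algorithm": "md5", "Qop": ""},
    "Kerberos": {"Enabled": False, "ServicePrincipal": ""},
    "Wiki": {"Enabled": False},
}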
]

DelayedCall.debug = True


def _todo(f, why):
    f.todo = why
    return f

featureUnimplemented = lambda f: _todo(f, "Feature unimplemented")
testUnimplemented = lambda f: _todo(f, "Test unimplemented")
todo = lambda why: lambda f: _todo(f, why)

dirTest = FilePath(__file__).parent().sibling("directory").child("test")

xmlFile = dirTest.child("accounts.xml")
resourcesFile = dirTest.child("resources.xml")
augmentsFile = dirTest.child("augments.xml")
proxiesFile = dirTest.child("proxies.xml")


class SimpleStoreRequest(SimpleRequest):
    """
    A SimpleRequest that automatically grabs the proper transaction for a
    test.
    """
    def __init__(self, test, method, uri, headers=None, content=None,
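# Hypothetical usage of the todo helpers above: trial reports a decorated
# test as an expected failure ("todo") rather than an error.  The class
# and ticket text are placeholders, assuming twisted.trial.unittest's
# TestCase as elsewhere in these tests.
class ExampleTodoTests(TestCase):
    @featureUnimplemented
    def test_notThereYet(self):
        raise NotImplementedError()

    @todo("Tracked in a hypothetical ticket")
    def test_knownBroken(self):
        self.fail("still broken")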
class File(StaticRenderMixin):
    """
    File is a resource that represents a plain non-interpreted file
    (although it can look for an extension like .rpy or .cgi and hand the
    file to a processor for interpretation if you wish). Its constructor
    takes a file path.

    Alternatively, you can give a directory path to the constructor. In
    this case the resource will represent that directory, and its children
    will be files underneath that directory. This provides access to an
    entire filesystem tree with a single Resource.

    If you map the URL C{http://server/FILE} to a resource created as
    File('/tmp'), C{http://server/FILE/foo/bar.html} will return the
    contents of C{/tmp/foo/bar.html} .
    """
    implements(iweb.IResource)

    def _getContentTypes(self):
        if not hasattr(File, "_sharedContentTypes"):
            File._sharedContentTypes = loadMimeTypes()
        return File._sharedContentTypes

    contentTypes = property(_getContentTypes)

    contentEncodings = {
        ".gz": "gzip",
        ".bz2": "bzip2",
    }

    processors = {}

    indexNames = ["index", "index.html", "index.htm", "index.trp",
                  "index.rpy"]

    type = None

    def __init__(self, path, defaultType="text/plain", ignoredExts=(),
                 processors=None, indexNames=None):
        """
        Create a file with the given path.
        """
        super(File, self).__init__()

        self.putChildren = {}
        if isinstance(path, FilePath):
            self.fp = path
        else:
            assert isinstance(path, str), "This should be a string."
            self.fp = FilePath(path)
        # Remove the dots from the path to split
        self.defaultType = defaultType
        self.ignoredExts = list(ignoredExts)
        if processors is not None:
            self.processors = dict([
                (key.lower(), value)
                for key, value in processors.items()
            ])
        if indexNames is not None:
            self.indexNames = indexNames

    def comparePath(self, path):
        if isinstance(path, FilePath):
            return path.path == self.fp.path
        else:
            return path == self.fp.path

    def exists(self):
        return self.fp.exists()

    def etag(self):
        if not self.fp.exists():
            return succeed(None)

        st = self.fp.statinfo

        #
        # Mark ETag as weak if it was modified more recently than we can
        # measure and report, as it could be modified again in that span
        # and we then wouldn't know to provide a new ETag.
        #
        weak = (time.time() - st.st_mtime <= 1)

        return succeed(http_headers.ETag(
            "%X-%X-%X" % (st.st_ino, st.st_size, st.st_mtime),
            weak=weak
        ))

    def lastModified(self):
        if self.fp.exists():
            return self.fp.getmtime()
        else:
            return None

    def creationDate(self):
        if self.fp.exists():
            return self.fp.getmtime()
        else:
            return None

    def contentLength(self):
        if self.fp.exists():
            if self.fp.isfile():
                return self.fp.getsize()
            else:
                # Computing this would require rendering the resource;
                # let's punt instead.
                return None
        else:
            return None

    def _initTypeAndEncoding(self):
        self._type, self._encoding = getTypeAndEncoding(
            self.fp.basename(),
            self.contentTypes,
            self.contentEncodings,
            self.defaultType
        )

        # Handle cases not covered by getTypeAndEncoding()
        if self.fp.isdir():
            self._type = "httpd/unix-directory"

    def contentType(self):
        if not hasattr(self, "_type"):
            self._initTypeAndEncoding()
        return http_headers.MimeType.fromString(self._type)

    def contentEncoding(self):
        if not hasattr(self, "_encoding"):
            self._initTypeAndEncoding()
        return self._encoding

    def displayName(self):
        if self.fp.exists():
            return self.fp.basename()
        else:
            return None

    def ignoreExt(self, ext):
        """
        Ignore the given extension.  Serve file.ext if file is requested.
        """
        self.ignoredExts.append(ext)

    def putChild(self, name, child):
        """
        Register a child with the given name with this resource.

        @param name: the name of the child (a URI path segment)
        @param child: the child to register
        """
        self.putChildren[name] = child

    def getChild(self, name):
        """
        Look up a child resource.

        @return: the child of this resource with the given name.
        """
        if name == "":
            return self

        child = self.putChildren.get(name, None)
        if child:
            return child

        child_fp = self.fp.child(name)
        if hasattr(self, "knownChildren"):
            if name in self.knownChildren:
                child_fp.existsCached = True
        if child_fp.exists():
            return self.createSimilarFile(child_fp)
        else:
            return None

    def listChildren(self):
        """
        @return: a sequence of the names of all known children of this
            resource.
        """
        children = self.putChildren.keys()
        if self.fp.isdir():
            children += [c for c in self.fp.listdir() if c not in children]
        self.knownChildren = set(children)
        return children

    def locateChild(self, req, segments):
        """
        See L{IResource}C{.locateChild}.
        """
        # If getChild() finds a child resource, return it
        child = self.getChild(segments[0])
        if child is not None:
            return (child, segments[1:])

        # If we're not backed by a directory, we have no children.
        # But check for existance first; we might be a collection resource
        # that the request wants created.
        self.fp.restat(False)
        if self.fp.exists() and not self.fp.isdir():
            return (None, ())

        # OK, we need to return a child corresponding to the first segment
        path = segments[0]

        if path:
            fpath = self.fp.child(path)
        else:
            # Request is for a directory (collection) resource
            return (self, server.StopTraversal)

        # Don't run processors on directories - if someone wants their own
        # customized directory rendering, subclass File instead.
        if fpath.isfile():
            processor = self.processors.get(fpath.splitext()[1].lower())
            if processor:
                return (processor(fpath.path), segments[1:])

        elif not fpath.exists():
            sibling_fpath = fpath.siblingExtensionSearch(*self.ignoredExts)
            if sibling_fpath is not None:
                fpath = sibling_fpath

        return self.createSimilarFile(fpath.path), segments[1:]

    def renderHTTP(self, req):
        self.fp.changed()
        return super(File, self).renderHTTP(req)

    def render(self, req):
        """You know what you doing."""
        if not self.fp.exists():
            return responsecode.NOT_FOUND

        if self.fp.isdir():
            if req.path[-1] != "/":
                # Redirect to include trailing '/' in URI
                return http.RedirectResponse(
                    req.unparseURL(path=req.path + '/'))
            else:
                ifp = self.fp.childSearchPreauth(*self.indexNames)
                if ifp:
                    # Render from the index file
                    standin = self.createSimilarFile(ifp.path)
                else:
                    # Directory listing is in twistedcaldav.extensions
                    standin = Data(
                        "\n".join(
                            ["Directory: " + str(req.path), "---"] +
                            [x.basename() + ("/" if x.isdir() else "")
                             for x in self.fp.children()]
                        ),
                        "text/plain"
                    )
                return standin.render(req)

        try:
            f = self.fp.open()
        except IOError, e:
            import errno
            if e[0] == errno.EACCES:
                return responsecode.FORBIDDEN
            elif e[0] == errno.ENOENT:
                return responsecode.NOT_FOUND
            else:
                raise

        response = http.Response()
        response.stream = stream.FileStream(f, 0, self.fp.getsize())

        for (header, value) in (
            ("content-type", self.contentType()),
            ("content-encoding", self.contentEncoding()),
        ):
            if value is not None:
                response.headers.setHeader(header, value)

        return response
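# A small usage sketch for the File resource above, using only methods
# defined in the class itself; the filesystem paths are placeholders.
root = File("/var/www/htdocs", defaultType="text/html")
root.ignoreExt(".html")            # a request for /page may serve page.html
root.putChild("uploads", File("/var/www/uploads"))
child = root.getChild("uploads")   # returns the registered child resource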
from txdav.carddav.iaddressbookstore import IAddressBookObject, IAddressBookHome, \ IAddressBook, IAddressBookTransaction from txdav.common.datastore.test.util import CommonCommonTests from txdav.common.icommondatastore import InvalidUIDError from txdav.common.icommondatastore import ICommonTransaction from txdav.common.icommondatastore import InvalidObjectResourceError from txdav.common.icommondatastore import NoSuchHomeChildError from txdav.common.icommondatastore import ObjectResourceNameAlreadyExistsError from txdav.idav import IPropertyStore, IDataStore from txdav.xml.element import WebDAVUnknownElement from calendarserver.push.util import PushPriority storePath = FilePath(__file__).parent().child("addressbook_store") home1Root = storePath.child("ho").child("me").child("home1") home2Root = storePath.child("ho").child("me").child("home2") home3Root = storePath.child("ho").child("me").child("home3") adbk1Root = home1Root.child("addressbook") adbk2Root = home2Root.child("addressbook") adbk3Root = home3Root.child("addressbook") addressbook1_objectNames = [ "1.vcf", "2.vcf", "3.vcf", ] home1_addressbookNames = [
class HomeMigrationTests(CommonCommonTests, TestCase): """ Tests for L{UpgradeToDatabaseStep}. """ av1 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 CALSCALE:GREGORIAN PRODID:-//calendarserver.org//Zonal//EN BEGIN:VAVAILABILITY ORGANIZER:mailto:[email protected] UID:[email protected] DTSTAMP:20061005T133225Z DTEND:20140101T000000Z BEGIN:AVAILABLE UID:[email protected] DTSTAMP:20061005T133225Z SUMMARY:Monday to Friday from 9:00 to 17:00 DTSTART:20130101T090000Z DTEND:20130101T170000Z RRULE:FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR END:AVAILABLE END:VAVAILABILITY END:VCALENDAR """) @inlineCallbacks def setUp(self): """ Set up two stores to migrate between. """ yield super(HomeMigrationTests, self).setUp() yield self.buildStoreAndDirectory( extraUids=( u"home1", u"home2", u"home3", u"home_defaults", u"home_no_splits", u"home_splits", u"home_splits_shared", ) ) self.sqlStore = self.store # Add some files to the file store. self.filesPath = CachingFilePath(self.mktemp()) self.filesPath.createDirectory() fileStore = self.fileStore = CommonDataStore( self.filesPath, {"push": StubNotifierFactory()}, self.directory, True, True ) self.upgrader = UpgradeToDatabaseStep(self.fileStore, self.sqlStore) requirements = CommonTests.requirements extras = deriveValue(self, "extraRequirements", lambda t: {}) requirements = self.mergeRequirements(requirements, extras) yield populateCalendarsFrom(requirements, fileStore) md5s = CommonTests.md5s yield resetCalendarMD5s(md5s, fileStore) self.filesPath.child("calendars").child( "__uids__").child("ho").child("me").child("home1").child( ".some-extra-data").setContent("some extra data") requirements = ABCommonTests.requirements yield populateAddressBooksFrom(requirements, fileStore) md5s = ABCommonTests.md5s yield resetAddressBookMD5s(md5s, fileStore) self.filesPath.child("addressbooks").child( "__uids__").child("ho").child("me").child("home1").child( ".some-extra-data").setContent("some extra data") # Add some properties we want to check get migrated over txn = self.fileStore.newTransaction() home = yield txn.calendarHomeWithUID("home_defaults") cal = yield home.calendarWithName("calendar_1") props = cal.properties() props[PropertyName.fromElement(caldavxml.SupportedCalendarComponentSet)] = caldavxml.SupportedCalendarComponentSet( caldavxml.CalendarComponent(name="VEVENT"), caldavxml.CalendarComponent(name="VTODO"), ) props[PropertyName.fromElement(element.ResourceType)] = element.ResourceType( element.Collection(), caldavxml.Calendar(), ) props[PropertyName.fromElement(customxml.GETCTag)] = customxml.GETCTag.fromString("foobar") inbox = yield home.calendarWithName("inbox") props = inbox.properties() props[PropertyName.fromElement(customxml.CalendarAvailability)] = customxml.CalendarAvailability.fromString(str(self.av1)) props[PropertyName.fromElement(caldavxml.ScheduleDefaultCalendarURL)] = caldavxml.ScheduleDefaultCalendarURL( element.HRef.fromString("/calendars/__uids__/home_defaults/calendar_1"), ) yield txn.commit() def mergeRequirements(self, a, b): """ Merge two requirements dictionaries together, modifying C{a} and returning it. @param a: Some requirements, in the format of L{CommonTests.requirements}. @type a: C{dict} @param b: Some additional requirements, to be merged into C{a}. 
        @type b: C{dict}

        @return: C{a}
        @rtype: C{dict}
        """
        for homeUID in b:
            homereq = a.setdefault(homeUID, {})
            homeExtras = b[homeUID]
            for calendarUID in homeExtras:
                calreq = homereq.setdefault(calendarUID, {})
                calendarExtras = homeExtras[calendarUID]
                calreq.update(calendarExtras)
        return a


    @withSpecialValue(
        "extraRequirements",
        {
            "home1": {
                "calendar_1": {
                    "bogus.ics": (
                        getModule("twistedcaldav").filePath.sibling("zoneinfo")
                        .child("EST.ics").getContent(),
                        CommonTests.metadata1
                    )
                }
            }
        }
    )
    @inlineCallbacks
    def test_unknownTypeNotMigrated(self):
        """
        The only types of calendar objects that should get migrated are
        VEVENTs and VTODOs.  Other component types, such as free-standing
        VTIMEZONEs, don't have a UID and can't be stored properly in the
        database, so they should not be migrated.
        """
        yield self.upgrader.stepWithResult(None)
        txn = self.sqlStore.newTransaction()
        self.addCleanup(txn.commit)
        self.assertIdentical(
            None,
            (yield (yield (yield (
                yield txn.calendarHomeWithUID("home1")
            ).calendarWithName("calendar_1"))
            ).calendarObjectWithName("bogus.ics"))
        )


    @inlineCallbacks
    def test_upgradeCalendarHomes(self):
        """
        L{UpgradeToDatabaseStep.stepWithResult} migrates calendar homes from
        the file store into the SQL store, preserving object metadata, and
        removes successfully migrated homes from disk.
        """
        # Create a fake directory in the same place as a home, but with a
        # non-existent uid
        fake_dir = self.filesPath.child("calendars").child("__uids__").child("ho").child("me").child("foobar")
        fake_dir.makedirs()

        # Create a fake file in the same place as a home, with a name that
        # matches the hashed-UID prefix
        fake_file = self.filesPath.child("calendars").child("__uids__").child("ho").child("me").child("home_file")
        fake_file.setContent("")

        yield self.upgrader.stepWithResult(None)
        txn = self.sqlStore.newTransaction()
        self.addCleanup(txn.commit)
        for uid in CommonTests.requirements:
            if CommonTests.requirements[uid] is not None:
                self.assertNotIdentical(
                    None, (yield txn.calendarHomeWithUID(uid))
                )
        # Successfully migrated calendar homes are deleted
        self.assertFalse(self.filesPath.child("calendars").child(
            "__uids__").child("ho").child("me").child("home1").exists())

        # Want metadata preserved
        home = (yield txn.calendarHomeWithUID("home1"))
        calendar = (yield home.calendarWithName("calendar_1"))
        for name, metadata, md5 in (
            ("1.ics", CommonTests.metadata1, CommonTests.md5Values[0]),
            ("2.ics", CommonTests.metadata2, CommonTests.md5Values[1]),
            ("3.ics", CommonTests.metadata3, CommonTests.md5Values[2]),
        ):
            object = (yield calendar.calendarObjectWithName(name))
            self.assertEquals(object.getMetadata(), metadata)
            self.assertEquals(object.md5(), md5)


    @withSpecialValue(
        "extraRequirements",
        {
            "nonexistent": {
                "calendar_1": {
                }
            }
        }
    )
    @inlineCallbacks
    def test_upgradeCalendarHomesMissingDirectoryRecord(self):
        """
        Test an upgrade where a directory record is missing for a home;
        the original home directory will remain on disk.
        """
        yield self.upgrader.stepWithResult(None)
        txn = self.sqlStore.newTransaction()
        self.addCleanup(txn.commit)
        for uid in CommonTests.requirements:
            if CommonTests.requirements[uid] is not None:
                self.assertNotIdentical(
                    None, (yield txn.calendarHomeWithUID(uid))
                )
        self.assertIdentical(None, (yield txn.calendarHomeWithUID(u"nonexistent")))
        # Skipped calendar homes are not deleted
        self.assertTrue(self.filesPath.child("calendars").child(
            "__uids__").child("no").child("ne").child("nonexistent").exists())


    @inlineCallbacks
    def test_upgradeExistingHome(self):
        """
        L{UpgradeToDatabaseStep.stepWithResult} will skip migrating existing
        homes.
""" startTxn = self.sqlStore.newTransaction("populate empty sample") yield startTxn.calendarHomeWithUID("home1", create=True) yield startTxn.commit() yield self.upgrader.stepWithResult(None) vrfyTxn = self.sqlStore.newTransaction("verify sample still empty") self.addCleanup(vrfyTxn.commit) home = yield vrfyTxn.calendarHomeWithUID("home1") # The default calendar is still there. self.assertNotIdentical(None, (yield home.calendarWithName("calendar"))) # The migrated calendar isn't. self.assertIdentical(None, (yield home.calendarWithName("calendar_1"))) @inlineCallbacks def test_upgradeAttachments(self): """ L{UpgradeToDatabaseService.startService} upgrades calendar attachments as well. """ # Need to tweak config and settings to setup dropbox to work self.patch(config, "EnableDropBox", True) self.patch(config, "EnableManagedAttachments", False) self.sqlStore.enableManagedAttachments = False txn = self.sqlStore.newTransaction() cs = schema.CALENDARSERVER yield Delete( From=cs, Where=cs.NAME == "MANAGED-ATTACHMENTS" ).on(txn) yield txn.commit() txn = self.fileStore.newTransaction() committed = [] def maybeCommit(): if not committed: committed.append(True) return txn.commit() self.addCleanup(maybeCommit) @inlineCallbacks def getSampleObj(): home = (yield txn.calendarHomeWithUID("home1")) calendar = (yield home.calendarWithName("calendar_1")) object = (yield calendar.calendarObjectWithName("1.ics")) returnValue(object) inObject = yield getSampleObj() someAttachmentName = "some-attachment" someAttachmentType = MimeType.fromString("application/x-custom-type") attachment = yield inObject.createAttachmentWithName( someAttachmentName, ) transport = attachment.store(someAttachmentType) someAttachmentData = "Here is some data for your attachment, enjoy." transport.write(someAttachmentData) yield transport.loseConnection() yield maybeCommit() yield self.upgrader.stepWithResult(None) committed = [] txn = self.sqlStore.newTransaction() outObject = yield getSampleObj() outAttachment = yield outObject.attachmentWithName(someAttachmentName) allDone = Deferred() class SimpleProto(Protocol): data = '' def dataReceived(self, data): self.data += data def connectionLost(self, reason): allDone.callback(self.data) self.assertEquals(outAttachment.contentType(), someAttachmentType) outAttachment.retrieve(SimpleProto()) allData = yield allDone self.assertEquals(allData, someAttachmentData) @inlineCallbacks def test_upgradeAddressBookHomes(self): """ L{UpgradeToDatabaseService.startService} will do the upgrade, then start its dependent service by adding it to its service hierarchy. 
""" yield self.upgrader.stepWithResult(None) txn = self.sqlStore.newTransaction() self.addCleanup(txn.commit) for uid in ABCommonTests.requirements: if ABCommonTests.requirements[uid] is not None: self.assertNotIdentical( None, (yield txn.addressbookHomeWithUID(uid)) ) # Successfully migrated addressbook homes are deleted self.assertFalse(self.filesPath.child("addressbooks").child( "__uids__").child("ho").child("me").child("home1").exists()) # Want metadata preserved home = (yield txn.addressbookHomeWithUID("home1")) adbk = (yield home.addressbookWithName("addressbook")) for name, md5 in ( ("1.vcf", ABCommonTests.md5Values[0]), ("2.vcf", ABCommonTests.md5Values[1]), ("3.vcf", ABCommonTests.md5Values[2]), ): object = (yield adbk.addressbookObjectWithName(name)) self.assertEquals(object.md5(), md5) @inlineCallbacks def test_upgradeProperties(self): """ L{UpgradeToDatabaseService.startService} will do the upgrade, then start its dependent service by adding it to its service hierarchy. """ yield self.upgrader.stepWithResult(None) txn = self.sqlStore.newTransaction() self.addCleanup(txn.commit) # Want metadata preserved home = (yield txn.calendarHomeWithUID("home_defaults")) cal = (yield home.calendarWithName("calendar_1")) inbox = (yield home.calendarWithName("inbox")) # Supported components self.assertEqual(cal.getSupportedComponents(), "VEVENT") self.assertTrue(cal.properties().get(PropertyName.fromElement(caldavxml.SupportedCalendarComponentSet)) is None) # Resource type removed self.assertTrue(cal.properties().get(PropertyName.fromElement(element.ResourceType)) is None) # Ctag removed self.assertTrue(cal.properties().get(PropertyName.fromElement(customxml.GETCTag)) is None) # Availability self.assertEquals(str(home.getAvailability()), str(self.av1)) self.assertTrue(inbox.properties().get(PropertyName.fromElement(customxml.CalendarAvailability)) is None) # Default calendar self.assertTrue(home.isDefaultCalendar(cal)) self.assertTrue(inbox.properties().get(PropertyName.fromElement(caldavxml.ScheduleDefaultCalendarURL)) is None) def test_fileStoreFromPath(self): """ Verify that fileStoreFromPath() will return a CommonDataStore if the given path contains either "calendars" or "addressbooks" sub-directories. Otherwise it returns None """ # No child directories docRootPath = CachingFilePath(self.mktemp()) docRootPath.createDirectory() step = UpgradeToDatabaseStep.fileStoreFromPath(docRootPath) self.assertEquals(step, None) # "calendars" child directory exists childPath = docRootPath.child("calendars") childPath.createDirectory() step = UpgradeToDatabaseStep.fileStoreFromPath(docRootPath) self.assertTrue(isinstance(step, CommonDataStore)) childPath.remove() # "addressbooks" child directory exists childPath = docRootPath.child("addressbooks") childPath.createDirectory() step = UpgradeToDatabaseStep.fileStoreFromPath(docRootPath) self.assertTrue(isinstance(step, CommonDataStore)) childPath.remove()
] DelayedCall.debug = True def _todo(f, why): f.todo = why return f featureUnimplemented = lambda f: _todo(f, "Feature unimplemented") testUnimplemented = lambda f: _todo(f, "Test unimplemented") todo = lambda why: lambda f: _todo(f, why) dirTest = FilePath(__file__).parent().sibling("directory").child("test") xmlFile = dirTest.child("accounts.xml") resourcesFile = dirTest.child("resources.xml") augmentsFile = dirTest.child("augments.xml") proxiesFile = dirTest.child("proxies.xml") class SimpleStoreRequest(SimpleRequest): """ A SimpleRequest that automatically grabs the proper transaction for a test. """ def __init__(self, test, method, uri, headers=None, content=None, authPrincipal=None): super(SimpleStoreRequest, self).__init__(test.site, method, uri, headers, content) self._test = test self._newStoreTransaction = test.transactionUnderTest(txn=transactionFromRequest(self, test.storeUnderTest())) self.credentialFactories = {}
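# Illustrative sketch, not from the original source: tests typically drive a
# resource through SimpleStoreRequest so the request shares the test's own
# store transaction. The send() helper is assumed to come from the
# surrounding test base class.
#
#     request = SimpleStoreRequest(self, "GET", "/calendars/__uids__/home1/")
#     response = yield self.send(request)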
def buildTestDirectory( store, dataRoot, accounts=None, resources=None, augments=None, proxies=None, serversDB=None ): """ @param store: the store for the directory to use @param dataRoot: the directory to copy xml files to @param accounts: path to the accounts.xml file @type accounts: L{FilePath} @param resources: path to the resources.xml file @type resources: L{FilePath} @param augments: path to the augments.xml file @type augments: L{FilePath} @param proxies: path to the proxies.xml file @type proxies: L{FilePath} @return: the directory service @rtype: L{IDirectoryService} """ defaultDirectory = FilePath(__file__).sibling("accounts") if accounts is None: accounts = defaultDirectory.child("accounts.xml") if resources is None: resources = defaultDirectory.child("resources.xml") if augments is None: augments = defaultDirectory.child("augments.xml") if proxies is None: proxies = defaultDirectory.child("proxies.xml") if not os.path.exists(dataRoot): os.makedirs(dataRoot) accountsCopy = FilePath(dataRoot).child("accounts.xml") accountsCopy.setContent(accounts.getContent()) resourcesCopy = FilePath(dataRoot).child("resources.xml") resourcesCopy.setContent(resources.getContent()) augmentsCopy = FilePath(dataRoot).child("augments.xml") augmentsCopy.setContent(augments.getContent()) proxiesCopy = FilePath(dataRoot).child("proxies.xml") proxiesCopy.setContent(proxies.getContent()) servicesInfo = ( ConfigDict( { "Enabled": True, "type": "xml", "params": { "xmlFile": "accounts.xml", "recordTypes": ("users", "groups"), }, } ), ConfigDict( { "Enabled": True, "type": "xml", "params": { "xmlFile": "resources.xml", "recordTypes": ("locations", "resources", "addresses"), }, } ), ) augmentServiceInfo = ConfigDict( { "type": "twistedcaldav.directory.augment.AugmentXMLDB", "params": { "xmlFiles": ["augments.xml", ], "statSeconds": 15, }, } ) wikiServiceInfo = ConfigDict( { "Enabled": True, "CollabHost": "localhost", "CollabPort": 4444, } ) directory = buildDirectory( store, dataRoot, servicesInfo, augmentServiceInfo, wikiServiceInfo, serversDB ) store.setDirectoryService(directory) return directory
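# Illustrative usage, under the assumption that "store" and a writable data
# root (e.g. config.DataRoot) come from the surrounding test harness; with
# no explicit paths, the default accounts/resources/augments/proxies XML
# fixtures shipped next to this module are copied in:
#
#     directory = buildTestDirectory(store, config.DataRoot)
#     record = yield directory.recordWithUID(u"user01")   # hypothetical UID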