Example #1
def setUpCalendarStore(test):
    test.root = FilePath(test.mktemp())
    test.root.createDirectory()

    storeRootPath = test.storeRootPath = test.root.child("store")
    calendarPath = storeRootPath.child("calendars").child("__uids__")
    calendarPath.parent().makedirs()
    # Assumption: storePath is a module-level FilePath holding the template
    # store fixture that each test copies into its own directory.
    storePath.copyTo(calendarPath)

    # Set year values to current year
    nowYear = DateTime.getToday().getYear()
    for home in calendarPath.child("ho").child("me").children():
        if not home.basename().startswith("."):
            for calendar in home.children():
                if not calendar.basename().startswith("."):
                    for resource in calendar.children():
                        if resource.basename().endswith(".ics"):
                            resource.setContent(resource.getContent() % {"now": nowYear})

    testID = test.id()
    test.counter = 0
    test.notifierFactory = StubNotifierFactory()
    test.calendarStore = CalendarStore(
        storeRootPath,
        {"push": test.notifierFactory} if test.notifierFactory else {},
        None,  # must create directory later
        quota=deriveQuota(test),
    )
    test.directory = buildTestDirectory(test.calendarStore, test.mktemp())
    test.txn = test.calendarStore.newTransaction(testID + "(old)")
    assert test.calendarStore is not None, "No calendar store?"
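A minimal sketch of how a Twisted trial test case might reuse this helper. The class and test names are hypothetical, and setUpCalendarStore is assumed to be importable from the same test module:

from twisted.trial.unittest import TestCase


class CalendarStoreFixtureTests(TestCase):
    """
    Hypothetical test case built on setUpCalendarStore().
    """

    def setUp(self):
        # The helper attaches calendarStore, txn, notifierFactory and
        # friends directly onto the test instance.
        setUpCalendarStore(self)

    def test_transactionIsOpen(self):
        # setUpCalendarStore() opened a transaction for us.
        self.assertTrue(self.txn is not None)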
Example #2
    @inlineCallbacks
    def setUp(self):
        self.notifierFactory = StubNotifierFactory()
        self.store = yield buildStore(self, self.notifierFactory)
        self.addCleanup(self.maybeCommitLast)
        self._txn = self.store.newTransaction()
        self.propertyStore = \
            self.propertyStore1 = yield PropertyStore.load("user01", None, None, self._txn, 1)
        self.propertyStore2 = yield PropertyStore.load("user01", "user02", None, self._txn, 1)
        self.propertyStore3 = yield PropertyStore.load("user01", None, "user03", self._txn, 1)
        self.propertyStore4 = yield PropertyStore.load("user01", "user02", "user04", self._txn, 1)
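Every one of these snippets hands the store a StubNotifierFactory so that change notifications have somewhere to go without a real push transport. A rough, hypothetical stand-in (not the real txdav implementation) might look like:

class RecordingNotifierFactory(object):
    """
    Hypothetical stand-in for StubNotifierFactory: it records every
    notification the store asks it to send instead of pushing it anywhere.
    """

    def __init__(self):
        self.history = []

    def send(self, *args, **kwargs):
        # Remember what would have been pushed so tests can inspect it.
        self.history.append((args, kwargs))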
Example #3
    @inlineCallbacks
    def setUp(self):
        TestCase.setUp(self)

        test_upgrader = UpgradeDatabaseSchemaStep(None)
        self.upgradePath = test_upgrader.schemaLocation.child("old").child(
            POSTGRES_DIALECT)
        self.currentVersion = self._getSchemaVersion(
            test_upgrader.schemaLocation.child("current.sql"), "VERSION")

        self.store = yield theStoreBuilder.buildStore(
            self, {"push": StubNotifierFactory()}, enableJobProcessing=False)
Example #4
def setUpAddressBookStore(test):
    test.root = FilePath(test.mktemp())
    test.root.createDirectory()

    storeRootPath = test.storeRootPath = test.root.child("store")
    addressbookPath = storeRootPath.child("addressbooks").child("__uids__")
    addressbookPath.parent().makedirs()
    # Assumption: storePath is the same module-level template store fixture
    # noted in Example #1.
    storePath.copyTo(addressbookPath)

    test.counter = 0
    test.notifierFactory = StubNotifierFactory()
    test.addressbookStore = AddressBookStore(storeRootPath, {"push": test.notifierFactory}, None)
    test.txn = test.addressbookStore.newTransaction(test.id() + " (old)")
    assert test.addressbookStore is not None, "No addressbook store?"
Example #5
    @inlineCallbacks
    def _initStore(self, enableManagedAttachments=True):
        """
        Build a store with certain bits cleaned out.
        """

        self.patch(config, "EnableManagedAttachments",
                   enableManagedAttachments)

        store = yield theStoreBuilder.buildStore(
            self, {"push": StubNotifierFactory()})
        store.enableManagedAttachments = enableManagedAttachments

        txn = store.newTransaction()
        cs = schema.CALENDARSERVER
        yield Delete(From=cs, Where=cs.NAME == "MANAGED-ATTACHMENTS").on(txn)
        yield txn.commit()

        returnValue(store)
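Because _initStore ends with returnValue(store), it behaves as an inlineCallbacks-style coroutine and callers must yield it to get the store back. A hypothetical caller (test name illustrative) might look like:

    @inlineCallbacks
    def test_managedAttachmentsDisabled(self):
        # Hypothetical caller: build a store with managed attachments turned
        # off and check the flag _initStore sets on it.
        store = yield self._initStore(enableManagedAttachments=False)
        self.assertFalse(store.enableManagedAttachments)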
Example #6
    @inlineCallbacks
    def setUp(self):
        yield super(MultiStoreConduitTest, self).setUp()

        # Stores
        for i in range(self.numberOfStores):
            serversDB = ServersDB()
            for j in range(self.numberOfStores):
                letter = chr(ord("A") + j)
                port = 8008 + 100 * j
                server = Server(
                    letter, "http://127.0.0.1:{}".format(port), letter, j == i
                )
                serversDB.addServer(server)

            if i == 0:
                yield self.buildStoreAndDirectory(
                    serversDB=serversDB,
                    storeBuilder=self.theStoreBuilders[i],
                    accounts=self.accounts,
                    augments=self.augments,
                )
                self.theStores[i] = self.store
                self.theNotifiers[i] = self.notifierFactory
            else:
                self.theNotifiers[i] = StubNotifierFactory()
                self.theStores[i] = yield self.buildStore(
                    self.theStoreBuilders[i],
                    notifierFactory=self.theNotifiers[i],
                )
                directory = buildTestDirectory(
                    self.theStores[i],
                    self.mktemp(),
                    serversDB=serversDB,
                    accounts=self.accounts,
                    augments=self.augments,
                )
                self.theStores[i].setDirectoryService(directory)

            self.theStores[i].queryCacher = None     # Cannot use query caching
            self.theStores[i].conduit = self.makeConduit(self.theStores[i])

            FakeConduitRequest.addServerStore(
                serversDB.getServerById(chr(ord("A") + i)), self.theStores[i]
            )
Example #7
    @inlineCallbacks
    def setUp(self):

        self.serverRoot = self.mktemp()
        os.mkdir(self.serverRoot)
        self.absoluteServerRoot = os.path.abspath(self.serverRoot)

        configRoot = os.path.join(self.absoluteServerRoot, "Config")
        if not os.path.exists(configRoot):
            os.makedirs(configRoot)

        dataRoot = os.path.join(self.absoluteServerRoot, "Data")
        if not os.path.exists(dataRoot):
            os.makedirs(dataRoot)

        documentRoot = os.path.join(self.absoluteServerRoot, "Documents")
        if not os.path.exists(documentRoot):
            os.makedirs(documentRoot)

        logRoot = os.path.join(self.absoluteServerRoot, "Logs")
        if not os.path.exists(logRoot):
            os.makedirs(logRoot)

        runRoot = os.path.join(self.absoluteServerRoot, "Run")
        if not os.path.exists(runRoot):
            os.makedirs(runRoot)

        config.reset()

        testRoot = os.path.join(os.path.dirname(__file__), "gateway")
        templateName = os.path.join(testRoot, "caldavd.plist")
        with open(templateName) as templateFile:
            template = templateFile.read()

        databaseRoot = os.path.abspath("_spawned_scripts_db" + str(os.getpid()))
        newConfig = template % {
            "ServerRoot": self.absoluteServerRoot,
            "DataRoot": dataRoot,
            "DatabaseRoot": databaseRoot,
            "DocumentRoot": documentRoot,
            "ConfigRoot": configRoot,
            "LogRoot": logRoot,
            "RunRoot": runRoot,
            "WritablePlist": os.path.join(
                os.path.abspath(configRoot), "caldavd-writable.plist"
            ),
        }
        configFilePath = FilePath(
            os.path.join(configRoot, "caldavd.plist")
        )

        configFilePath.setContent(newConfig)

        self.configFileName = configFilePath.path
        config.load(self.configFileName)

        config.Memcached.Pools.Default.ClientEnabled = False
        config.Memcached.Pools.Default.ServerEnabled = False
        ClientFactory.allowTestCache = True
        memcacher.Memcacher.allowTestCache = True
        memcacher.Memcacher.reset()
        config.DirectoryAddressBook.Enabled = False
        config.UsePackageTimezones = True

        origUsersFile = FilePath(
            os.path.join(
                os.path.dirname(__file__),
                "gateway",
                "users-groups.xml"
            )
        )
        copyUsersFile = FilePath(
            os.path.join(config.DataRoot, "accounts.xml")
        )
        origUsersFile.copyTo(copyUsersFile)

        origResourcesFile = FilePath(
            os.path.join(
                os.path.dirname(__file__),
                "gateway",
                "resources-locations.xml"
            )
        )
        copyResourcesFile = FilePath(
            os.path.join(config.DataRoot, "resources.xml")
        )
        origResourcesFile.copyTo(copyResourcesFile)

        origAugmentFile = FilePath(
            os.path.join(
                os.path.dirname(__file__),
                "gateway",
                "augments.xml"
            )
        )
        copyAugmentFile = FilePath(os.path.join(config.DataRoot, "augments.xml"))
        origAugmentFile.copyTo(copyAugmentFile)

        self.notifierFactory = StubNotifierFactory()
        self.store = yield theStoreBuilder.buildStore(self, self.notifierFactory)
        self.directory = directoryFromConfig(config, self.store)
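A hypothetical follow-up test (name illustrative) that sanity-checks the configuration this setUp loads; the asserted values are exactly the overrides applied after config.load() above:

    def test_configOverridesApplied(self):
        # Hypothetical sanity check: the overrides applied after config.load()
        # in setUp should be visible on the global config object.
        self.assertFalse(config.DirectoryAddressBook.Enabled)
        self.assertTrue(config.UsePackageTimezones)
        self.assertFalse(config.Memcached.Pools.Default.ClientEnabled)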
Example #8
    @inlineCallbacks
    def setUp(self):
        TestCase.setUp(self)

        self.store = yield self.testStoreBuilder.buildStore(
            self, {"push": StubNotifierFactory()}, enableJobProcessing=False
        )
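A hypothetical follow-on test against the store built in this setUp. It assumes the transaction API used elsewhere in these examples (newTransaction(), calendarHomeWithUID(), commit()); the test name and UID are illustrative:

    @inlineCallbacks
    def test_createHome(self):
        # Hypothetical: open a transaction on the store built in setUp,
        # create a calendar home, and commit.
        txn = self.store.newTransaction()
        home = yield txn.calendarHomeWithUID("user01", create=True)
        self.assertTrue(home is not None)
        yield txn.commit()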
Example #9
    @inlineCallbacks
    def setUp(self):
        """
        Set up two stores to migrate between.
        """

        yield super(HomeMigrationTests, self).setUp()
        yield self.buildStoreAndDirectory(extraUids=(
            u"home1",
            u"home2",
            u"home3",
            u"home_defaults",
            u"home_no_splits",
            u"home_splits",
            u"home_splits_shared",
        ))
        self.sqlStore = self.store

        # Add some files to the file store.

        self.filesPath = CachingFilePath(self.mktemp())
        self.filesPath.createDirectory()
        fileStore = self.fileStore = CommonDataStore(
            self.filesPath, {"push": StubNotifierFactory()}, self.directory,
            True, True)
        self.upgrader = UpgradeToDatabaseStep(self.fileStore, self.sqlStore)

        requirements = CommonTests.requirements
        extras = deriveValue(self, "extraRequirements", lambda t: {})
        requirements = self.mergeRequirements(requirements, extras)

        yield populateCalendarsFrom(requirements, fileStore)
        md5s = CommonTests.md5s
        yield resetCalendarMD5s(md5s, fileStore)
        self.filesPath.child("calendars").child("__uids__").child("ho").child(
            "me").child("home1").child(".some-extra-data").setContent(
                "some extra data")

        requirements = ABCommonTests.requirements
        yield populateAddressBooksFrom(requirements, fileStore)
        md5s = ABCommonTests.md5s
        yield resetAddressBookMD5s(md5s, fileStore)
        self.filesPath.child("addressbooks").child("__uids__").child(
            "ho").child("me").child("home1").child(
                ".some-extra-data").setContent("some extra data")

        # Add some properties we want to check get migrated over
        txn = self.fileStore.newTransaction()
        home = yield txn.calendarHomeWithUID("home_defaults")

        cal = yield home.calendarWithName("calendar_1")
        props = cal.properties()
        props[PropertyName.fromElement(
            caldavxml.SupportedCalendarComponentSet
        )] = caldavxml.SupportedCalendarComponentSet(
            caldavxml.CalendarComponent(name="VEVENT"),
            caldavxml.CalendarComponent(name="VTODO"),
        )
        props[PropertyName.fromElement(
            element.ResourceType)] = element.ResourceType(
                element.Collection(),
                caldavxml.Calendar(),
            )
        props[PropertyName.fromElement(
            customxml.GETCTag)] = customxml.GETCTag.fromString("foobar")

        inbox = yield home.calendarWithName("inbox")
        props = inbox.properties()
        props[PropertyName.fromElement(
            customxml.CalendarAvailability
        )] = customxml.CalendarAvailability.fromString(str(self.av1))
        props[PropertyName.fromElement(
            caldavxml.ScheduleDefaultCalendarURL
        )] = caldavxml.ScheduleDefaultCalendarURL(
            element.HRef.fromString(
                "/calendars/__uids__/home_defaults/calendar_1"), )

        yield txn.commit()
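A hypothetical test driving the migration prepared in this setUp. The stepWithResult() entry point and the test name are assumptions for illustration; "home1" is one of the UIDs created above:

    @inlineCallbacks
    def test_migrateHomes(self):
        # Hypothetical: run the file-store to SQL-store migration prepared in
        # setUp, then check that one migrated home exists in the SQL store.
        # stepWithResult() is assumed to be the upgrader's entry point.
        yield self.upgrader.stepWithResult(None)
        txn = self.sqlStore.newTransaction()
        home = yield txn.calendarHomeWithUID("home1")
        self.assertTrue(home is not None)
        yield txn.commit()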
Example #10
    @inlineCallbacks
    def _dbDataUpgrades(self, version, versionKey, upgraderClass):
        """
        This does a full DB test of all possible data upgrade paths. For each old schema it loads the schema
        into the DB and then runs the data upgrade service. This ensures that every upgrade_XX.py file works
        correctly - at least for postgres.

        TODO: this currently does not create any data to test with. It simply runs the upgrade on an empty
        store.
        """

        store = yield theStoreBuilder.buildStore(
            self, {"push": StubNotifierFactory()}, enableJobProcessing=False)

        @inlineCallbacks
        def _loadOldData(path, oldVersion):
            """
            Use the postgres schema mechanism to do tests under a separate "namespace"
            in postgres that we can quickly wipe clean afterwards.
            """
            startTxn = store.newTransaction("test_dbUpgrades")
            yield startTxn.execSQL("create schema test_dbUpgrades;")
            yield startTxn.execSQL("set search_path to test_dbUpgrades;")
            yield startTxn.execSQL(path.getContent())
            yield startTxn.execSQL(
                "update CALENDARSERVER set VALUE = '%s' where NAME = '%s';" % (
                    oldVersion,
                    versionKey,
                ))
            yield startTxn.commit()

        @inlineCallbacks
        def _loadVersion():
            startTxn = store.newTransaction("test_dbUpgrades")
            new_version = yield startTxn.execSQL(
                "select value from calendarserver where name = '%s';" %
                (versionKey, ))
            yield startTxn.commit()
            returnValue(int(new_version[0][0]))

        @inlineCallbacks
        def _unloadOldData():
            startTxn = store.newTransaction("test_dbUpgrades")
            yield startTxn.execSQL("set search_path to public;")
            yield startTxn.execSQL("drop schema test_dbUpgrades cascade;")
            yield startTxn.commit()

        @inlineCallbacks
        def _cleanupOldData():
            startTxn = store.newTransaction("test_dbUpgrades")
            yield startTxn.execSQL("set search_path to public;")
            yield startTxn.execSQL(
                "drop schema if exists test_dbUpgrades cascade;")
            yield startTxn.commit()

        self.addCleanup(_cleanupOldData)

        test_upgrader = UpgradeDatabaseSchemaStep(None)
        expected_version = self._getSchemaVersion(
            test_upgrader.schemaLocation.child("current.sql"), versionKey)

        oldVersion = version
        upgrader = upgraderClass(store)
        yield _loadOldData(test_upgrader.schemaLocation.child("current.sql"),
                           oldVersion)
        yield upgrader.databaseUpgrade()
        new_version = yield _loadVersion()
        yield _unloadOldData()

        self.assertEqual(new_version, expected_version)
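A hypothetical driver for this helper. The version number, version key and upgrader class below are illustrative placeholders, not values taken from the example above:

    @inlineCallbacks
    def test_dataUpgradeFromOldVersion(self):
        # Hypothetical: run every data upgrader starting from an old calendar
        # data version; the key and class names here are illustrative only.
        yield self._dbDataUpgrades(
            1, "CALENDAR-DATAVERSION", UpgradeDatabaseCalendarDataStep
        )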
Example #11
    @inlineCallbacks
    def _dbSchemaUpgrades(self, child):
        """
        This does a full DB test of all possible upgrade paths. For each old schema it loads the schema into
        the DB and then runs the upgrade service. This ensures that every upgrade.sql file works correctly -
        at least for postgres.
        """

        store = yield theStoreBuilder.buildStore(
            self, {"push": StubNotifierFactory()}, enableJobProcessing=False)

        @inlineCallbacks
        def _loadOldSchema(path):
            """
            Use the postgres schema mechanism to do tests under a separate "namespace"
            in postgres that we can quickly wipe clean afterwards.
            """
            startTxn = store.newTransaction("test_dbUpgrades")
            yield startTxn.execSQL("create schema test_dbUpgrades;")
            yield startTxn.execSQL("set search_path to test_dbUpgrades;")
            yield startTxn.execSQL(path.getContent())
            yield startTxn.commit()

        @inlineCallbacks
        def _loadVersion():
            startTxn = store.newTransaction("test_dbUpgrades")
            new_version = yield startTxn.execSQL(
                "select value from calendarserver where name = 'VERSION';")
            yield startTxn.commit()
            returnValue(int(new_version[0][0]))

        @inlineCallbacks
        def _loadSchemaFromDatabase():
            startTxn = store.newTransaction("test_dbUpgrades")
            schema = yield dumpSchema(
                startTxn, "Upgraded from %s" % (child.basename(), ),
                "test_dbUpgrades")
            yield startTxn.commit()
            returnValue(schema)

        @inlineCallbacks
        def _unloadOldSchema():
            startTxn = store.newTransaction("test_dbUpgrades")
            yield startTxn.execSQL("set search_path to public;")
            yield startTxn.execSQL("drop schema test_dbUpgrades cascade;")
            yield startTxn.commit()

        @inlineCallbacks
        def _cleanupOldSchema():
            startTxn = store.newTransaction("test_dbUpgrades")
            yield startTxn.execSQL("set search_path to public;")
            yield startTxn.execSQL(
                "drop schema if exists test_dbUpgrades cascade;")
            yield startTxn.commit()

        self.addCleanup(_cleanupOldSchema)

        test_upgrader = UpgradeDatabaseSchemaStep(None)
        expected_version = self._getSchemaVersion(
            test_upgrader.schemaLocation.child("current.sql"), "VERSION")

        # Upgrade allowed
        upgrader = UpgradeDatabaseSchemaStep(store)
        yield _loadOldSchema(child)
        yield upgrader.databaseUpgrade()
        new_version = yield _loadVersion()

        # Compare the upgraded schema with the expected current schema
        new_schema = yield _loadSchemaFromDatabase()
        currentSchema = schemaFromPath(
            test_upgrader.schemaLocation.child("current.sql"))
        mismatched = currentSchema.compare(new_schema)
        # These are special case exceptions
        for i in (
                "Table: CALENDAR_HOME, column name DATAVERSION default mismatch",
                "Table: ADDRESSBOOK_HOME, column name DATAVERSION default mismatch",
                "Table: PUSH_NOTIFICATION_WORK, column name PUSH_PRIORITY default mismatch",
        ):
            try:
                mismatched.remove(i)
            except ValueError:
                pass
        self.assertEqual(len(mismatched), 0,
                         "Schema mismatch:\n" + "\n".join(mismatched))

        yield _unloadOldSchema()

        self.assertEqual(new_version, expected_version)

        # Upgrade disallowed
        upgrader = UpgradeDatabaseSchemaStep(store, failIfUpgradeNeeded=True)
        yield _loadOldSchema(child)
        old_version = yield _loadVersion()
        try:
            yield upgrader.databaseUpgrade()
        except NotAllowedToUpgrade:
            pass
        except Exception:
            self.fail("NotAllowedToUpgrade not raised")
        else:
            self.fail("NotAllowedToUpgrade not raised")
        new_version = yield _loadVersion()
        yield _unloadOldSchema()

        self.assertEqual(old_version, new_version)
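A hypothetical driver that feeds every shipped old postgres schema file to this helper, assuming the upgradePath attribute set up in Example #3; the test name is illustrative:

    @inlineCallbacks
    def test_upgradeAllOldSchemas(self):
        # Hypothetical: run the schema upgrade check once per old schema file
        # (self.upgradePath comes from the setUp shown in Example #3).
        for child in self.upgradePath.children():
            if child.basename().endswith(".sql"):
                yield self._dbSchemaUpgrades(child)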