def test_dottedNames(self):
    """
    Verify that the walkModules APIs will give us back subpackages, not just
    top-level packages.
    """
    self.assertEquals(
        modules.getModule('twisted.python'),
        self.findByIteration("twisted.python",
                             where=modules.getModule('twisted')))
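findByIteration is a helper defined on the test case rather than a public API; a minimal sketch of what it presumably does, assuming it simply walks the given package and compares dotted names:

def findByIteration(self, modname, where=modules, importPackages=False):
    # Walk everything reachable from `where` and return the entry whose dotted
    # name matches; fail the test if it never turns up.
    for modinfo in where.walkModules(importPackages=importPackages):
        if modinfo.name == modname:
            return modinfo
    self.fail("Unable to find module %r through iteration." % (modname,))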
Example #2
    def test_determineUpgradeSequence(self):

        upgrader = UpgradeDatabaseSchemaStep(None)

        upgrader.schemaLocation = getModule(__name__).filePath.sibling("fake_schema1")
        files = upgrader.scanForUpgradeFiles("fake_dialect")
        upgrades = upgrader.determineUpgradeSequence(3, 4, files, "fake_dialect")
        self.assertEqual(upgrades,
            [upgrader.schemaLocation.child("upgrades").child("fake_dialect").child("upgrade_from_3_to_4.sql")],
        )
        self.assertRaises(RuntimeError, upgrader.determineUpgradeSequence, 3, 5, files, "fake_dialect")

        upgrader.schemaLocation = getModule(__name__).filePath.sibling("fake_schema2")
        files = upgrader.scanForUpgradeFiles("fake_dialect")
        upgrades = upgrader.determineUpgradeSequence(3, 5, files, "fake_dialect")
        self.assertEqual(upgrades,
            [upgrader.schemaLocation.child("upgrades").child("fake_dialect").child("upgrade_from_3_to_5.sql")]
        )
        upgrades = upgrader.determineUpgradeSequence(4, 5, files, "fake_dialect")
        self.assertEqual(upgrades,
            [upgrader.schemaLocation.child("upgrades").child("fake_dialect").child("upgrade_from_4_to_5.sql")]
        )

        upgrader.schemaLocation = getModule(__name__).filePath.sibling("fake_schema3")
        files = upgrader.scanForUpgradeFiles("fake_dialect")
        upgrades = upgrader.determineUpgradeSequence(3, 5, files, "fake_dialect")
        self.assertEqual(upgrades,
            [
                upgrader.schemaLocation.child("upgrades").child("fake_dialect").child("upgrade_from_3_to_4.sql"),
                upgrader.schemaLocation.child("upgrades").child("fake_dialect").child("upgrade_from_4_to_5.sql"),
            ]
        )
def main(reactor):
    log.startLogging(sys.stdout)
    certData = getModule(__name__).filePath.sibling('public.pem').getContent()
    authData = getModule(__name__).filePath.sibling('server.pem').getContent()
    authority = ssl.Certificate.loadPEM(certData)
    certificate = ssl.PrivateCertificate.loadPEM(authData)
    factory = protocol.Factory.forProtocol(echoserv.Echo)
    reactor.listenSSL(8000, factory, certificate.options(authority))
    return defer.Deferred()
def _populateSchema(version=None):
    """
    Generate the global L{SchemaSyntax}.
    """

    if version is None:
        pathObj = getModule(__name__).filePath.sibling("sql_schema").child("current.sql")
    else:
        pathObj = getModule(__name__).filePath.sibling("sql_schema").child("old").child(POSTGRES_DIALECT).child("%s.sql" % (version,))
    return SchemaSyntax(schemaFromPath(pathObj))
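A usage sketch; the version string format is an assumption based on the sql_schema/old directory naming (e.g. "v44"):

# No argument loads sql_schema/current.sql; a version string loads an archived schema.
currentSchema = _populateSchema()
oldSchema = _populateSchema("v44")  # hypothetical version identifier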
@defer.inlineCallbacks
def main(reactor):
    factory = protocol.Factory.forProtocol(echoclient.EchoClient)
    certData = getModule(__name__).filePath.sibling('public.pem').getContent()
    authData = getModule(__name__).filePath.sibling('server.pem').getContent()
    clientCertificate = ssl.PrivateCertificate.loadPEM(authData)
    authority = ssl.Certificate.loadPEM(certData)
    options = ssl.optionsForClientTLS(u'example.com', authority,
                                      clientCertificate)
    endpoint = endpoints.SSL4ClientEndpoint(reactor, 'localhost', 8000,
                                            options)
    echoClient = yield endpoint.connect(factory)

    done = defer.Deferred()
    echoClient.connectionLost = lambda reason: done.callback(None)
    yield done
Example #6
def test_pathEntriesOnPath(self):
    """
    Verify that path entries discovered via module loading are, in fact, on
    sys.path somewhere.
    """
    for n in ["os", "twisted", "twisted.python", "twisted.python.reflect"]:
        self.failUnlessIn(modules.getModule(n).pathEntry.filePath.path, sys.path)
Example #7
def test_twistedShowsUp(self):
    """
    Scrounge around in the top-level module namespace and make sure that
    Twisted shows up, and that the module thusly obtained is the same as
    the module that we find when we look for it explicitly by name.
    """
    self.assertEqual(modules.getModule("twisted"), self.findByIteration("twisted"))
Example #8
def _addBackports():
    """
    We currently require 2 backported bugfixes from a future release of Twisted,
    for IPv6 support:

        - U{IPv6 client support <http://tm.tl/5085>}

        - U{TCP endpoint cancellation <http://tm.tl/4710>}

    This function will activate those backports.  (Note it must be run before
    any of the modules in question are imported or it will raise an exception.)

    This function, L{_hasIPv6ClientSupport}, and all the associated backports
    (i.e., all of C{twext/backport}) should be removed upon upgrading our
    minimum required Twisted version.
    """
    from twext.backport import internet as bpinternet
    from twisted import internet
    internet.__path__[:] = bpinternet.__path__ + internet.__path__

    # Make sure none of the backports are loaded yet.
    backports = getModule("twext.backport.internet")
    for submod in backports.iterModules():
        subname = submod.name.split(".")[-1]
        tiname = 'twisted.internet.' + subname
        if tiname in sys.modules:
            raise RuntimeError(
                tiname + " already loaded, cannot load required backport")
Example #9
    def test_matchCalendarUserAddress(self):
        """
        Make sure we do an exact comparison on EmailDomain
        """

        self.patch(config.Scheduling.iSchedule, "Enabled", True)
        self.patch(config.Scheduling.iSchedule, "RemoteServers", "")

        # Only mailtos:
        result = yield ScheduleViaISchedule.matchCalendarUserAddress("http://example.com/principal/user")
        self.assertFalse(result)

        # Need to setup a fake resolver
        module = getModule(__name__)
        dataPath = module.filePath.sibling("data")
        bindPath = dataPath.child("db.example.com")
        self.patch(config.Scheduling.iSchedule, "DNSDebug", bindPath.path)
        utils.DebugResolver = None
        utils._initResolver()

        result = yield ScheduleViaISchedule.matchCalendarUserAddress("mailto:[email protected]")
        self.assertTrue(result)
        result = yield ScheduleViaISchedule.matchCalendarUserAddress("mailto:[email protected]")
        self.assertFalse(result)
        result = yield ScheduleViaISchedule.matchCalendarUserAddress("mailto:[email protected]?subject=foobar")
        self.assertFalse(result)
        result = yield ScheduleViaISchedule.matchCalendarUserAddress("mailto:user")
        self.assertFalse(result)

        # Test when not enabled
        ScheduleViaISchedule.domainServerMap = {}
        self.patch(config.Scheduling.iSchedule, "Enabled", False)
        result = yield ScheduleViaISchedule.matchCalendarUserAddress("mailto:[email protected]")
        self.assertFalse(result)
Example #10
        def _findModule(self, fullname, path=None):
            # print "_findModule called with:", fullname, path

            mod = getModule(fullname)
            if mod.filePath.path.endswith('.pyc'):
                return mod.filePath.path, mod.isPackage()
            return None
def _buildTestClasses(_locals):
    """
    Build the test classes that use L{NewStyleOnly}, one class per module.

    @param _locals: The global C{locals()} dict.
    """
    for x in getModule("twisted").walkModules():

        ignoredModules = [
            "twisted.test.reflect_helper",
            "twisted.internet.test.process_",
            "twisted.test.process_"
        ]

        isIgnored = [x.name.startswith(ignored) for ignored in ignoredModules]

        if True in isIgnored:
            continue


        class Test(NewStyleOnly, unittest.TestCase):
            """
            @see: L{NewStyleOnly}
            """
            module = x.name

        acceptableName = x.name.replace(".", "_")
        Test.__name__ = acceptableName
        if hasattr(Test, "__qualname__"):
            Test.__qualname__ = acceptableName
        _locals.update({acceptableName: Test})
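In the defining test module the helper is then typically invoked once at import time, so that trial can discover the generated classes by name (a sketch):

# Inject one generated TestCase subclass per walked module into this module's namespace.
_buildTestClasses(locals())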
    def test_current_oracle(self):
        """
        Make sure current-oracle-dialect.sql matches current.sql
        """

        sqlSchema = getModule(__name__).filePath.parent().sibling("sql_schema")

        currentSchema = sqlSchema.child("current.sql")
        current_version = self.versionFromSchema(currentSchema)

        currentOracleSchema = sqlSchema.child("current-oracle-dialect.sql")
        current_oracle_version = self.versionFromSchema(currentOracleSchema)

        self.assertEqual(current_version, current_oracle_version)

        schema_current = schemaFromPath(currentSchema)
        schema_oracle = schemaFromPath(currentOracleSchema)

        # Remove any NOT NULL constraints on text columns from the postgres
        # schema, since Oracle nclob/nvarchar columns never use NOT NULL
        for table in schema_current.tables:
            for constraint in tuple(table.constraints):
                if constraint.type == Constraint.NOT_NULL and len(constraint.affectsColumns) == 1:
                    if constraint.affectsColumns[0].type.name in ("text", "char", "varchar"):
                        table.constraints.remove(constraint)

        mismatched = schema_current.compare(schema_oracle)
        self.assertEqual(len(mismatched), 0, msg=", ".join(mismatched))
    def test_references_index(self):
        """
        Make sure current-oracle-dialect.sql matches current.sql
        """

        schema = schemaFromPath(getModule(__name__).filePath.parent().sibling("sql_schema").child("current.sql"))

        # Get index details
        indexed_columns = set()
        for index in schema.pseudoIndexes():
            indexed_columns.add("%s.%s" % (index.table.name, index.columns[0].name))
        # print indexed_columns

        # Look at each table
        failures = []
        for table in schema.tables:
            for column in table.columns:
                if column.references is not None:
                    id = "%s.%s" % (table.name, column.name)
                    if id not in indexed_columns:
                        failures.append(id)

        self.assertEqual(
            len(failures), 0, msg="Missing index for references columns: %s" % (", ".join(sorted(failures)))
        )
Example #14
    def __init__(self, sqlStore, uid=None, gid=None, failIfUpgradeNeeded=False):
        """
        Initialize the service.
        """
        self.sqlStore = sqlStore
        self.uid = uid
        self.gid = gid
        self.failIfUpgradeNeeded = failIfUpgradeNeeded
        self.schemaLocation = getModule(__name__).filePath.parent().parent().sibling("sql_schema")
        self.pyLocation = getModule(__name__).filePath.parent()

        self.versionKey = None
        self.versionDescriptor = ""
        self.upgradeFilePrefix = ""
        self.upgradeFileSuffix = ""
        self.defaultKeyValue = None
Example #15
    def test_lookupServerViaSRV(self):
        """
        Test L{lookupServerViaSRV} with a local BIND zone file.
        """

        # Patch config
        for zonefile, checks in (
            ("db.example.com", (("example.com", "example.com", 8443,),),),
            ("db.two.zones", (
                ("example.com", "example.com", 8443,),
                ("example.org", "example.org", 8543,),
            ),),
            ("db.empty.zone", (("example.com", "", 0,),),),
        ):
            module = getModule(__name__)
            dataPath = module.filePath.sibling("data")
            bindPath = dataPath.child(zonefile)
            self.patch(config.Scheduling.iSchedule, "DNSDebug", bindPath.path)
            utils.DebugResolver = None

            for domain, result_host, result_port in checks:
                result = (yield utils.lookupServerViaSRV(domain))
                if result is None:
                    host = ""
                    port = 0
                else:
                    host, port = result
                self.assertEqual(host, result_host)
                self.assertEqual(port, result_port)
Example #16
    def test_nonexistentPaths(self):
        """
        Verify that L{modules.walkModules} ignores entries in sys.path which
        do not exist in the filesystem.
        """
        existentPath = FilePath(self.mktemp())
        os.makedirs(existentPath.child("test_package").path)
        existentPath.child("test_package").child("__init__.py").setContent("")

        nonexistentPath = FilePath(self.mktemp())
        self.failIf(nonexistentPath.exists())

        originalSearchPaths = sys.path[:]
        sys.path[:] = [existentPath.path]
        try:
            expected = [modules.getModule("test_package")]

            beforeModules = list(modules.walkModules())
            sys.path.append(nonexistentPath.path)
            afterModules = list(modules.walkModules())
        finally:
            sys.path[:] = originalSearchPaths

        self.assertEqual(beforeModules, expected)
        self.assertEqual(afterModules, expected)
Example #17
File: core.py Project: ibid/ibid
    def load_processors(self, load=None, noload=None, autoload=None):
        """If method parameters are not provided, they'll be looked up from
        config:
        [plugins]
            load = List of plugins / plugin.Processors to load
            noload = List of plugins / plugin.Processors to skip automatically loading
            autoload = (Boolean) Load all plugins by default?
        """
        # Sets up twisted.python so that we can iterate modules
        __import__('ibid.plugins')

        if load is None:
            load = ibid.config.plugins.get('load', [])
        if noload is None:
            noload = ibid.config.plugins.get('noload', [])

        all_plugins = set(plugin.split('.')[0] for plugin in load)
        if autoload is None:
            autoload = ibid.config.plugins.get('autoload', True)

        if autoload:
            all_plugins |= set(plugin.name.replace('ibid.plugins.', '')
                    for plugin in getModule('ibid.plugins').iterModules())

        for plugin in all_plugins:
            load_processors = [p.split('.')[1] for p in load if p.startswith(plugin + '.')]
            noload_processors = [p.split('.')[1] for p in noload if p.startswith(plugin + '.')]
            if plugin not in noload or load_processors:
                self.load_processor(plugin, noload=noload_processors, load=load_processors, load_all=(plugin in load), noload_all=(plugin in noload))
Example #18
def module(s):
    base = getModule(s)
    base.load()
    attrs = list(base.iterAttributes())
    names = [attr.name[len(attr.onObject.name) + 1:] for attr in attrs]
    members = [attr.load() for attr in attrs]
    return dict(zip(names, members))
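For example, applied to a small stdlib module the helper returns a plain name-to-object mapping (a usage sketch; the module name is arbitrary):

ns = module("string")
print(sorted(ns))             # attribute names defined by the module
print(ns["ascii_lowercase"])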
def main(reactor):
    certData = getModule(__name__).filePath.sibling('server.pem').getContent()
    cert = ssl.PrivateCertificate.loadPEM(certData)
    factory = protocol.Factory.forProtocol(TLSServer)
    factory.options = cert.options()
    endpoint = endpoints.TCP4ServerEndpoint(reactor, 8000)
    endpoint.listen(factory)
    return defer.Deferred()
Example #20
def main(reactor):
    HOST, PORT = 'localhost', 1717
    log.startLogging(sys.stdout)
    certData = getModule(__name__).filePath.sibling('server.pem').getContent()
    certificate = ssl.PrivateCertificate.loadPEM(certData)
    factory = protocol.Factory.forProtocol(Echo)
    reactor.listenSSL(PORT, factory, certificate.options())
    return defer.Deferred()
Example #21
    def __init__(self, templateName):
        super(PageElement, self).__init__()

        self.loader = XMLFile(
            getModule(__name__).filePath.sibling(
                u"{name}.xhtml".format(name=templateName)
            )
        )
def main(reactor):
    log.startLogging(sys.stdout)
    certData = getModule("twisted.test").filePath.sibling('server.pem').getContent()
    print(certData)
    certificate = ssl.PrivateCertificate.loadPEM(certData)
    factory = protocol.Factory.forProtocol(Echo)
    reactor.listenSSL(8000, factory, certificate.options())
    return defer.Deferred()
Example #23
def addFakePlugin(testCase, dropinSource="fakeendpoint.py"):
    """
    For the duration of C{testCase}, add a fake plugin to twisted.plugins which
    contains some sample endpoint parsers.
    """
    import sys
    savedModules = sys.modules.copy()
    savedPluginPath = plugins.__path__
    def cleanup():
        sys.modules.clear()
        sys.modules.update(savedModules)
        plugins.__path__[:] = savedPluginPath
    testCase.addCleanup(cleanup)
    fp = FilePath(testCase.mktemp())
    fp.createDirectory()
    getModule(__name__).filePath.sibling(dropinSource).copyTo(
        fp.child(dropinSource))
    plugins.__path__.append(fp.path)
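A hypothetical use inside a trial test method, assuming a fakeendpoint.py dropin sits next to the test module as above:

    def test_pluginDiscovery(self):
        # The copied dropin is importable as twisted.plugins.fakeendpoint only
        # for the duration of this test; the cleanup restores sys.modules.
        addFakePlugin(self)
        from twisted.plugins import fakeendpoint
        self.assertTrue(fakeendpoint)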
def main():
    from twisted.python import log
    log.startLogging(sys.stdout)
    sourcePort = 4433
    targetHost, targetPort = sys.argv[1].split(":", 1)
    certData = getModule("twisted.test").filePath.sibling('server.pem').getContent()
    certificate = ssl.PrivateCertificate.loadPEM(certData)
    reactor.listenSSL(sourcePort, DebugHttpServerFactory(targetHost, int(targetPort)), certificate.options())
    reactor.run()
Example #25
def test_loadPackagesAndModules(self):
    """
    Verify that we can locate and load packages, modules, submodules, and
    subpackages.
    """
    for n in ["os", "twisted", "twisted.python", "twisted.python.reflect"]:
        m = namedAny(n)
        self.failUnlessIdentical(modules.getModule(n).load(), m)
        self.failUnlessIdentical(self.findByIteration(n).load(), m)
Example #26
def loadCommands():
    """
    Load all СпамоБорец (SpamFighter) commands.
    """

    from twisted.python import modules

    module = modules.getModule(__name__)
    for mod in module.walkModules():
        mod.load()
Example #27
    def reload(self):
        """Reload all available plugins"""

        log.debug("Reloading all plugins...")

        # Is this the best way to do this?
        for module in modules.getModule(plugins.__name__).iterModules():
            reload(module.load())

        self._load()
Example #28
def loadRules():
    """
    Load all СпамоБорец (SpamFighter) rules.
    """

    from twisted.python import modules

    module = modules.getModule(__name__)
    for mod in module.walkModules():
        mod.load()
Example #29
def refresh(cls, appname):
    """
    Go back to the filesystem and re-read the config file
    """
    try:
        filepath = getModule(__name__).filePath
        basedir = filepath.parent().parent().parent().parent().path
    except Exception, e:
        print("Failed to get project basedir: %s" % (e,))
        raise
Example #30
    def test_scanUpgradeFiles(self):

        upgrader = UpgradeDatabaseSchemaStep(None)

        upgrader.schemaLocation = getModule(__name__).filePath.sibling("fake_schema1")
        files = upgrader.scanForUpgradeFiles("fake_dialect")
        self.assertEqual(files,
            [(3, 4, upgrader.schemaLocation.child("upgrades").child("fake_dialect").child("upgrade_from_3_to_4.sql"))],
        )

        upgrader.schemaLocation = getModule(__name__).filePath.sibling("fake_schema2")
        files = upgrader.scanForUpgradeFiles("fake_dialect")
        self.assertEqual(files,
            [
                (3, 4, upgrader.schemaLocation.child("upgrades").child("fake_dialect").child("upgrade_from_3_to_4.sql")),
                (3, 5, upgrader.schemaLocation.child("upgrades").child("fake_dialect").child("upgrade_from_3_to_5.sql")),
                (4, 5, upgrader.schemaLocation.child("upgrades").child("fake_dialect").child("upgrade_from_4_to_5.sql")),
            ]
        )
Example #31
    def _trialSortAlgorithm(self, sorter):
        """
        Right now, halfway by accident, trial sorts like this:

            1. all modules are grouped together in one list and sorted.

            2. within each module, the classes are grouped together in one list
               and sorted.

            3. finally within each class, each test method is grouped together
               in a list and sorted.

        This attempts to return a sorted list of testable thingies following
        those rules, so that we can compare the behavior of loadPackage.

        The things that show as 'cases' are errors from modules which failed to
        import, and test methods.  Let's gather all those together.
        """
        pkg = getModule('uberpackage')
        testModules = []
        for testModule in pkg.walkModules():
            if testModule.name.split(".")[-1].startswith("test_"):
                testModules.append(testModule)
        sortedModules = util.dsu(testModules, sorter)  # ONE
        for modinfo in sortedModules:
            # Now let's find all the classes.
            module = modinfo.load(None)
            if module is None:
                yield modinfo
            else:
                testClasses = []
                for attrib in modinfo.iterAttributes():
                    if runner.isTestCase(attrib.load()):
                        testClasses.append(attrib)
                sortedClasses = util.dsu(testClasses, sorter)  # TWO
                for clsinfo in sortedClasses:
                    testMethods = []
                    for attr in clsinfo.iterAttributes():
                        if attr.name.split(".")[-1].startswith('test'):
                            testMethods.append(attr)
                    sortedMethods = util.dsu(testMethods, sorter)  # THREE
                    for methinfo in sortedMethods:
                        yield methinfo
Example #32
    def _create_empty_database(self):
        # Make a temporary test database.
        # This assumes SQLite, both in the fact that the database is a single
        # file and in forming the URL.

        # Use tempfile.mkstemp instead of self.mkstemp so that the file survives
        # between tests, but delete it on exit. Windows is why we can't have
        # nice things like NamedTemporaryFile.
        fd, PluginTestCase.empty_dbfile = tempfile.mkstemp()
        os.close(fd)
        atexit.register(os.unlink, PluginTestCase.empty_dbfile)

        ibid.config['databases']['ibid'] = 'sqlite:///' + self.empty_dbfile
        db = DatabaseManager(check_schema_versions=False, sqlite_synchronous=False)
        for module in getModule('ibid.plugins').iterModules():
            try:
                __import__(module.name)
            except Exception, e:
                print >> sys.stderr, u"Couldn't load %s plugin for skeleton DB: %s" % (
                        module.name.replace('ibid.plugins.', ''), unicode(e))
    def test_old_files_consistent(self):
        """
        Make sure txdav.common.datastore.sql_schema.old contains all the appropriate old versions
        """

        sqlSchema = getModule(__name__).filePath.parent().sibling("sql_schema")
        oldDirectory = sqlSchema.child("old")

        for child in oldDirectory.children():
            if child.basename().startswith("."):
                continue
            for oldVersion in child.children():
                if oldVersion.basename().startswith("."):
                    continue
                found = re.search(r"v(\d+)\.sql", oldVersion.basename())
                if found is None:
                    self.fail("%s is not a valid old sql file" % (oldVersion))
                old_name_version = int(found.group(1))
                old_version = self.versionFromSchema(oldVersion)
                self.assertEqual(old_name_version, old_version, "Name of schema file does not match actual schema version: %s" % (oldVersion.path,))
Example #34
    def test_nonexistentPaths(self):
        """
        Verify that L{modules.walkModules} ignores entries in sys.path which
        do not exist in the filesystem.
        """
        existentPath = self.pathEntryWithOnePackage()

        nonexistentPath = FilePath(self.mktemp())
        self.assertFalse(nonexistentPath.exists())

        self.replaceSysPath([existentPath.path])

        expected = [modules.getModule("test_package")]

        beforeModules = list(modules.walkModules())
        sys.path.append(nonexistentPath.path)
        afterModules = list(modules.walkModules())

        self.assertEqual(beforeModules, expected)
        self.assertEqual(afterModules, expected)
Example #35
    def loadPackage(self, package, recurse=False):
        """
        Load tests from a module object representing a package, and return a
        TestSuite containing those tests.

        Tests are only loaded from modules whose name begins with 'test_'
        (or whatever C{modulePrefix} is set to).

        @param package: a types.ModuleType object (or reasonable facsimile
        obtained by importing) which may contain tests.

        @param recurse: A boolean.  If True, inspect modules within packages
        within the given package (and so on), otherwise, only inspect modules
        in the package itself.

        @raise: TypeError if 'package' is not a package.

        @return: a TestSuite created with my suiteFactory, containing all the
        tests.
        """
        if not isPackage(package):
            raise TypeError("%r is not a package" % (package, ))
        pkgobj = modules.getModule(package.__name__)
        if recurse:
            discovery = pkgobj.walkModules()
        else:
            discovery = pkgobj.iterModules()
        discovered = []
        for disco in discovery:
            if disco.name.split(".")[-1].startswith(self.modulePrefix):
                discovered.append(disco)
        suite = self.suiteFactory()
        for modinfo in self.sort(discovered):
            try:
                module = modinfo.load()
            except:
                thingToAdd = ErrorHolder(modinfo.name, failure.Failure())
            else:
                thingToAdd = self.loadModule(module)
            suite.addTest(thingToAdd)
        return suite
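A usage sketch with a standard trial TestLoader; recurse=True also pulls in test_* modules from nested subpackages:

import twisted.test
from twisted.trial.runner import TestLoader

loader = TestLoader()
suite = loader.loadPackage(twisted.test, recurse=True)
print(suite.countTestCases())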
Example #36
class FailureElement(Element):
    """
    L{FailureElement} is an L{IRenderable} which can render detailed information
    about a L{Failure<twisted.python.failure.Failure>}.

    @ivar failure: The L{Failure<twisted.python.failure.Failure>} instance which
        will be rendered.

    @since: 12.1
    """
    loader = XMLFile(getModule(__name__).filePath.sibling("failure.xhtml"))

    def __init__(self, failure, loader=None):
        Element.__init__(self, loader)
        self.failure = failure


    @renderer
    def type(self, request, tag):
        """
        Render the exception type as a child of C{tag}.
        """
        return tag(fullyQualifiedName(self.failure.type))


    @renderer
    def value(self, request, tag):
        """
        Render the exception value as a child of C{tag}.
        """
        return tag(str(self.failure.value))


    @renderer
    def traceback(self, request, tag):
        """
        Render all the frames in the wrapped
        L{Failure<twisted.python.failure.Failure>}'s traceback stack, replacing
        C{tag}.
        """
        return _StackElement(TagLoader(tag), self.failure.frames)
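A short sketch of rendering the element with twisted.web.template.flattenString, feeding it a synchronously captured Failure:

from twisted.python.failure import Failure
from twisted.web.template import flattenString

try:
    1 / 0
except ZeroDivisionError:
    failure = Failure()

def show(html):
    # html is the flattened markup, as bytes
    print(html[:200])

flattenString(None, FailureElement(failure)).addCallback(show)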
Example #37
    def test_lookupServerViaSRV(self):
        """
        Test L{lookupServerViaSRV} with a local BIND zone file.
        """

        # Patch config
        for zonefile, checks in (
            (
                "db.example.com",
                ((
                    "example.com",
                    "example.com",
                    8443,
                ), ),
            ),
            (
                "db.two.zones",
                (
                    (
                        "example.com",
                        "example.com",
                        8443,
                    ),
                    (
                        "example.org",
                        "example.org",
                        8543,
                    ),
                ),
            ),
        ):
            module = getModule(__name__)
            dataPath = module.filePath.sibling("data")
            bindPath = dataPath.child(zonefile)
            self.patch(config.Scheduling.iSchedule, "DNSDebug", bindPath.path)
            utils.DebugResolver = None

            for domain, result_host, result_port in checks:
                host, port = (yield utils.lookupServerViaSRV(domain))
                self.assertEqual(host, result_host)
                self.assertEqual(port, result_port)
Example #38
    def test_TXT_key(self):

        # Need to setup a fake resolver
        module = getModule(__name__)
        dataPath = module.filePath.sibling("data")
        bindPath = dataPath.child("db.example.com")
        self.patch(config.Scheduling.iSchedule, "DNSDebug", bindPath.path)
        utils.DebugResolver = None
        utils._initResolver()

        for d, s, result in (
            ("example.com", "_ischedule", True),
            ("example.com", "_revoked", False),
            ("example.com", "dkim", False),
            ("calendar.example.com", "_ischedule", False),
            ("example.org", "_ischedule", False),
        ):
            dkim = "v=1; d=%s; s = %s; t = 1234; a=rsa-sha1; q=dns/txt ; http=UE9TVDov; c=relaxed/simple; h=Content-Type:Originator:Recipient:Recipient:iSchedule-Version:iSchedule-Message-ID; bh=abc; b=" % (d, s,)
            tester = PublicKeyLookup_DNSTXT(DKIMUtils.extractTags(dkim))
            pkey = yield tester.getPublicKey(False)
            self.assertEqual(pkey is not None, result)
    def test_primary_keys(self):
        """
        Make sure current-oracle-dialect.sql matches current.sql
        """

        schema = schemaFromPath(getModule(__name__).filePath.parent().sibling("sql_schema").child("current.sql"))

        # Set of tables for which missing primary key is allowed
        table_exceptions = (
            "ADDRESSBOOK_OBJECT_REVISIONS",
            "CALENDAR_OBJECT_REVISIONS",
            "NOTIFICATION_OBJECT_REVISIONS",
            "PERUSER",
        )
        # Look at each table
        failures = []
        for table in schema.tables:
            if table.primaryKey is None and table.name not in table_exceptions:
                failures.append(table.name)

        self.assertEqual(len(failures), 0, msg="Missing primary key for tables: %s" % (", ".join(sorted(failures))))
    def setUp(self):
        super(InboundTests, self).setUp()

        yield self.buildStoreAndDirectory()
        self.receiver = MailReceiver(self.store, self.directory)
        self.retriever = MailRetriever(
            self.store, self.directory,
            ConfigDict({
                "Type": "pop",
                "UseSSL": False,
                "Server": "example.com",
                "Port": 123,
                "Username": "******",
            }))

        def decorateTransaction(txn):
            txn._mailRetriever = self.retriever

        self.store.callWithNewTransactions(decorateTransaction)
        module = getModule(__name__)
        self.dataPath = module.filePath.sibling("data")
    def test_matchCalendarUserAddress(self):
        """
        Make sure we do an exact comparison on EmailDomain
        """

        self.patch(config.Scheduling.iSchedule, "Enabled", True)
        self.patch(config.Scheduling.iSchedule, "RemoteServers", "")

        # Only mailtos:
        result = yield ScheduleViaISchedule.matchCalendarUserAddress(
            "http://example.com/principal/user")
        self.assertFalse(result)

        # Need to setup a fake resolver
        module = getModule(__name__)
        dataPath = module.filePath.sibling("data")
        bindPath = dataPath.child("db.example.com")
        self.patch(config.Scheduling.iSchedule, "DNSDebug", bindPath.path)
        utils.DebugResolver = None
        utils._initResolver()

        result = yield ScheduleViaISchedule.matchCalendarUserAddress(
            "mailto:[email protected]")
        self.assertTrue(result)
        result = yield ScheduleViaISchedule.matchCalendarUserAddress(
            "mailto:[email protected]")
        self.assertFalse(result)
        result = yield ScheduleViaISchedule.matchCalendarUserAddress(
            "mailto:[email protected]?subject=foobar")
        self.assertFalse(result)
        result = yield ScheduleViaISchedule.matchCalendarUserAddress(
            "mailto:user")
        self.assertFalse(result)

        # Test when not enabled
        ScheduleViaISchedule.domainServerMap = {}
        self.patch(config.Scheduling.iSchedule, "Enabled", False)
        result = yield ScheduleViaISchedule.matchCalendarUserAddress(
            "mailto:[email protected]")
        self.assertFalse(result)
Example #42
    def load_processors(self, load=None, noload=None, autoload=None):
        """If method parameters are not provided, they'll be looked up from
        config:
        [plugins]
            load = List of plugins / plugin.Processors to load
            noload = List of plugins / plugin.Processors to skip automatically loading
            autoload = (Boolean) Load all plugins by default?
        """
        # Sets up twisted.python so that we can iterate modules
        __import__('ibid.plugins')

        if load is None:
            load = ibid.config.plugins.get('load', [])
        if noload is None:
            noload = ibid.config.plugins.get('noload', [])

        all_plugins = set(plugin.split('.')[0] for plugin in load)
        if autoload is None:
            autoload = ibid.config.plugins.get('autoload', True)

        if autoload:
            all_plugins |= set(
                plugin.name.replace('ibid.plugins.', '')
                for plugin in getModule('ibid.plugins').iterModules())

        for plugin in all_plugins:
            load_processors = [
                p.split('.')[1] for p in load if p.startswith(plugin + '.')
            ]
            noload_processors = [
                p.split('.')[1] for p in noload if p.startswith(plugin + '.')
            ]
            if plugin not in noload or load_processors:
                self.load_processor(plugin,
                                    noload=noload_processors,
                                    load=load_processors,
                                    load_all=(plugin in load),
                                    noload_all=(plugin in noload))
    def test_references_index(self):
        """
        Make sure current-oracle-dialect.sql matches current.sql
        """

        schema = schemaFromPath(getModule(__name__).filePath.parent().sibling("sql_schema").child("current.sql"))

        # Get index details
        indexed_columns = set()
        for index in schema.pseudoIndexes():
            indexed_columns.add("%s.%s" % (index.table.name, index.columns[0].name,))
        # print indexed_columns

        # Look at each table
        failures = []
        for table in schema.tables:
            for column in table.columns:
                if column.references is not None:
                    id = "%s.%s" % (table.name, column.name,)
                    if id not in indexed_columns:
                        failures.append(id)

        self.assertEqual(len(failures), 0, msg="Missing index for references columns: %s" % (", ".join(sorted(failures))))
Example #44
def _schemaFiles(version=None):
    """
    Find the set of files to process, either the current set if C{version} is L{None}, otherwise look
    for the specific version.

    @param version: version identifier (e.g., "v1", "v2" etc)
    @type version: L{str}
    """
    if version is None:
        currentObj = getModule(__name__).filePath.sibling("sql_schema").child("current.sql")
        extrasObj = getModule(__name__).filePath.sibling("sql_schema").child("current-{}-extras.sql".format(ORACLE_DIALECT))
        outObj = getModule(__name__).filePath.sibling("sql_schema").child("current-{}.sql".format(ORACLE_DIALECT))
    else:
        currentObj = getModule(__name__).filePath.sibling("sql_schema").child("old").child(POSTGRES_DIALECT).child("%s.sql" % (version,))
        extrasObj = getModule(__name__).filePath.sibling("sql_schema").child("old").child(ORACLE_DIALECT).child("{}-extras.sql".format(version))
        outObj = getModule(__name__).filePath.sibling("sql_schema").child("old").child(ORACLE_DIALECT).child("{}.sql".format(version))

    return currentObj, extrasObj, outObj
Example #45
    def test_current_oracle(self):
        """
        Make sure current-oracle-dialect.sql matches current.sql
        """

        sqlSchema = getModule(__name__).filePath.parent().sibling("sql_schema")

        currentSchema = sqlSchema.child("current.sql")
        current_version = self.versionFromSchema(currentSchema)

        currentOracleSchema = sqlSchema.child("current-oracle-dialect.sql")
        current_oracle_version = self.versionFromSchema(currentOracleSchema)

        self.assertEqual(current_version, current_oracle_version)

        # In Oracle, integers are equivalent to bigint, while in PostgreSQL they are not.
        # For the purpose of this test, convert the PostgreSQL bigints to integers before the comparison.
        schema_current = schemaFromString(currentSchema.getContent().replace(
            "bigint", "integer"))
        schema_oracle = schemaFromPath(currentOracleSchema)

        # Remove any NOT NULL constraints on text columns from the postgres
        # schema, since Oracle nclob/nvarchar columns never use NOT NULL
        for table in schema_current.tables:
            for constraint in tuple(table.constraints):
                if constraint.type == Constraint.NOT_NULL and len(
                        constraint.affectsColumns) == 1:
                    if constraint.affectsColumns[0].type.name in ("text",
                                                                  "char",
                                                                  "varchar"):
                        table.constraints.remove(constraint)

        # Remove stored procedures which we only use on Oracle
        schema_oracle.functions = []

        mismatched = schema_current.compare(schema_oracle)
        self.assertEqual(len(mismatched), 0, msg=", ".join(mismatched))
Example #46
    def test_initResolver(self):
        """
        Test L{_initResolver} with the default resolver and with local BIND zone files.
        """

        # Default resolver
        utils.DebugResolver = None
        utils._initResolver()
        self.assertNotEqual(utils.DebugResolver, None)
        self.assertFalse(isinstance(utils.DebugResolver, BindAuthority))

        # Patch config for Bind resolver
        for zonefile in (
                "db.example.com",
                "db.two.zones",
        ):
            module = getModule(__name__)
            dataPath = module.filePath.sibling("data")
            bindPath = dataPath.child(zonefile)
            self.patch(config.Scheduling.iSchedule, "DNSDebug", bindPath.path)
            utils.DebugResolver = None
            utils._initResolver()
            self.assertNotEqual(utils.DebugResolver, None)
            self.assertTrue(isinstance(utils.DebugResolver, BindAuthority))
    def test_old_files(self):
        """
        Make sure txdav.common.datastore.sql_schema.old contains all the appropriate old versions
        """

        sqlSchema = getModule(__name__).filePath.parent().sibling("sql_schema")
        currentSchema = sqlSchema.child("current.sql")
        current_version = self.versionFromSchema(currentSchema)
        current_set = set([i for i in range(3, current_version)])

        oldDirectory = sqlSchema.child("old")

        for child in oldDirectory.children():
            if child.basename().startswith("."):
                continue
            old_set = set()
            for oldVersion in child.children():
                if oldVersion.basename().startswith("."):
                    continue
                found = re.search(r"v(\d+)\.sql", oldVersion.basename())
                if found is None:
                    self.fail("%s is not a valid old sql file" % (oldVersion))
                old_set.add(int(found.group(1)))
            self.assertEqual(current_set, old_set, msg="Missing old schema file for dialect: %s" % (child.basename(),))
Example #48
def sample(name):
    return (getModule("twistedcaldav.test").filePath.sibling("data").child(
        name + ".ics").getContent())
Example #49
class HomeMigrationTests(CommonCommonTests, TestCase):
    """
    Tests for L{UpgradeToDatabaseStep}.
    """

    av1 = Component.fromString("""BEGIN:VCALENDAR
VERSION:2.0
CALSCALE:GREGORIAN
PRODID:-//calendarserver.org//Zonal//EN
BEGIN:VAVAILABILITY
ORGANIZER:mailto:[email protected]
UID:[email protected]
DTSTAMP:20061005T133225Z
DTEND:20140101T000000Z
BEGIN:AVAILABLE
UID:[email protected]
DTSTAMP:20061005T133225Z
SUMMARY:Monday to Friday from 9:00 to 17:00
DTSTART:20130101T090000Z
DTEND:20130101T170000Z
RRULE:FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR
END:AVAILABLE
END:VAVAILABILITY
END:VCALENDAR
""")

    @inlineCallbacks
    def setUp(self):
        """
        Set up two stores to migrate between.
        """

        yield super(HomeMigrationTests, self).setUp()
        yield self.buildStoreAndDirectory(extraUids=(
            u"home1",
            u"home2",
            u"home3",
            u"home_defaults",
            u"home_no_splits",
            u"home_splits",
            u"home_splits_shared",
        ))
        self.sqlStore = self.store

        # Add some files to the file store.

        self.filesPath = CachingFilePath(self.mktemp())
        self.filesPath.createDirectory()
        fileStore = self.fileStore = CommonDataStore(
            self.filesPath, {"push": StubNotifierFactory()}, self.directory,
            True, True)
        self.upgrader = UpgradeToDatabaseStep(self.fileStore, self.sqlStore)

        requirements = CommonTests.requirements
        extras = deriveValue(self, "extraRequirements", lambda t: {})
        requirements = self.mergeRequirements(requirements, extras)

        yield populateCalendarsFrom(requirements, fileStore)
        md5s = CommonTests.md5s
        yield resetCalendarMD5s(md5s, fileStore)
        self.filesPath.child("calendars").child("__uids__").child("ho").child(
            "me").child("home1").child(".some-extra-data").setContent(
                "some extra data")

        requirements = ABCommonTests.requirements
        yield populateAddressBooksFrom(requirements, fileStore)
        md5s = ABCommonTests.md5s
        yield resetAddressBookMD5s(md5s, fileStore)
        self.filesPath.child("addressbooks").child("__uids__").child(
            "ho").child("me").child("home1").child(
                ".some-extra-data").setContent("some extra data")

        # Add some properties we want to check get migrated over
        txn = self.fileStore.newTransaction()
        home = yield txn.calendarHomeWithUID("home_defaults")

        cal = yield home.calendarWithName("calendar_1")
        props = cal.properties()
        props[PropertyName.fromElement(
            caldavxml.SupportedCalendarComponentSet
        )] = caldavxml.SupportedCalendarComponentSet(
            caldavxml.CalendarComponent(name="VEVENT"),
            caldavxml.CalendarComponent(name="VTODO"),
        )
        props[PropertyName.fromElement(
            element.ResourceType)] = element.ResourceType(
                element.Collection(),
                caldavxml.Calendar(),
            )
        props[PropertyName.fromElement(
            customxml.GETCTag)] = customxml.GETCTag.fromString("foobar")

        inbox = yield home.calendarWithName("inbox")
        props = inbox.properties()
        props[PropertyName.fromElement(
            customxml.CalendarAvailability
        )] = customxml.CalendarAvailability.fromString(str(self.av1))
        props[PropertyName.fromElement(
            caldavxml.ScheduleDefaultCalendarURL
        )] = caldavxml.ScheduleDefaultCalendarURL(
            element.HRef.fromString(
                "/calendars/__uids__/home_defaults/calendar_1"), )

        yield txn.commit()

    def mergeRequirements(self, a, b):
        """
        Merge two requirements dictionaries together, modifying C{a} and
        returning it.

        @param a: Some requirements, in the format of
            L{CommonTests.requirements}.
        @type a: C{dict}

        @param b: Some additional requirements, to be merged into C{a}.
        @type b: C{dict}

        @return: C{a}
        @rtype: C{dict}
        """
        for homeUID in b:
            homereq = a.setdefault(homeUID, {})
            homeExtras = b[homeUID]
            for calendarUID in homeExtras:
                calreq = homereq.setdefault(calendarUID, {})
                calendarExtras = homeExtras[calendarUID]
                calreq.update(calendarExtras)
        return a

    @withSpecialValue(
        "extraRequirements", {
            "home1": {
                "calendar_1": {
                    "bogus.ics":
                    (getModule("twistedcaldav").filePath.sibling("zoneinfo").
                     child("EST.ics").getContent(), CommonTests.metadata1)
                }
            }
        })
    @inlineCallbacks
    def test_unknownTypeNotMigrated(self):
        """
        The only types of calendar objects that should get migrated are VEVENTs
        and VTODOs.  Other component types, such as free-standing VTIMEZONEs,
        don't have a UID and can't be stored properly in the database, so they
        should not be migrated.
        """
        yield self.upgrader.stepWithResult(None)
        txn = self.sqlStore.newTransaction()
        self.addCleanup(txn.commit)
        self.assertIdentical(
            None, (yield
                   (yield
                    (yield
                     (yield txn.calendarHomeWithUID("home1")).calendarWithName(
                         "calendar_1"))).calendarObjectWithName("bogus.ics")))

    @inlineCallbacks
    def test_upgradeCalendarHomes(self):
        """
        L{UpgradeToDatabaseService.startService} will do the upgrade, then
        start its dependent service by adding it to its service hierarchy.
        """

        # Create a fake directory in the same place as a home, but with a non-existent uid
        fake_dir = self.filesPath.child("calendars").child("__uids__").child(
            "ho").child("me").child("foobar")
        fake_dir.makedirs()

        # Create a fake file in the same place as a home, with a name that matches the hash uid prefix
        fake_file = self.filesPath.child("calendars").child("__uids__").child(
            "ho").child("me").child("home_file")
        fake_file.setContent("")

        yield self.upgrader.stepWithResult(None)
        txn = self.sqlStore.newTransaction()
        self.addCleanup(txn.commit)
        for uid in CommonTests.requirements:
            if CommonTests.requirements[uid] is not None:
                self.assertNotIdentical(None,
                                        (yield txn.calendarHomeWithUID(uid)))
        # Successfully migrated calendar homes are deleted
        self.assertFalse(
            self.filesPath.child("calendars").child("__uids__").child(
                "ho").child("me").child("home1").exists())

        # Want metadata preserved
        home = (yield txn.calendarHomeWithUID("home1"))
        calendar = (yield home.calendarWithName("calendar_1"))
        for name, metadata, md5 in (
            ("1.ics", CommonTests.metadata1, CommonTests.md5Values[0]),
            ("2.ics", CommonTests.metadata2, CommonTests.md5Values[1]),
            ("3.ics", CommonTests.metadata3, CommonTests.md5Values[2]),
        ):
            object = (yield calendar.calendarObjectWithName(name))
            self.assertEquals(object.getMetadata(), metadata)
            self.assertEquals(object.md5(), md5)

    @withSpecialValue("extraRequirements", {"nonexistent": {"calendar_1": {}}})
    @inlineCallbacks
    def test_upgradeCalendarHomesMissingDirectoryRecord(self):
        """
        Test an upgrade where a directory record is missing for a home;
        the original home directory will remain on disk.
        """
        yield self.upgrader.stepWithResult(None)
        txn = self.sqlStore.newTransaction()
        self.addCleanup(txn.commit)
        for uid in CommonTests.requirements:
            if CommonTests.requirements[uid] is not None:
                self.assertNotIdentical(None,
                                        (yield txn.calendarHomeWithUID(uid)))
        self.assertIdentical(None,
                             (yield txn.calendarHomeWithUID(u"nonexistent")))
        # Skipped calendar homes are not deleted
        self.assertTrue(
            self.filesPath.child("calendars").child("__uids__").child(
                "no").child("ne").child("nonexistent").exists())

    @inlineCallbacks
    def test_upgradeExistingHome(self):
        """
        L{UpgradeToDatabaseService.startService} will skip migrating existing
        homes.
        """
        startTxn = self.sqlStore.newTransaction("populate empty sample")
        yield startTxn.calendarHomeWithUID("home1", create=True)
        yield startTxn.commit()
        yield self.upgrader.stepWithResult(None)
        vrfyTxn = self.sqlStore.newTransaction("verify sample still empty")
        self.addCleanup(vrfyTxn.commit)
        home = yield vrfyTxn.calendarHomeWithUID("home1")
        # The default calendar is still there.
        self.assertNotIdentical(None,
                                (yield home.calendarWithName("calendar")))
        # The migrated calendar isn't.
        self.assertIdentical(None, (yield home.calendarWithName("calendar_1")))

    @inlineCallbacks
    def test_upgradeAttachments(self):
        """
        L{UpgradeToDatabaseService.startService} upgrades calendar attachments
        as well.
        """

        # Need to tweak config and settings to setup dropbox to work
        self.patch(config, "EnableDropBox", True)
        self.patch(config, "EnableManagedAttachments", False)
        self.sqlStore.enableManagedAttachments = False

        txn = self.sqlStore.newTransaction()
        cs = schema.CALENDARSERVER
        yield Delete(From=cs, Where=cs.NAME == "MANAGED-ATTACHMENTS").on(txn)
        yield txn.commit()

        txn = self.fileStore.newTransaction()
        committed = []

        def maybeCommit():
            if not committed:
                committed.append(True)
                return txn.commit()

        self.addCleanup(maybeCommit)

        @inlineCallbacks
        def getSampleObj():
            home = (yield txn.calendarHomeWithUID("home1"))
            calendar = (yield home.calendarWithName("calendar_1"))
            object = (yield calendar.calendarObjectWithName("1.ics"))
            returnValue(object)

        inObject = yield getSampleObj()
        someAttachmentName = "some-attachment"
        someAttachmentType = MimeType.fromString("application/x-custom-type")
        attachment = yield inObject.createAttachmentWithName(
            someAttachmentName, )
        transport = attachment.store(someAttachmentType)
        someAttachmentData = "Here is some data for your attachment, enjoy."
        transport.write(someAttachmentData)
        yield transport.loseConnection()
        yield maybeCommit()
        yield self.upgrader.stepWithResult(None)
        committed = []
        txn = self.sqlStore.newTransaction()
        outObject = yield getSampleObj()
        outAttachment = yield outObject.attachmentWithName(someAttachmentName)
        allDone = Deferred()

        class SimpleProto(Protocol):
            data = ''

            def dataReceived(self, data):
                self.data += data

            def connectionLost(self, reason):
                allDone.callback(self.data)

        self.assertEquals(outAttachment.contentType(), someAttachmentType)
        outAttachment.retrieve(SimpleProto())
        allData = yield allDone
        self.assertEquals(allData, someAttachmentData)

    @inlineCallbacks
    def test_upgradeAddressBookHomes(self):
        """
        L{UpgradeToDatabaseService.startService} will do the upgrade, then
        start its dependent service by adding it to its service hierarchy.
        """
        yield self.upgrader.stepWithResult(None)
        txn = self.sqlStore.newTransaction()
        self.addCleanup(txn.commit)
        for uid in ABCommonTests.requirements:
            if ABCommonTests.requirements[uid] is not None:
                self.assertNotIdentical(
                    None, (yield txn.addressbookHomeWithUID(uid)))
        # Successfully migrated addressbook homes are deleted
        self.assertFalse(
            self.filesPath.child("addressbooks").child("__uids__").child(
                "ho").child("me").child("home1").exists())

        # Want metadata preserved
        home = (yield txn.addressbookHomeWithUID("home1"))
        adbk = (yield home.addressbookWithName("addressbook"))
        for name, md5 in (
            ("1.vcf", ABCommonTests.md5Values[0]),
            ("2.vcf", ABCommonTests.md5Values[1]),
            ("3.vcf", ABCommonTests.md5Values[2]),
        ):
            object = (yield adbk.addressbookObjectWithName(name))
            self.assertEquals(object.md5(), md5)

    @inlineCallbacks
    def test_upgradeProperties(self):
        """
        L{UpgradeToDatabaseService.startService} will do the upgrade, then
        start its dependent service by adding it to its service hierarchy.
        """
        yield self.upgrader.stepWithResult(None)
        txn = self.sqlStore.newTransaction()
        self.addCleanup(txn.commit)

        # Want metadata preserved
        home = (yield txn.calendarHomeWithUID("home_defaults"))
        cal = (yield home.calendarWithName("calendar_1"))
        inbox = (yield home.calendarWithName("inbox"))

        # Supported components
        self.assertEqual(cal.getSupportedComponents(), "VEVENT")
        self.assertTrue(cal.properties().get(
            PropertyName.fromElement(caldavxml.SupportedCalendarComponentSet))
                        is None)

        # Resource type removed
        self.assertTrue(cal.properties().get(
            PropertyName.fromElement(element.ResourceType)) is None)

        # Ctag removed
        self.assertTrue(cal.properties().get(
            PropertyName.fromElement(customxml.GETCTag)) is None)

        # Availability
        self.assertEquals(str(home.getAvailability()), str(self.av1))
        self.assertTrue(inbox.properties().get(
            PropertyName.fromElement(customxml.CalendarAvailability)) is None)

        # Default calendar
        self.assertTrue(home.isDefaultCalendar(cal))
        self.assertTrue(inbox.properties().get(
            PropertyName.fromElement(caldavxml.ScheduleDefaultCalendarURL)) is
                        None)

    def test_fileStoreFromPath(self):
        """
        Verify that fileStoreFromPath() will return a CommonDataStore if
        the given path contains either "calendars" or "addressbooks"
        sub-directories.  Otherwise it returns None
        """

        # No child directories
        docRootPath = CachingFilePath(self.mktemp())
        docRootPath.createDirectory()
        step = UpgradeToDatabaseStep.fileStoreFromPath(docRootPath)
        self.assertEquals(step, None)

        # "calendars" child directory exists
        childPath = docRootPath.child("calendars")
        childPath.createDirectory()
        step = UpgradeToDatabaseStep.fileStoreFromPath(docRootPath)
        self.assertTrue(isinstance(step, CommonDataStore))
        childPath.remove()

        # "addressbooks" child directory exists
        childPath = docRootPath.child("addressbooks")
        childPath.createDirectory()
        step = UpgradeToDatabaseStep.fileStoreFromPath(docRootPath)
        self.assertTrue(isinstance(step, CommonDataStore))
        childPath.remove()
Example #50
def get_server_cert():
    return getModule(__name__).filePath.sibling('server.pem').getContent()
Example #51
    def test_namespacedPackages(self):
        """
        Duplicate packages are not yielded when iterating over namespace
        packages.
        """
        # Force pkgutil to be loaded already, since the probe package being
        # created depends on it, and the replaceSysPath call below will make
        # pretty much everything unimportable.
        __import__("pkgutil")

        namespaceBoilerplate = (
            b"import pkgutil; "
            b"__path__ = pkgutil.extend_path(__path__, __name__)")

        # Create two temporary directories with packages:
        #
        #   entry:
        #       test_package/
        #           __init__.py
        #           nested_package/
        #               __init__.py
        #               module.py
        #
        #   anotherEntry:
        #       test_package/
        #           __init__.py
        #           nested_package/
        #               __init__.py
        #               module2.py
        #
        # test_package and test_package.nested_package are namespace packages,
        # and when both of these are in sys.path, test_package.nested_package
        # should become a virtual package containing both "module" and
        # "module2"

        entry = self.pathEntryWithOnePackage()
        testPackagePath = entry.child("test_package")
        testPackagePath.child("__init__.py").setContent(namespaceBoilerplate)

        nestedEntry = testPackagePath.child("nested_package")
        nestedEntry.makedirs()
        nestedEntry.child("__init__.py").setContent(namespaceBoilerplate)
        nestedEntry.child("module.py").setContent(b"")

        anotherEntry = self.pathEntryWithOnePackage()
        anotherPackagePath = anotherEntry.child("test_package")
        anotherPackagePath.child("__init__.py").setContent(
            namespaceBoilerplate)

        anotherNestedEntry = anotherPackagePath.child("nested_package")
        anotherNestedEntry.makedirs()
        anotherNestedEntry.child("__init__.py").setContent(
            namespaceBoilerplate)
        anotherNestedEntry.child("module2.py").setContent(b"")

        self.replaceSysPath([entry.path, anotherEntry.path])

        module = modules.getModule("test_package")

        # We have to use importPackages=True in order to resolve the namespace
        # packages, so we remove the imported packages from sys.modules after
        # walking
        try:
            walkedNames = [
                mod.name for mod in module.walkModules(importPackages=True)
            ]
        finally:
            for module in list(sys.modules.keys()):
                if module.startswith("test_package"):
                    del sys.modules[module]

        expected = [
            "test_package",
            "test_package.nested_package",
            "test_package.nested_package.module",
            "test_package.nested_package.module2",
        ]

        self.assertEqual(walkedNames, expected)
Example #52
0
class WebAdminPage(Element):
    """
    Web administration renderer for HTML.

    @ivar resource: a L{WebAdminResource}.
    """

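    # The HTML template lives on disk next to this module; getModule() is used
    # here only to locate that sibling file.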
    loader = XMLFile(getModule(__name__).filePath.sibling("delegation.html"))

    def __init__(self, resource):
        super(WebAdminPage, self).__init__()
        self.resource = resource

    @renderer
    def main(self, request, tag):
        """
        Main renderer, which fills page-global slots like 'title'.
        """
        searchTerm = request.args.get('resourceSearch', [''])[0]
        return tag.fillSlots(resourceSearch=searchTerm)

    @renderer
    @inlineCallbacks
    def hasSearchResults(self, request, tag):
        """
        Renderer which detects if there are resource search results and
        continues if so.
        """
        if 'resourceSearch' not in request.args:
            returnValue('')
        if (yield self.performSearch(request)):
            returnValue(tag)
        else:
            returnValue('')

    @renderer
    @inlineCallbacks
    def noSearchResults(self, request, tag):
        """
        Renderer which detects if there are no resource search results and
        continues if so.
        """
        if 'resourceSearch' not in request.args:
            returnValue('')
        rows = yield self.performSearch(request)
        if rows:
            returnValue("")
        else:
            returnValue(tag)

    _searchResults = None

    @inlineCallbacks
    def performSearch(self, request):
        """
        Perform a directory search for users, groups, and resources based on the
        resourceSearch query parameter.  Cache the results of that search so
        that it will only be done once per request.
        """
        if self._searchResults is not None:
            returnValue(self._searchResults)
        searchTerm = request.args.get('resourceSearch', [''])[0]
        if searchTerm:
            results = sorted((yield self.resource.search(searchTerm)),
                             key=lambda record: record.fullNames[0])
        else:
            results = []
        self._searchResults = results
        returnValue(results)

    @renderer
    def searchResults(self, request, tag):
        """
        Renderer which renders resource search results.
        """
        d = self.performSearch(request)
        return d.addCallback(searchToSlots, tag)

    @renderer
    @inlineCallbacks
    def resourceDetails(self, request, tag):
        """
        Renderer which fills slots for details of the resource selected by
        the resourceId request parameter.
        """
        resourceId = request.args.get('resourceId', [''])[0]
        propertyName = request.args.get('davPropertyName', [''])[0]
        proxySearch = request.args.get('proxySearch', [''])[0]
        if resourceId:
            principalResource = yield self.resource.getResourceById(
                request, resourceId)
            returnValue(
                DetailsElement(resourceId, principalResource, propertyName,
                               proxySearch, tag, self.resource))
        else:
            returnValue("")
Example #53
0
def get_public_cert():
    return getModule(__name__).filePath.sibling('public.pem').getContent()
Example #54
0
    def setUp(self):
        self.packageNames = []
        for mod in getModule('twisted').walkModules():
            if mod.isPackage():
                self.packageNames.append(mod.name)
Example #55
0
    def parseArgs(self, *benchmarks):
        if not benchmarks:
            raise UsageError("Specify at least one benchmark")
        self['benchmarks'] = self._selectBenchmarks(list(benchmarks))


def whichPIDs(source, conf):
    """
    Return a list of PIDs to dtrace.
    """
    run = source.preauthChild(conf['ServerRoot']).preauthChild(conf['RunRoot'])
    return [run.child(conf['PIDFile']).getContent()
            ] + [pid.getContent() for pid in run.globChildren('*instance*')]


_benchmarks = getModule("contrib.performance.benchmarks")


def resolveBenchmark(name):
    for module in _benchmarks.iterModules():
        if module.name == ".".join((_benchmarks.name, name)):
            return module.load()
    raise ValueError("Unknown benchmark: %r" % (name, ))


def main():
    from twisted.python.log import startLogging, err

    options = BenchmarkOptions()
    try:
        options.parseOptions(sys.argv[1:])
Example #56
0
def getCache(module):
    """
    Compute all the possible loadable plugins, while loading as few as
    possible and hitting the filesystem as little as possible.

    @param module: a Python module object.  This represents a package to search
    for plugins.

    @return: a dictionary mapping module names to CachedDropin instances.
    """
    allCachesCombined = {}
    mod = getModule(module.__name__)
    # don't want to walk deep, only immediate children.
    buckets = {}
    # Fill buckets with modules by related entry on the given package's
    # __path__.  There's an abstraction inversion going on here, because this
    # information is already represented internally in twisted.python.modules,
    # but it's simple enough that I'm willing to live with it.  If anyone else
    # wants to fix up this iteration so that it's one path segment at a time,
    # be my guest.  --glyph
    for plugmod in mod.iterModules():
        fpp = plugmod.filePath.parent()
        if fpp not in buckets:
            buckets[fpp] = []
        bucket = buckets[fpp]
        bucket.append(plugmod)
    for pseudoPackagePath, bucket in buckets.iteritems():
        dropinPath = pseudoPackagePath.child('dropin.cache')
        try:
            lastCached = dropinPath.getModificationTime()
            dropinDotCache = pickle.load(dropinPath.open('r'))
        except:
            dropinDotCache = {}
            lastCached = 0

        needsWrite = False
        existingKeys = {}
        for pluginModule in bucket:
            pluginKey = pluginModule.name.split('.')[-1]
            existingKeys[pluginKey] = True
            if ((pluginKey not in dropinDotCache) or
                (pluginModule.filePath.getModificationTime() >= lastCached)):
                needsWrite = True
                try:
                    provider = pluginModule.load()
                except:
                    # dropinDotCache.pop(pluginKey, None)
                    log.err()
                else:
                    entry = _generateCacheEntry(provider)
                    dropinDotCache[pluginKey] = entry
        # Make sure that the cache doesn't contain any stale plugins.
        for pluginKey in dropinDotCache.keys():
            if pluginKey not in existingKeys:
                del dropinDotCache[pluginKey]
                needsWrite = True
        if needsWrite:
            try:
                dropinPath.setContent(pickle.dumps(dropinDotCache))
            except:
                log.err()
        allCachesCombined.update(dropinDotCache)
    return allCachesCombined
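# A small driver sketch, assuming the conventional plugin package layout;
# twisted.plugins is used purely as an illustrative target package.
from twisted import plugins

dropinCache = getCache(plugins)
for dropinName in sorted(dropinCache):
    print(dropinName)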
Example #57
0
def holiday(uid):
    return (getModule("twistedcaldav.test").filePath.sibling("data").child(
        "Holidays").child(uid + ".ics").getContent())
Example #58
0
from txdav.who.directory import CalendarDirectoryRecordMixin
from txdav.xml import element as davxml
from txweb2 import responsecode
from txweb2.auth.digest import DigestedCredentials
from txweb2.auth.tls import TLSCredentials
from txweb2.dav.noneprops import NonePropertyStore
from txweb2.dav.util import joinURL
from txweb2.http import HTTPError

try:
    from twistedcaldav.authkerb import NegotiateCredentials
    NegotiateCredentials  # sigh, pyflakes
except ImportError:
    NegotiateCredentials = None

thisModule = getModule(__name__)
log = Logger()


class PermissionsMixIn(ReadOnlyResourceMixIn):
    def defaultAccessControlList(self):
        return succeed(authReadACL)

    @inlineCallbacks
    def accessControlList(self,
                          request,
                          inheritance=True,
                          expanding=False,
                          inherited_aces=None):

        try:
Example #59
0
import OpenSSL
import pytest
from service_identity.exceptions import VerificationError
from twisted.internet import reactor
from twisted.internet.ssl import PrivateCertificate
from twisted.python.modules import getModule
from twisted.web.client import Agent, BrowserLikePolicyForHTTPS
from twisted.web.http_headers import Headers

sslcgit_trustRoot = (getModule(__name__).filePath.parent().sibling(
    'certs').child('ssl.cgit.main.pem').getContent())
invalid_trustRoot = (
    getModule(__name__).filePath.sibling('invalid.pem').getContent())


@pytest.fixture
def url_trustRoot(request):
    return request.param


@pytest.fixture
def agent_call(url_trustRoot):
    url, trustRoot = url_trustRoot
    if trustRoot:
        customPolicy = BrowserLikePolicyForHTTPS(
            PrivateCertificate.loadPEM(trustRoot))
        agent = Agent(reactor, customPolicy)
    else:
        agent = Agent(reactor)

    headers = Headers({
Example #60
0
from errno import EPERM
from zope.interface import implements

from twisted.trial import unittest
from twisted.internet import error, interfaces
from twisted.internet import endpoints
from twisted.internet.address import IPv4Address, UNIXAddress
from twisted.internet.protocol import ClientFactory, Protocol
from twisted.test.proto_helpers import MemoryReactor, RaisingMemoryReactor
from twisted.python.failure import Failure

from twisted import plugins
from twisted.python.modules import getModule
from twisted.python.filepath import FilePath

pemPath = getModule("twisted.test").filePath.sibling("server.pem")
casPath = getModule(__name__).filePath.sibling("fake_CAs")
escapedPEMPathName = endpoints.quoteStringArgument(pemPath.path)
escapedCAsPathName = endpoints.quoteStringArgument(casPath.path)
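# Sketch of the intended use of the escaped paths: they can be spliced into a
# server endpoint description string (the port and argument names below follow
# the stock "ssl:" endpoint syntax and are illustrative only).
sslServerDescription = "ssl:4433:privateKey=%s:certKey=%s:caCertsDir=%s" % (
    escapedPEMPathName, escapedPEMPathName, escapedCAsPathName)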

try:
    from twisted.test.test_sslverify import makeCertificate
    from twisted.internet.ssl import CertificateOptions, Certificate, \
        KeyPair, PrivateCertificate
    from OpenSSL.SSL import ContextType
    testCertificate = Certificate.loadPEM(pemPath.getContent())
    testPrivateCertificate = PrivateCertificate.loadPEM(pemPath.getContent())

    skipSSL = False
except ImportError:
    skipSSL = "OpenSSL is required to construct SSL Endpoints"