Example #1
    def __init__(self, model):
        if '__pypy__' in sys.modules:
            # try to use psycopg2ct if we are on PyPy
            try:
                from psycopg2ct import compat
                compat.register()

                # monkey patch so that Storm does not crash on register type
                import psycopg2
                psycopg2._psycopg = object

                class _psycopg:
                    UNICODEARRAY = psycopg2.extensions.UNICODEARRAY

                from twisted.python.monkey import MonkeyPatcher
                monkey_patcher = MonkeyPatcher(
                    (psycopg2, '_psycopg', _psycopg))
                monkey_patcher.patch()

            except ImportError:
                raise RuntimeError(
                    'You are trying to use PostgreSQL with PyPy. The regular '
                    'psycopg2 module does not work with PyPy; install '
                    'psycopg2ct to use psycopg2 with PyPy'
                )

        self.model = model
Example #2
 def setUp(self):
     self.monkey_patcher = MonkeyPatcher()
     self.monkey_patcher.addPatch(krpc_sender, "reactor", HollowReactor())
     self.monkey_patcher.patch()
     self.k_iter = KRPC_Iterator()
     self.k_iter.transport = HollowTransport()
     self.target_id = 5
Example #3
 def test_put_verifyProperRemoval(self):
     # Replace the time function of the datastore module
     # so that we can artificially speed up time
     monkey_patcher = MonkeyPatcher()
     c = clock()
     c.set(0)
     monkey_patcher.addPatch(datastore, "time", c)
     # Replace peer_timeout with 5 seconds
     monkey_patcher.addPatch(constants, "peer_timeout", 5)
     monkey_patcher.patch()
     # Insert a node and verify it is within the datastore
     m = self.datastore(self.reactor)
     infohash = 5
     expected_peer = ("127.0.0.1", 5151)
     m.put(infohash, expected_peer)
     peers = m.get(infohash)
     # Iterate over a 1 element list
     for peer in peers:
         self.assertEqual(expected_peer, peer)
     self.assertEqual(1, len(peers))
     # Change the time and verify that the cleaning function
     # actually removes the peer
     c.set(5)
     # TODO: hackish, shouldn't reach into the object
     m._cleanup(infohash, peer)
     peers = m.get(infohash)
     self.assertEqual(0, len(peers))
     monkey_patcher.restore()
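The clock object patched in above is not defined in this snippet; judging from how it is used (set() to move time, then called in place of the datastore module's time function), a minimal stand-in might look like the following sketch (an assumption, not the project's actual helper):

class Clock:
    """Callable stand-in for time.time() whose value is set manually."""
    def __init__(self, now=0):
        self._now = now

    def set(self, now):
        # Artificially move the clock to an absolute time.
        self._now = now

    def __call__(self):
        # Called wherever the patched code would call time.time().
        return self._now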
Example #4
    def run(self, result):
        """
        Run the test case in the context of a distinct Eliot action.

        The action will finish after the test is done.  It will note the name of
        the test being run.

        All messages emitted by the test will be validated.  They will still be
        delivered to the global logger.
        """
        # The idea here is to decorate the test method itself so that all of
        # the extra logic happens at the point where test/application logic is
        # expected to be.  This `run` method is more like test infrastructure
        # and things do not go well when we add too much extra behavior here.
        # For example, exceptions raised here often just kill the whole
        # runner.
        patcher = MonkeyPatcher()

        # So, grab the test method.
        name = self.case._testMethodName
        original = getattr(self.case, name)
        decorated = with_logging(ensure_text(self.case.id()), original)
        patcher.addPatch(self.case, name, decorated)
        try:
            # Patch it in
            patcher.patch()
            # Then use the rest of the machinery to run it.
            return self._run_tests_with_factory(
                self.case,
                self.handlers,
                self.last_resort,
            ).run(result)
        finally:
            # Clean up the patching so the original test method is restored.
            patcher.restore()
Example #5
    def test_exclude_from_tilde_expansion(self):
        basedir = "cli/Backup/exclude_from_tilde_expansion"
        fileutil.make_dirs(basedir)
        nodeurl_path = os.path.join(basedir, 'node.url')
        fileutil.write(nodeurl_path, 'http://example.net:2357/')

        # ensure that tilde expansion is performed on exclude-from argument
        exclude_file = u'~/.tahoe/excludes.dummy'

        ns = Namespace()
        ns.called = False
        original_open = open
        def call_file(name, *args, **kwargs):
            if name.endswith("excludes.dummy"):
                ns.called = True
                self.failUnlessEqual(name, abspath_expanduser_unicode(exclude_file))
                return StringIO()
            else:
                return original_open(name, *args, **kwargs)

        if PY2:
            from allmydata.scripts import cli as module_to_patch
        else:
            import builtins as module_to_patch
        patcher = MonkeyPatcher((module_to_patch, 'open', call_file))
        patcher.runWithPatches(parse_options, basedir, "backup", ['--exclude-from-utf-8', unicode_to_argv(exclude_file), 'from', 'to'])
        self.failUnless(ns.called)
Example #6
    def test_config_drive(self):
        """
        The instance ID is retrieved from the config drive in preference to the
        metadata server.
        """
        patch = MonkeyPatcher()
        # A compute_instance_id found on config drive
        drive_compute_instance_id = unicode(uuid4())
        # A compute_instance_id found from the metadata service
        server_compute_instance_id = unicode(uuid4())

        # Set up a fake config drive and point the API to its label
        configdrive_label = filesystem_label_for_test(self)
        device = formatted_loopback_device_for_test(
            self,
            label=configdrive_label,
        )
        with temporary_mount(device.device) as mountpoint:
            metadata_file = mountpoint.descendant(METADATA_RELATIVE_PATH)
            metadata_file.parent().makedirs()
            metadata_file.setContent(
                json.dumps({"uuid": drive_compute_instance_id}))
        patch.addPatch(
            self.api,
            '_config_drive_label',
            configdrive_label,
        )
        # Set up a fake metadata service and point the API to its endpoint
        listening = webserver_for_test(
            self,
            url_path="/" + "/".join(METADATA_RELATIVE_PATH),
            response_content=json.dumps({"uuid": server_compute_instance_id}),
        )

        def set_metadata_service_endpoint(port):
            address = port.getHost()
            endpoint = (address.host, address.port)
            patch.addPatch(
                self.api,
                '_metadata_service_endpoint',
                endpoint,
            )
            return port

        listening.addCallback(set_metadata_service_endpoint)

        # Run compute_instance_id in a separate thread, with the
        # API patched to check the fake metadata sources.
        def start_compute_instance_id(port):
            patch.patch()
            return deferToThread(self.api.compute_instance_id)

        connecting = listening.addCallback(start_compute_instance_id)

        def check(result):
            self.assertEqual(drive_compute_instance_id, result)

        checking = connecting.addCallback(check)
        return checking
Example #7
 def test_constructWithPatches(self):
     """
     Constructing a L{MonkeyPatcher} with patches should add all of the
     given patches to the patch list.
     """
     patcher = MonkeyPatcher((self.testObject, "foo", "haha"),
                             (self.testObject, "bar", "hehe"))
     patcher.patch()
     self.assertEqual("haha", self.testObject.foo)
     self.assertEqual("hehe", self.testObject.bar)
     self.assertEqual(self.originalObject.baz, self.testObject.baz)
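For reference, the constructor pattern exercised by this test can be shown in a self-contained sketch; Config here is a hypothetical class used purely for illustration:

from twisted.python.monkey import MonkeyPatcher

class Config:
    host = "localhost"
    port = 8080

# Each constructor argument is an (object, attribute_name, new_value) triple.
patcher = MonkeyPatcher((Config, "host", "example.com"),
                        (Config, "port", 9090))
patcher.patch()
assert Config.host == "example.com" and Config.port == 9090
patcher.restore()  # puts the original values back
assert Config.host == "localhost" and Config.port == 8080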
Example #8
 def test_constructWithPatches(self):
     """
     Constructing a L{MonkeyPatcher} with patches should add all of the
     given patches to the patch list.
     """
     patcher = MonkeyPatcher((self.testObject, 'foo', 'haha'),
                             (self.testObject, 'bar', 'hehe'))
     patcher.patch()
     self.assertEqual('haha', self.testObject.foo)
     self.assertEqual('hehe', self.testObject.bar)
     self.assertEqual(self.originalObject.baz, self.testObject.baz)
Example #9
    def test_list_removed_containers(self):
        """
        ``DockerClient.list`` does not list containers which are removed,
        during its operation, from another thread.
        """
        patcher = MonkeyPatcher()

        namespace = namespace_for_test(self)
        flocker_docker_client = DockerClient(namespace=namespace)

        name1 = random_name(self)
        adding_unit1 = flocker_docker_client.add(name1, ANY_IMAGE)
        self.addCleanup(flocker_docker_client.remove, name1)

        name2 = random_name(self)
        adding_unit2 = flocker_docker_client.add(name2, ANY_IMAGE)
        self.addCleanup(flocker_docker_client.remove, name2)

        docker_client = flocker_docker_client._client
        docker_client_containers = docker_client.containers

        def simulate_missing_containers(*args, **kwargs):
            """
            Remove a container before returning the original list.
            """
            containers = docker_client_containers(*args, **kwargs)
            container_name1 = flocker_docker_client._to_container_name(name1)
            docker_client.remove_container(
                container=container_name1, force=True)
            return containers

        adding_units = gatherResults([adding_unit1, adding_unit2])

        def get_list(ignored):
            patcher.addPatch(
                docker_client,
                'containers',
                simulate_missing_containers
            )
            patcher.patch()
            return flocker_docker_client.list()

        listing_units = adding_units.addCallback(get_list)

        def check_list(units):
            patcher.restore()
            self.assertEqual(
                [name2], sorted([unit.name for unit in units])
            )
        running_assertions = listing_units.addCallback(check_list)

        return running_assertions
Example #10
    def _monkey_patch(self):
        """
        Monkeypatch some parts of the twisted library that are waiting
        for bugfix inclusion in trunk
        """

        if not self.monkey_patched:
            # add new method
            setattr(http.Request, 'getClientProxyIP', getClientProxyIP)

            # patch getClientIP
            monkey_patcher = MonkeyPatcher(
                (http.Request, 'getClientIP', getClientIPPatch))
            monkey_patcher.patch()
            self.monkey_patched = True
Example #11
    def test_metadata_service(self):
        """
        The instance ID is retrieved from the metadata service if it can't be
        found on the config drive.
        """
        patch = MonkeyPatcher()
        # A compute_instance_id found from the metadata service
        server_compute_instance_id = unicode(uuid4())

        # Point the API to a config drive label that won't be found.
        configdrive_label = filesystem_label_for_test(self)
        patch.addPatch(
            self.api,
            '_config_drive_label',
            configdrive_label,
        )
        # Set up a fake metadata service and point the API to its endpoint
        listening = webserver_for_test(
            self,
            url_path="/" + "/".join(METADATA_RELATIVE_PATH),
            response_content=json.dumps({"uuid": server_compute_instance_id}),
        )

        def set_metadata_service_endpoint(port):
            address = port.getHost()
            endpoint = (address.host, address.port)
            patch.addPatch(
                self.api,
                '_metadata_service_endpoint',
                endpoint,
            )
            return port

        listening.addCallback(set_metadata_service_endpoint)

        # Run compute_instance_id in a separate thread, with the
        # API patched to check the fake metadata sources.
        def start_compute_instance_id(port):
            patch.patch()
            return deferToThread(self.api.compute_instance_id)

        connecting = listening.addCallback(start_compute_instance_id)

        def check(result):
            self.assertEqual(server_compute_instance_id, result)

        checking = connecting.addCallback(check)
        return checking
Example #12
    def __init__(self, model):
        if '__pypy__' in sys.modules:
            # try to use psycopg2ct if we are on PyPy
            try:
                from psycopg2ct import compat
                compat.register()

                # monkey patch so that Storm does not crash on register type
                import psycopg2
                psycopg2._psycopg = object

                class _psycopg:
                    UNICODEARRAY = psycopg2.extensions.UNICODEARRAY

                from twisted.python.monkey import MonkeyPatcher
                monkey_patcher = MonkeyPatcher(
                    (psycopg2, '_psycopg', _psycopg))
                monkey_patcher.patch()

            except ImportError:
                raise RuntimeError(
                    'You are trying to use PostgreSQL with PyPy. The regular '
                    'psycopg2 module does not work with PyPy; install '
                    'psycopg2ct to use psycopg2 with PyPy'
                )

        self.model = model

        self._columns_mapping = {
            properties.Bool: 'bool',
            properties.UUID: 'uuid',
            properties.RawStr: 'bytea',
            properties.Pickle: 'bytea',
            properties.JSON: 'json',
            properties.DateTime: 'timestamp',
            properties.Date: 'date',
            properties.Time: 'time',
            properties.TimeDelta: 'interval',
            properties.Enum: 'integer',
            properties.Decimal: 'decimal'
        }

        self.parse = singledispatch(self.parse)
        self.parse.register(properties.Int, self._parse_int)
        self.parse.register(properties.Unicode, self._parse_unicode)
        self.parse.register(properties.Float, self._parse_float)
        self.parse.register(properties.List, self._parse_list)
        self.parse.register(NativeEnum, self._parse_enum)
Example #13
 def test_unknown_instance_id(self):
     """
     ``UnknownInstanceID`` is raised if all node UUID lookup mechanisms
     fail.
     """
     patch = MonkeyPatcher()
     # Use non-existent config drive label.
     # Mount will fail.
     patch.addPatch(self.api, '_config_drive_label',
                    filesystem_label_for_test(self))
     # Use an unreachable metadata service endpoint address.
     # TCP connections will fail.
     patch.addPatch(self.api, '_metadata_service_endpoint',
                    find_free_port())
     self.addCleanup(patch.restore)
     patch.patch()
     self.assertRaises(UnknownInstanceID, self.api.compute_instance_id)
Example #14
    def test_exclude_from_tilde_expansion(self):
        basedir = "cli/Backup/exclude_from_tilde_expansion"
        fileutil.make_dirs(basedir)
        nodeurl_path = os.path.join(basedir, 'node.url')
        fileutil.write(nodeurl_path, 'http://example.net:2357/')

        # ensure that tilde expansion is performed on exclude-from argument
        exclude_file = u'~/.tahoe/excludes.dummy'

        ns = Namespace()
        ns.called = False
        def call_file(name, *args):
            ns.called = True
            self.failUnlessEqual(name, abspath_expanduser_unicode(exclude_file))
            return StringIO()

        patcher = MonkeyPatcher((__builtin__, 'file', call_file))
        patcher.runWithPatches(parse_options, basedir, "backup", ['--exclude-from', unicode_to_argv(exclude_file), 'from', 'to'])
        self.failUnless(ns.called)
Example #15
    def test_report_import_error(self):
        marker = "wheeeyo"
        real_import_func = __import__
        def raiseIE_from_this_particular_func(name, *args):
            if name == "foolscap":
                raise ImportError(marker + " foolscap cant be imported")
            else:
                return real_import_func(name, *args)

        # Let's run as little code as possible with __import__ patched.
        patcher = MonkeyPatcher((__builtin__, '__import__', raiseIE_from_this_particular_func))
        vers_and_locs, errors = patcher.runWithPatches(allmydata.get_package_versions_and_locations)

        foolscap_stuffs = [stuff for (pkg, stuff) in vers_and_locs if pkg == 'foolscap']
        self.failUnlessEqual(len(foolscap_stuffs), 1)
        comment = str(foolscap_stuffs[0][2])
        self.failUnlessIn(marker, comment)
        self.failUnlessIn('raiseIE_from_this_particular_func', comment)

        self.failUnless([e for e in errors if "dependency \'foolscap\' could not be imported" in e])
Example #16
    def __init__(self, pool=None, testing=False):
        if pool is not None:
            self.pool = pool

        self.started = False
        self.__testing = testing

        if not self.zstorm_configured:
            provideUtility(global_zstorm, IZStorm)
            zstorm = getUtility(IZStorm)
            zstorm.set_default_uri('mamba', config.Database().uri)

        SQLite.register()
        MySQL.register()
        PostgreSQL.register()

        # MonkeyPatch Storm
        if not self.monkey_patched:
            monkey_patcher = MonkeyPatcher(
                (properties, 'PropertyColumn', PropertyColumnMambaPatch))
            monkey_patcher.patch()
            self.monkey_patched = True
Example #17
 def test_error_logging(self, logger):
     """
     Failures while applying a diff emit a log message containing the full
     diff.
     """
     o1 = DiffTestObjInvariant(
         a=1,
         b=2,
     )
     patcher = MonkeyPatcher()
     patcher.addPatch(DiffTestObjInvariant, '_perform_invariant_check',
                      False)
     patcher.patch()
     try:
         o2 = o1.set('b', 1)
     finally:
         patcher.restore()
     diff = create_diff(o1, o2)
     self.assertRaises(
         InvariantException,
         diff.apply,
         o1,
     )
Example #18
 def test_put_reannounceResetsTimer(self):
     # Replace the time function of the datastore module
     # so that we can artificially speed up time
     monkey_patcher = MonkeyPatcher()
     c = clock()
     c.set(0)
     monkey_patcher.addPatch(datastore, "time", c)
     # Replace peer_timeout with 5 seconds
     monkey_patcher.addPatch(constants, "peer_timeout", 5)
     monkey_patcher.patch()
     # Insert a node and verify it is within the datastore
     m = self.datastore(self.reactor)
     infohash = 5
     expected_peer = ("127.0.0.1", 5151)
     m.put(infohash, expected_peer)
     peers = m.get(infohash)
     # Iterate over a 1 element list
     self.assertEqual(1, len(peers))
     for peer in peers:
         self.assertEqual(expected_peer, peer)
     # Change the time and reannounce the peer
     # (make sure the cleanup function doesn't
     #  remove the peer yet)
     c.set(4)
     m.put(infohash, expected_peer)
     peers = m.get(infohash)
     self.assertEqual(1, len(peers))
     m._cleanup(infohash, expected_peer)
     c.set(8)
     m._cleanup(infohash, expected_peer)
     peers = m.get(infohash)
     self.assertEqual(1, len(peers))
     c.set(9)
     m._cleanup(infohash, expected_peer)
     peers = m.get(infohash)
     self.assertEqual(0, len(peers))
     monkey_patcher.restore()
Example #19
    def setUp(self):
        self.clock = Clock()
        self.monkey_patcher = MonkeyPatcher()
        self.monkey_patcher.addPatch(rate_limiter.time, "time", self.clock)
        self.monkey_patcher.patch()

        self.address = ("127.0.0.1", 55)
        self.query = Query()
        self.query.rpctype = "ping"
        self.query._from = 15
        self.query._transaction_id = 99
        self.packet = krpc_coder.encode(self.query)
        # Patch in hardcoded values for the bandwidth
        # limits so that changing the constants will
        # not affect the usefulness of this test case
        # (The global bandwidth is set to 3 standard ping queries)
        # (The per user bandwidth is set to 1 standard ping query)
        self.monkey_patcher.addPatch(rate_limiter.constants,
                                     "global_bandwidth_rate",
                                     3 * len(self.packet))
        self.monkey_patcher.addPatch(rate_limiter.constants,
                                     "host_bandwidth_rate",
                                     1 * len(self.packet))
        self.monkey_patcher.patch()
Example #20
def get_inotify_module():
    # Until Twisted #9579 is fixed, the Docker check just screws things up.
    # Disable it.
    monkey = MonkeyPatcher()
    monkey.addPatch(runtime.platform, "isDocker", lambda: False)
    return monkey.runWithPatches(_get_inotify_module)
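runWithPatches applies the queued patches, calls the function, and restores the originals afterwards, even if the call raises, so nothing leaks into surrounding code. A minimal sketch of that behaviour (Settings and report are illustrative names, not from the source):

from twisted.python.monkey import MonkeyPatcher

class Settings:
    debug = False

def report():
    return Settings.debug

monkey = MonkeyPatcher()
monkey.addPatch(Settings, "debug", True)

# The patch is in effect only for the duration of the call.
assert monkey.runWithPatches(report) is True
assert Settings.debug is False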
Example #21
 def setUp(self):
     self.clock = Clock()
     self.monkey_patcher = MonkeyPatcher()
     self.monkey_patcher.addPatch(rate_limiter.time, "time", self.clock)
     self.monkey_patcher.patch()
Example #22
 def _persist_patch(self, obj, attribute, value):
     monkey_patch = MonkeyPatcher((obj, attribute, value))
     self._persist_patches.append(monkey_patch)
     monkey_patch.patch()
     return monkey_patch
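A matching teardown helper (hypothetical, not shown in the source) would restore these persisted patches, undoing them in reverse order so later patches unwind before earlier ones:

 def _restore_persisted_patches(self):
     # Undo in reverse order: the most recently applied patch first.
     while self._persist_patches:
         self._persist_patches.pop().restore()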
Example #23
    def test_no_retry_authentication(self):
        """
        The API object returned by ``cinder_from_configuration`` will retry
        authentication even when initial authentication attempts fail.
        """
        import twisted.web.http
        self.patch(twisted.web.http.HTTPChannel, 'checkPersistence',
                   lambda self, request, version: False)
        patch = MonkeyPatcher()
        patch.addPatch(twisted.web.http.HTTPChannel, 'connectionMade',
                       lambda self: self.transport.loseConnection())
        self.addCleanup(patch.restore)
        mimic_starting = mimic_for_test(test_case=self)

        def build_api(listening_port):
            backend, api_args = backend_and_api_args_from_configuration({
                "backend":
                "openstack",
                "auth_plugin":
                "rackspace",
                "region":
                "ORD",
                "username":
                "******",
                "api_key":
                "12345",
                "auth_url":
                "http://127.0.0.1:{}/identity/v2.0".format(
                    listening_port.getHost().port),
            })
            patch.patch()
            api = get_api(
                backend=backend,
                api_args=api_args,
                reactor=object(),
                cluster_id=make_cluster_id(TestTypes.FUNCTIONAL),
            )
            patch.restore()
            return api

        mimic_started = mimic_starting.addCallback(build_api)

        def list_volumes(api, force_connection_failure=False):
            if force_connection_failure:
                patch.patch()
            try:
                return api.list_volumes()
            finally:
                patch.restore()

        def check_failing_connection(api):
            d = deferToThread(
                lambda api: list_volumes(api, force_connection_failure=True),
                api,
            )
            d = self.assertFailure(d, ConnectFailure)
            # return the api for further testing.
            d = d.addCallback(lambda failure_instance: api)
            return d

        listing_volumes1 = mimic_started.addCallback(check_failing_connection)

        def check_successful_connection(api):
            d = deferToThread(
                lambda api: list_volumes(api, force_connection_failure=False),
                api,
            )
            d = d.addCallback(lambda result: self.assertEqual([], result))
            return d

        finishing = listing_volumes1.addCallback(check_successful_connection)

        return finishing
Example #24
from twisted.trial import unittest
from twisted.python.monkey import MonkeyPatcher

from mdht import contact
from mdht import constants
from mdht.coding import basic_coder
from mdht.test.utils import Clock

monkey_patcher = MonkeyPatcher()


class NodeTestCase(unittest.TestCase):
    def setUp(self):
        # Reach into the contact module and replace
        # its time function with a custom one
        self.clock = Clock()
        monkey_patcher.addPatch(contact, "time", self.clock)
        monkey_patcher.patch()

    def tearDown(self):
        # Restore the original time function
        monkey_patcher.restore()

    def test_distance(self):
        node_ids1 = [0, 1024, 2**150, 2**159 + 124, 2**34 - 58]
        node_ids2 = [0, 857081, 6**7, 8**9 + 7**3, 4**8 + 9**10 + 18]
        for id1 in node_ids1:
            for id2 in node_ids2:
                n = contact.Node(id1, ("127.0.0.1", 8000))
                self.assertEqual(id1 ^ id2, n.distance(id2))
                n = contact.Node(id2, ("127.0.0.1", 8000))
                self.assertEqual(id1 ^ id2, n.distance(id1))
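For intuition, the distance being verified is the plain XOR of the two node IDs; a worked case using values from the lists above (858105 is computed by hand, so treat it as an illustration):

id1, id2 = 1024, 857081
# 857081 has bit 10 clear, so XOR with 1024 (== 2**10) simply sets that bit.
assert id1 ^ id2 == 858105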
Example #25
 def setUp(self):
     self.testObject = TestObj()
     self.originalObject = TestObj()
     self.monkeyPatcher = MonkeyPatcher()
Example #26
    def _build_and_test_api(self, listening_port):
        """
        Build the CinderBlockDeviceAPI configured to connect to the Mimic
        server at ``listening_port``.
        Patch twisted.web to force the mimic server to drop incoming
        connections.
        And attempt to interact with the disabled API server first and then
        after re-enabling it to show that the API will re-authenticate even
        after an initial failure.
        """
        import twisted.web.http
        patch = MonkeyPatcher()
        patch.addPatch(twisted.web.http.HTTPChannel, 'connectionMade',
                       lambda self: self.transport.loseConnection())
        self.addCleanup(patch.restore)
        backend, api_args = backend_and_api_args_from_configuration({
            "backend":
            "openstack",
            "auth_plugin":
            "rackspace",
            "region":
            "ORD",
            "username":
            "******",
            "api_key":
            "12345",
            "auth_url":
            "http://127.0.0.1:{}/identity/v2.0".format(
                listening_port.getHost().port),
        })
        # Cause the Mimic server to close incoming connections
        patch.patch()
        api = get_api(
            backend=backend,
            api_args=api_args,
            reactor=object(),
            cluster_id=make_cluster_id(TestTypes.FUNCTIONAL),
        )
        # List volumes with API patched to close incoming connections.
        try:
            result = api.list_volumes()
        except ConnectFailure:
            # Can't use self.assertRaises here because that would call the
            # function in the main thread.
            pass
        else:
            self.fail('ConnectFailure was not raised. '
                      'Got {!r} instead.'.format(result))
        finally:
            # Re-enable the Mimic server.
            # The API operations that follow should succeed.
            patch.restore()

        # List volumes with API re-enabled
        result = api.list_volumes()
        self.assertEqual([], result)

        # Close the connection from the client side so that the mimic server
        # can close down without leaving behind lingering persistent HTTP
        # channels which cause dirty reactor errors.
        # XXX: This is gross. Perhaps we need ``IBlockDeviceAPI.close``
        (api.cinder_volume_manager._original._client_v2._cinder_volumes.api.
         client.session.session.close())
Example #27
    def build(self, projectName, projectURL, sourceURL, packagePath,
              outputPath):
        """
        Call pydoctor's entry point with options which will generate HTML
        documentation for the specified package's API.

        @type projectName: C{str}
        @param projectName: The name of the package for which to generate
            documentation.

        @type projectURL: C{str}
        @param projectURL: The location (probably an HTTP URL) of the project
            on the web.

        @type sourceURL: C{str}
        @param sourceURL: The location (probably an HTTP URL) of the root of
            the source browser for the project.

        @type packagePath: L{FilePath}
        @param packagePath: The path to the top-level of the package named by
            C{projectName}.

        @type outputPath: L{FilePath}
        @param outputPath: An existing directory to which the generated API
            documentation will be written.
        """
        intersphinxes = []

        for intersphinx in intersphinxURLs:
            intersphinxes.append("--intersphinx")
            intersphinxes.append(intersphinx)

        # Super awful monkeypatch that will selectively use our templates.
        from pydoctor.templatewriter import util
        originalTemplatefile = util.templatefile

        def templatefile(filename):

            if filename in ["summary.html", "index.html", "common.html"]:
                twistedPythonDir = FilePath(__file__).parent()
                templatesDir = twistedPythonDir.child("_pydoctortemplates")
                return templatesDir.child(filename).path
            else:
                return originalTemplatefile(filename)

        monkeyPatch = MonkeyPatcher((util, "templatefile", templatefile))
        monkeyPatch.patch()

        from pydoctor.driver import main

        args = [
            u"--project-name",
            projectName,
            u"--project-url",
            projectURL,
            u"--system-class",
            u"twisted.python._pydoctor.TwistedSystem",
            u"--project-base-dir",
            packagePath.parent().path,
            u"--html-viewsource-base",
            sourceURL,
            u"--add-package",
            packagePath.path,
            u"--html-output",
            outputPath.path,
            u"--html-write-function-pages",
            u"--quiet",
            u"--make-html",
        ] + intersphinxes
        args = [arg.encode("utf-8") for arg in args]
        main(args)

        monkeyPatch.restore()
Example #28
if '__pypy__' in sys.modules:
    # try to use psycopg2ct if we are on PyPy
    try:
        from psycopg2ct import compat
        compat.register()

        # monkey patch so that Storm does not crash on register type
        import psycopg2
        psycopg2._psycopg = object

        class _psycopg:
            UNICODEARRAY = psycopg2.extensions.UNICODEARRAY

        from twisted.python.monkey import MonkeyPatcher
        monkey_patcher = MonkeyPatcher(
            (psycopg2, '_psycopg', _psycopg))
        monkey_patcher.patch()

    except ImportError:
        raise RuntimeError(
            'You are trying to use PostgreSQL with PyPy. The regular '
            'psycopg2 module does not work with PyPy; install '
            'psycopg2ct to use psycopg2 with PyPy'
        )

from storm.database import URI
from storm.zope.interfaces import IZStorm
from storm.zope.zstorm import global_zstorm
from twisted.python.threadpool import ThreadPool
from zope.component import provideUtility, getUtility
from storm.twisted.transact import Transactor, DisconnectionError