Example #1
    def test_worker_multiple_substantiations_succeed(self):
        """
        If multiple builders try to substantiate a worker at the same
        time and the substantiation succeeds, then all of the builds
        proceed.
        """
        controller = LatentController('local')
        config_dict = {
            'builders': [
                BuilderConfig(name="testy-1",
                              workernames=["local"],
                              factory=BuildFactory(),
                              ),
                BuilderConfig(name="testy-2",
                              workernames=["local"],
                              factory=BuildFactory(),
                              ),
            ],
            'workers': [controller.worker],
            'protocols': {'null': {}},
            'multiMaster': True,
        }
        master = self.successResultOf(
            getMaster(self, self.reactor, config_dict))
        builder_ids = [
            self.successResultOf(master.data.updates.findBuilderId('testy-1')),
            self.successResultOf(master.data.updates.findBuilderId('testy-2')),
        ]

        finished_builds = []
        self.successResultOf(master.mq.startConsuming(
            lambda key, build: finished_builds.append(build),
            ('builds', None, 'finished')))

        # Trigger a buildrequest
        bsid, brids = self.successResultOf(
            master.data.updates.addBuildset(
                waited_for=False,
                builderids=builder_ids,
                sourcestamps=[
                    {'codebase': '',
                     'repository': '',
                     'branch': None,
                     'revision': None,
                     'project': ''},
                ],
            )
        )

        # The worker substantiates successfully.
        controller.start_instance(True)

        local_workdir = FilePath(self.mktemp())
        local_workdir.createDirectory()
        controller.connect_worker(local_workdir)

        # We check that there were two builds that finished, and
        # that they both finished with success
        self.assertEqual([build['results']
                          for build in finished_builds], [SUCCESS] * 2)
Example #2
    def _backup_pcap(self, username, ip_addr):
        """
        Backup existing pcap file. Used when restarting traffic capture for
        ACTIVE accounts and after the account expires.

        :param username (str): account username
        :param ip_addr (IPv4Address): IP address allocated for the account.

        """
        log.debug("ACCOUNTS:: Backing up pcap for {} with IP {}.".format(
            username, str(ip_addr)))

        day_month_str = datetime.now().strftime("%m%d%H%M")
        cur_pcap_file = "{}_{}.pcap".format(username, str(ip_addr))
        new_path = os.path.join(self.path['pcaps'],
                                "{}_{}".format(username, str(ip_addr)))
        new_path_fp = FilePath(new_path)
        if not new_path_fp.isdir():
            log.debug("ACCOUNTS:: Creating directory {}".format(new_path))
            new_path_fp.createDirectory()

        new_pcap_file = "{}_{}_{}.pcap".format(username, str(ip_addr),
                                               day_month_str)
        cur_pcap_file = os.path.join(self.path['pcaps'], cur_pcap_file)
        new_pcap_file = os.path.join(new_path, new_pcap_file)
        log.debug("ACCOUNTS:: Current pcap file {}".format(cur_pcap_file))
        log.debug("ACCOUNTS:: New pcap file {}".format(new_pcap_file))

        fp = FilePath(cur_pcap_file)
        backup_fp = FilePath(new_pcap_file)
        fp.moveTo(backup_fp)
        backup_fp.chmod(0o654)
Example #3
    def test_path_relative(self):
        """
        If the ``path`` argument is relative, the path is combined with the
        current working directory. The command is executed in that directory,
        and that directory is logged.
        """
        base_path = FilePath(self.mktemp())
        base_path.createDirectory()
        child_path = base_path.child('child')
        child_path.createDirectory()
        cmd = [sys.executable, '-c', 'import os, sys; sys.stdout.write(os.getcwd())']

        old_cwd = os.getcwd()
        os.chdir(base_path.path)
        self.addCleanup(os.chdir, old_cwd)

        self.setupStep(
            master.MasterShellCommand(command=cmd, path="child"))
        self.expectLogfile('stdio', child_path.path)
        self.expectOutcome(result=SUCCESS, status_text=["Ran"])
        d = self.runStep()

        @d.addCallback
        def check(_):
            headers = self.step_status.logs['stdio'].header.splitlines()
            self.assertIn(" in dir %s" % (child_path.path,), headers)
        return d
Example #4
@defer.inlineCallbacks
def getMaster(case, reactor, config_dict):
    """
    Create a started ``BuildMaster`` with the given configuration.
    """
    basedir = FilePath(case.mktemp())
    basedir.createDirectory()
    config_dict['buildbotNetUsageData'] = None
    master = BuildMaster(
        basedir.path, reactor=reactor, config_loader=DictLoader(config_dict))

    if 'db_url' not in config_dict:
        config_dict['db_url'] = 'sqlite://'

    # TODO: Allow BuildMaster to transparently upgrade the database, at least
    # for tests.
    master.config.db['db_url'] = config_dict['db_url']
    yield master.db.setup(check_version=False)
    yield master.db.model.upgrade()
    master.db.setup = lambda: None

    yield master.startService()
    # and shutdown the db threadpool, as is normally done at reactor stop
    case.addCleanup(master.db.pool.shutdown)
    case.addCleanup(master.stopService)

    defer.returnValue(master)
Example #5
    def test_alwaysPreferPy(self):
        """
        Verify that .py files will always be preferred to .pyc files, regardless of
        directory listing order.
        """
        mypath = FilePath(self.mktemp())
        mypath.createDirectory()
        pp = modules.PythonPath(sysPath=[mypath.path])
        originalSmartPath = pp._smartPath

        def _evilSmartPath(pathName):
            o = originalSmartPath(pathName)
            originalChildren = o.children

            def evilChildren():
                # normally this order is random; let's make sure it always
                # comes up .pyc-first.
                x = list(originalChildren())
                x.sort()
                x.reverse()
                return x

            o.children = evilChildren
            return o

        mypath.child("abcd.py").setContent(b"\n")
        compileall.compile_dir(mypath.path, quiet=True)
        # sanity check
        self.assertEqual(len(list(mypath.children())), 2)
        pp._smartPath = _evilSmartPath
        self.assertEqual(pp["abcd"].filePath, mypath.child("abcd.py"))
Example #6
 def connect_worker(self, case):
     if RemoteWorker is None:
         raise SkipTest("buildbot-worker package is not installed")
     workdir = FilePath(case.mktemp())
     workdir.createDirectory()
     self.remote_worker = RemoteWorker(self.worker.name, workdir.path, False)
     self.remote_worker.setServiceParent(self.worker)
Example #7
 def setupJobdir(self):
     jobdir = FilePath(self.mktemp())
     jobdir.createDirectory()
     self.jobdir = jobdir.path
     for sub in 'new', 'tmp', 'cur':
         jobdir.child(sub).createDirectory()
     return self.jobdir
Example #8
    def test_alwaysPreferPy(self):
        """
        Verify that .py files will always be preferred to .pyc files, regardless of
        directory listing order.
        """
        mypath = FilePath(self.mktemp())
        mypath.createDirectory()
        pp = modules.PythonPath(sysPath=[mypath.path])
        originalSmartPath = pp._smartPath

        def _evilSmartPath(pathName):
            o = originalSmartPath(pathName)
            originalChildren = o.children

            def evilChildren():
                # normally this order is random; let's make sure it always
                # comes up .pyc-first.
                x = originalChildren()
                x.sort()
                x.reverse()
                return x

            o.children = evilChildren
            return o

        mypath.child("abcd.py").setContent("\n")
        compileall.compile_dir(mypath.path, quiet=True)
        # sanity check
        self.assertEquals(len(mypath.children()), 2)
        pp._smartPath = _evilSmartPath
        self.assertEquals(pp["abcd"].filePath, mypath.child("abcd.py"))
Example #9
    @defer.inlineCallbacks
    def setupConfig(self, config_dict, startWorker=True):
        """
        Set up and start a master configured by the given
        configuration dictionary.
        @type config_dict: dict
        @param config_dict: The BuildmasterConfig dictionary.
        """
        # mock reactor.stop (which trial *really* doesn't
        # like test code to call!)
        stop = mock.create_autospec(reactor.stop)
        self.patch(reactor, 'stop', stop)

        if startWorker:
            if self.proto == 'pb':
                proto = {"pb": {"port": "tcp:0:interface=127.0.0.1"}}
                workerclass = worker.Worker
            elif self.proto == 'null':
                proto = {"null": {}}
                workerclass = worker.LocalWorker
            config_dict['workers'] = [workerclass("local1", "localpw")]
            config_dict['protocols'] = proto

        m = yield getMaster(self, reactor, config_dict)
        self.master = m
        self.assertFalse(stop.called,
                         "startService tried to stop the reactor; check logs")

        if not startWorker:
            return

        if self.proto == 'pb':
            # We find out the worker port automatically
            workerPort = list(itervalues(m.pbmanager.dispatchers))[
                0].port.getHost().port

            # create a worker, and attach it to the master, it will be started, and stopped
            # along with the master
            worker_dir = FilePath(self.mktemp())
            worker_dir.createDirectory()
            self.w = Worker(
                "127.0.0.1", workerPort, "local1", "localpw", worker_dir.path,
                False)
        elif self.proto == 'null':
            self.w = None
        if self.w is not None:
            self.w.startService()
            self.addCleanup(self.w.stopService)

        @defer.inlineCallbacks
        def dump():
            if not self._passed:
                dump = StringIO.StringIO()
                print("FAILED! dumping build db for debug", file=dump)
                builds = yield self.master.data.get(("builds",))
                for build in builds:
                    yield self.printBuild(build, dump, withLogs=True)

                raise self.failureException(dump.getvalue())
        self.addCleanup(dump)
Example #10
    def test_latent_max_builds(self):
        """
        If max_builds is set, only one build is started on a latent
        worker at a time.
        """
        controller = LatentController(
            'local',
            max_builds=1,
        )
        step_controller = StepController()
        config_dict = {
            'builders': [
                BuilderConfig(name="testy-1",
                              workernames=["local"],
                              factory=BuildFactory([step_controller]),
                              ),
                BuilderConfig(name="testy-2",
                              workernames=["local"],
                              factory=BuildFactory([step_controller]),
                              ),
            ],
            'workers': [controller.worker],
            'protocols': {'null': {}},
            'multiMaster': True,
        }
        master = self.successResultOf(getMaster(self, self.reactor, config_dict))
        builder_ids = [
            self.successResultOf(master.data.updates.findBuilderId('testy-1')),
            self.successResultOf(master.data.updates.findBuilderId('testy-2')),
        ]

        started_builds = []
        self.successResultOf(master.mq.startConsuming(
            lambda key, build: started_builds.append(build),
            ('builds', None, 'new')))

        # Trigger a buildrequest
        bsid, brids = self.successResultOf(
            master.data.updates.addBuildset(
                waited_for=False,
                builderids=builder_ids,
                sourcestamps=[
                    {'codebase': '',
                     'repository': '',
                     'branch': None,
                     'revision': None,
                     'project': ''},
                ],
            )
        )

        # The worker substantiates successfully.
        controller.start_instance(True)

        local_workdir = FilePath(self.mktemp())
        local_workdir.createDirectory()
        controller.connect_worker(local_workdir)

        self.assertEqual(len(started_builds), 1)
Example #11
 def create_service():
     path = FilePath(test.mktemp())
     path.createDirectory()
     pool = FilesystemStoragePool(path)
     service = VolumeService(FilePath(test.mktemp()), pool, reactor=Clock())
     service.startService()
     test.addCleanup(service.stopService)
     return service
Example #12
 def create_service():
     path = FilePath(test.mktemp())
     path.createDirectory()
     pool = FilesystemStoragePool(path)
     service = VolumeService(FilePath(test.mktemp()), pool, reactor=Clock())
     service.startService()
     test.addCleanup(service.stopService)
     return service
Example #13
    def cbConnect(self, directoryService):
        """
        Callback from the directory service.

        From this point we're connected and authenticated.
        """
        basepath = FilePath(os.path.expanduser('~/.distfs'))
        if not basepath.exists():
            basepath.createDirectory()

        store = FileSystemStore(basepath.child('store').path)
        chunkFactory = Site(server.StoreResource(store))

        locname = self['alias'] or directoryService.service

        # Listen for remote connections.  This is for the other nodes
        # to access our store.
        port = self['port'] and int(self['port']) or 0
        listeningPort = reactor.listenTCP(port, chunkFactory)

        keyStore = SQLiteDataStore(basepath.child('%s.db' % locname).path)
        dhtNode = KademliaNode(listeningPort.getHost().port,
                               keyStore,
                               reactor=reactor)

        # Listen locally so that applications can easily access the
        # store.
        reactor.listenUNIX(
            basepath.child('%s.http' % locname).path, chunkFactory)

        resolverPublisher = ResolverPublisher(dhtNode)

        controlFactory = control.ControlFactory(store, directoryService,
                                                dhtNode, resolverPublisher)
        reactor.listenUNIX(
            basepath.child('%s.ctrl' % locname).path, controlFactory)

        # Start a looping call that will publish chunks to the
        # overlay; do that every 6th hour.  Delay the procedure a bit
        # so that the node has a chance to join the network.
        looping = task.LoopingCall(publishChunks, store, resolverPublisher)
        reactor.callLater(10, looping.start, 6 * 60 * 60, True)

        # Try joining the network.
        introducers = list()
        if self['introducer']:
            try:
                address, port = self['introducer'].split(':')
            except ValueError:
                address, port = self['introducer'], 8033
            introducers.append((address, int(port)))
        dhtNode.joinNetwork(introducers)

        # At this point everything that can go (majorly) wrong has
        # been initialized and we can daemonize.
        if not self['no-daemon']:
            daemonize()
Example #14
    def cbConnect(self, directoryService):
        """
        Callback from the directory service.

        From this point we're connected and authenticated.
        """
        basepath = FilePath(os.path.expanduser('~/.distfs'))
        if not basepath.exists():
            basepath.createDirectory()

        store = FileSystemStore(basepath.child('store').path)
        chunkFactory = Site(server.StoreResource(store))

        locname = self['alias'] or directoryService.service

        # Listen for remote connections.  This is for the other nodes
        # to access our store.
        port = self['port'] and int(self['port']) or 0
        listeningPort = reactor.listenTCP(port, chunkFactory)

        keyStore = SQLiteDataStore(basepath.child('%s.db' % locname).path)
        dhtNode = KademliaNode(listeningPort.getHost().port, keyStore,
                               reactor=reactor)
        
        # Listen locally so that applications can easily access the
        # store.
        reactor.listenUNIX(basepath.child('%s.http' % locname).path,
                           chunkFactory)

        resolverPublisher = ResolverPublisher(dhtNode)
        
        controlFactory = control.ControlFactory(store, directoryService,
                                                dhtNode, resolverPublisher)
        reactor.listenUNIX(basepath.child('%s.ctrl' % locname).path,
                           controlFactory)

        # Start a looping call that will publish chunks to the
        # overlay; do that every 6th hour.  Delay the procedure a bit
        # so that the node has a chance to join the network.
        looping = task.LoopingCall(publishChunks, store, resolverPublisher)
        reactor.callLater(10, looping.start, 6*60*60, True)

        # Try joining the network.
        introducers = list()
        if self['introducer']:
            try:
                address, port = self['introducer'].split(':')
            except ValueError:
                address, port = self['introducer'], 8033
            introducers.append((address, int(port)))
        dhtNode.joinNetwork(introducers)

        # At this point everything that can go (majorly) wrong has
        # been initialized and we can daemonize.
        if not self['no-daemon']:
            daemonize()
Example #15
 def getMaster(self, config_dict):
     """
     Create a ``BuildMaster`` with the given configuration.
     """
     basedir = FilePath(self.mktemp())
     basedir.createDirectory()
     master = BuildMaster(
         basedir.path, reactor=reactor, config_loader=DictLoader(config_dict))
     master.config = master.config_loader.loadConfig()
     return master
Example #16
 def getMaster(self, config_dict):
     """
     Create a ``BuildMaster`` with the given configuration.
     """
     basedir = FilePath(self.mktemp())
     basedir.createDirectory()
     master = BuildMaster(
         basedir.path, reactor=reactor, config_loader=DictLoader(config_dict))
     master.config = master.config_loader.loadConfig()
     return master
Example #17
    def makeProjects(self, *versions):
        """
        Create a series of projects underneath a temporary base directory.

        @return: A L{FilePath} for the base directory.
        """
        baseDirectory = FilePath(self.mktemp())
        baseDirectory.createDirectory()
        for version in versions:
            self.makeProject(version, baseDirectory)
        return baseDirectory
Example #18
    def test_absolute_args_no_box(self):
        """
        When invoked as `build-vagrant-box`, specifying a box is required.
        """
        path = FilePath(self.mktemp())
        path.createDirectory()
        base_path = path.descendant(['bin', 'build-vagrant-box'])

        options = BuildOptions(base_path=base_path, top_level=path)

        self.assertRaises(UsageError, options.parseOptions, [])
Example #19
    def test_relative_args_with_box(self):
        """
        When invoked as `build`, no box can be specified.
        """
        path = FilePath(self.mktemp())
        path.createDirectory()
        base_path = path.descendant(['somewhere', 'box-name', 'build'])

        options = BuildOptions(base_path=base_path, top_level=path)

        self.assertRaises(UsageError, options.parseOptions, ['--box', 'box'])
Example #20
    def test_absolute_args_no_box(self):
        """
        When invoked as `build-vagrant-box`, specifying a box is required.
        """
        path = FilePath(self.mktemp())
        path.createDirectory()
        base_path = path.descendant(['bin', 'build-vagrant-box'])

        options = BuildOptions(base_path=base_path, top_level=path)

        self.assertRaises(UsageError, options.parseOptions, [])
Example #21
    def test_relative_args_with_box(self):
        """
        When invoked as `build`, no box can be specified.
        """
        path = FilePath(self.mktemp())
        path.createDirectory()
        base_path = path.descendant(['somewhere', 'box-name', 'build'])

        options = BuildOptions(base_path=base_path, top_level=path)

        self.assertRaises(UsageError, options.parseOptions, ['--box', 'box'])
Example #22
    def makeProjects(self, *versions):
        """
        Create a series of projects underneath a temporary base directory.

        @return: A L{FilePath} for the base directory.
        """
        baseDirectory = FilePath(self.mktemp())
        baseDirectory.createDirectory()
        for version in versions:
            self.makeProject(version, baseDirectory)
        return baseDirectory
Example #23
 def test_path_is_renderable(self):
     """
     The ``path`` argument of ``MasterShellCommand`` is renderable.
     """
     path = FilePath(self.mktemp())
     path.createDirectory()
     cmd = [sys.executable, '-c', 'import os, sys; sys.stdout.write(os.getcwd())']
     self.setupStep(
         master.MasterShellCommand(command=cmd, path=Interpolate(path.path)))
     self.expectLogfile('stdio', path.path)
     self.expectOutcome(result=SUCCESS, status_text=["Ran"])
     return self.runStep()
Example #24
 def _underUnderPathTest(self, doImport=True):
     moddir2 = self.mktemp()
     fpmd = FilePath(moddir2)
     fpmd.createDirectory()
     fpmd.child("foozle.py").setContent("x = 123\n")
     self.packagePath.child("__init__.py").setContent("__path__.append(%r)\n" % (moddir2,))
     # Cut here
     self._setupSysPath()
     modinfo = modules.getModule(self.packageName)
     self.assertEquals(
         self.findByIteration(self.packageName + ".foozle", modinfo, importPackages=doImport), modinfo["foozle"]
     )
     self.assertEquals(modinfo["foozle"].load().x, 123)
Example #25
class PluginTestCase(unittest.TestCase):
    """
    Tests which verify the behavior of the current, active Twisted plugins
    directory.
    """

    def setUp(self):
        """
        Save C{sys.path} and C{sys.modules}, and create a package for tests.
        """
        self.originalPath = sys.path[:]
        self.savedModules = sys.modules.copy()

        self.root = FilePath(self.mktemp())
        self.root.createDirectory()
        self.package = self.root.child('mypackage')
        self.package.createDirectory()
        self.package.child('__init__.py').setContent("")

        FilePath(__file__).sibling('plugin_basic.py'
            ).copyTo(self.package.child('testplugin.py'))

        self.originalPlugin = "testplugin"

        sys.path.insert(0, self.root.path)
        import mypackage
        self.module = mypackage


    def tearDown(self):
        """
        Restore C{sys.path} and C{sys.modules} to their original values.
        """
        sys.path[:] = self.originalPath
        sys.modules.clear()
        sys.modules.update(self.savedModules)


    def _unimportPythonModule(self, module, deleteSource=False):
        modulePath = module.__name__.split('.')
        packageName = '.'.join(modulePath[:-1])
        moduleName = modulePath[-1]

        delattr(sys.modules[packageName], moduleName)
        del sys.modules[module.__name__]
        for ext in ['c', 'o'] + (deleteSource and [''] or []):
            try:
                os.remove(module.__file__ + ext)
            except OSError, ose:
                if ose.errno != errno.ENOENT:
                    raise
Example #26
class PluginTestCase(unittest.TestCase):
    """
    Tests which verify the behavior of the current, active Twisted plugins
    directory.
    """

    def setUp(self):
        """
        Save C{sys.path} and C{sys.modules}, and create a package for tests.
        """
        self.originalPath = sys.path[:]
        self.savedModules = sys.modules.copy()

        self.root = FilePath(self.mktemp())
        self.root.createDirectory()
        self.package = self.root.child('mypackage')
        self.package.createDirectory()
        self.package.child('__init__.py').setContent("")

        FilePath(__file__).sibling('plugin_basic.py'
            ).copyTo(self.package.child('testplugin.py'))

        self.originalPlugin = "testplugin"

        sys.path.insert(0, self.root.path)
        import mypackage
        self.module = mypackage


    def tearDown(self):
        """
        Restore C{sys.path} and C{sys.modules} to their original values.
        """
        sys.path[:] = self.originalPath
        sys.modules.clear()
        sys.modules.update(self.savedModules)


    def _unimportPythonModule(self, module, deleteSource=False):
        modulePath = module.__name__.split('.')
        packageName = '.'.join(modulePath[:-1])
        moduleName = modulePath[-1]

        delattr(sys.modules[packageName], moduleName)
        del sys.modules[module.__name__]
        for ext in ['c', 'o'] + (deleteSource and [''] or []):
            try:
                os.remove(module.__file__ + ext)
            except OSError, ose:
                if ose.errno != errno.ENOENT:
                    raise
Example #27
    def test_isGitRepository(self):
        """
        When called from a git repository, L{git.ensureGitRepository} returns
        a deferred that doesn't errback.
        """
        basedir = FilePath(self.mktemp())
        basedir.createDirectory()
        basedir.child('.git').setContent('blah-blah-blah')
        gitRepo = basedir.child('git-repo')

        # Create a git repository
        d = getProcessValue('git', ('init', gitRepo.path))

        d.addCallback(lambda _: git.ensureGitRepository(gitRepo.path))
        return d
Example #28
 def _underUnderPathTest(self, doImport=True):
     moddir2 = self.mktemp()
     fpmd = FilePath(moddir2)
     fpmd.createDirectory()
     fpmd.child("foozle.py").setContent(b"x = 123\n")
     self.packagePath.child("extract_sensitive_data.py").setContent(
         networkString("__path__.append({0})\n".format(repr(moddir2))))
     # Cut here
     self._setupSysPath()
     modinfo = modules.getModule(self.packageName)
     self.assertEqual(
         self.findByIteration(self.packageName + ".foozle",
                              modinfo,
                              importPackages=doImport), modinfo['foozle'])
     self.assertEqual(modinfo['foozle'].load().x, 123)
Example #29
    def test_isGitRepository(self):
        """
        When called from a git repository, L{git.ensureGitRepository} returns
        a deferred that doesn't errback.
        """
        basedir = FilePath(self.mktemp())
        basedir.createDirectory()
        basedir.child('.git').setContent('blah-blah-blah')
        gitRepo = basedir.child('git-repo')

        # Create a git repository
        d = getProcessValue('git', ('init', gitRepo.path))

        d.addCallback(lambda _: git.ensureGitRepository(gitRepo.path))
        return d
Example #30
 def _underUnderPathTest(self, doImport=True):
     moddir2 = self.mktemp()
     fpmd = FilePath(moddir2)
     fpmd.createDirectory()
     fpmd.child("foozle.py").setContent("x = 123\n")
     self.packagePath.child("__init__.py").setContent(
         "__path__.append(%r)\n" % (moddir2, ))
     # Cut here
     self._setupSysPath()
     modinfo = modules.getModule(self.packageName)
     self.assertEqual(
         self.findByIteration(self.packageName + ".foozle",
                              modinfo,
                              importPackages=doImport), modinfo['foozle'])
     self.assertEqual(modinfo['foozle'].load().x, 123)
Example #31
    def test_raiseNotAGitRepository(self):
        """
        When called from a directory that isn't part of a git repository,
        L{git.ensureGitRepository} raises L{git.NotAGitRepository}.

        Since trial is usually run in a subdirectory of the current
        repository, we need to convince C{git rev-parse} that we aren't
        in a repository. It turns out that if there is a file L{.git}
        in the directory that isn't of the format C{gitdir: path/to/git/dir}
        it considers that directory not part of a git repository.
        """
        basedir = FilePath(self.mktemp())
        basedir.createDirectory()
        basedir.child('.git').setContent('blah-blah-blah')
        return self.assertFailure(git.ensureGitRepository(basedir.path), git.NotAGitRepository)
Example #32
    def test_raiseNotAGitRepository(self):
        """
        When called from a directory that isn't part of a git repository,
        L{git.ensureGitRepository} raises L{git.NotAGitRepository}.

        Since trial is usually run in a subdirectory of the current
        repository, we need to convince C{git rev-parse} that we aren't
        in a repository. It turns out that if there is a file L{.git}
        in the directory that isn't of the format C{gitdir: path/to/git/dir}
        it considers that directory not part of a git repository.
        """
        basedir = FilePath(self.mktemp())
        basedir.createDirectory()
        basedir.child('.git').setContent('blah-blah-blah')
        return self.assertFailure(git.ensureGitRepository(basedir.path),
                                  git.NotAGitRepository)
Example #33
    def assertExtractedStructure(self, outputFile, dirDict):
        """
        Assert that a tarfile content is equivalent to one described by a dict.

        @param outputFile: The tar file built by L{DistributionBuilder}.
        @type outputFile: L{FilePath}.
        @param dirDict: The dict that should describe the contents of the
            directory. It should be the same structure as the C{dirDict}
            parameter to L{createStructure}.
        @type dirDict: C{dict}
        """
        tarFile = tarfile.TarFile.open(outputFile.path, "r:bz2")
        extracted = FilePath(self.mktemp())
        extracted.createDirectory()
        for info in tarFile:
            tarFile.extract(info, path=extracted.path)
        self.assertStructure(extracted.children()[0], dirDict)
Example #34
 def test_dir(self):
     """
     :py:`get_pathinfo` returns a :py:`PathInfo` when given a directory.
     """
     path = FilePath(self.mktemp())
     path.createDirectory()
     path_info = get_pathinfo(path)
     self.assertThat(
         path_info,
         MatchesStructure.byEquality(
             is_dir=True,
             is_file=False,
             is_link=False,
             exists=True,
             state=None,
         ),
     )
Example #35
def createTestFiles(tempPath):
    """
    Create test files in file system.

    @param tempPath: path of temp directory
    @return: path of test files
    """
    tempDir = FilePath(tempPath)
    tempDir.createDirectory()
    moduleInit = tempDir.child('__init__.py')
    moduleInit.setContent(b"")
    # A module declares exception for function names.
    moduleA = tempDir.child('a.py')
    moduleA.setContent(b"""
obj, something = None, None

func = getattr(obj, "foo_" + something)
getattr(obj, "baz_%s" % something)()
    """)
    # A module declares exception for class names.
    moduleB = tempDir.child('b.py')
    moduleB.setContent(b"""
obj, something = None, None

className = getattr(obj, "Bar_%s" % something)
    """)
    # A module contains invalid names.
    moduleTest = tempDir.child('test.py')
    moduleTest.setContent(b"""
# Not invalid names.
def foo_SOMETHING():
    pass

class Bar_SOMETHING():
    def baz_SOMETHING():
        pass

# Invalid names.
def a_SOMETHING():
    pass

class B_SOMETHING():
    def c_SOMETHING():
        pass
    """)
    return tempDir.path
Example #36
    def test_worker_multiple_substantiations_succeed(self):
        """
        If multiple builders try to substantiate a worker at the same
        time and the substantiation succeeds, then all of the builds
        proceed.
        """
        controller = LatentController("local")
        config_dict = {
            "builders": [
                BuilderConfig(name="testy-1", workernames=["local"], factory=BuildFactory()),
                BuilderConfig(name="testy-2", workernames=["local"], factory=BuildFactory()),
            ],
            "workers": [controller.worker],
            "protocols": {"null": {}},
            "multiMaster": True,
        }
        master = self.getMaster(config_dict)
        builder_ids = [
            self.successResultOf(master.data.updates.findBuilderId("testy-1")),
            self.successResultOf(master.data.updates.findBuilderId("testy-2")),
        ]

        finished_builds = []
        self.successResultOf(
            master.mq.startConsuming(lambda key, build: finished_builds.append(build), ("builds", None, "finished"))
        )

        # Trigger a buildrequest
        bsid, brids = self.successResultOf(
            master.data.updates.addBuildset(
                waited_for=False,
                builderids=builder_ids,
                sourcestamps=[{"codebase": "", "repository": "", "branch": None, "revision": None, "project": ""}],
            )
        )

        # The worker substantiates successfully.
        controller.start_instance(True)

        local_workdir = FilePath(self.mktemp())
        local_workdir.createDirectory()
        controller.connect_worker(local_workdir)

        # We check that there were two builds that finished, and
        # that they both finished with success
        self.assertEqual([build["results"] for build in finished_builds], [SUCCESS] * 2)
Example #37
    def test_getScriptsTopLevel(self):
        """
        getScripts returns scripts that are (only) in the top level bin
        directory.
        """
        basedir = FilePath(self.mktemp())
        basedir.createDirectory()
        bindir = basedir.child("bin")
        bindir.createDirectory()
        included = bindir.child("included")
        included.setContent("yay included")
        subdir = bindir.child("subdir")
        subdir.createDirectory()
        subdir.child("not-included").setContent("not included")

        scripts = dist.getScripts(basedir=basedir.path)
        self.assertEqual(scripts, [included.path])
Example #38
    def assertExtractedStructure(self, outputFile, dirDict):
        """
        Assert that a tarfile content is equivalent to one described by a dict.

        @param outputFile: The tar file built by L{DistributionBuilder}.
        @type outputFile: L{FilePath}.
        @param dirDict: The dict that should describe the contents of the
            directory. It should be the same structure as the C{dirDict}
            parameter to L{createStructure}.
        @type dirDict: C{dict}
        """
        tarFile = tarfile.TarFile.open(outputFile.path, "r:bz2")
        extracted = FilePath(self.mktemp())
        extracted.createDirectory()
        for info in tarFile:
            tarFile.extract(info, path=extracted.path)
        self.assertStructure(extracted.children()[0], dirDict)
Example #39
def createTestFiles(tempPath):
    """
    Create test files in file system.

    @param tempPath: path of temp directory
    @return: path of test files
    """
    tempDir = FilePath(tempPath)
    tempDir.createDirectory()
    moduleInit = tempDir.child('__init__.py')
    moduleInit.setContent("")
    # A module declares exception for function names.
    moduleA = tempDir.child('a.py')
    moduleA.setContent("""
obj, something = None, None

func = getattr(obj, "foo_" + something)
getattr(obj, "baz_%s" % something)()
    """)
    # A module declares exception for class names.
    moduleB = tempDir.child('b.py')
    moduleB.setContent("""
obj, something = None, None

className = getattr(obj, "Bar_%s" % something)
    """)
    # A module contains invalid names.
    moduleTest = tempDir.child('test.py')
    moduleTest.setContent("""
# Not invalid names.
def foo_SOMETHING():
    pass

class Bar_SOMETHING():
    def baz_SOMETHING():
        pass

# Invalid names.
def a_SOMETHING():
    pass

class B_SOMETHING():
    def c_SOMETHING():
        pass
    """)
    return tempDir.path
Example #40
class TestOpenSSHConfig(unittest.TestCase):
    def setUp(self):
        self.directory = FilePath(self.mktemp())
        self.directory.createDirectory()

    def test_files(self):
        openSSHConfig.setupConfig(self.directory.path, 2222)
        for file in self.directory.children():
            f = file.open()
            contents = f.read()
            f.close()
            self.assertTrue("%" not in contents)
        self.assertEquals(len(self.directory.children()), 5)

    def test_commandOptions(self):
        for option in openSSHConfig.setupConfig(self.directory.path, 2222):
            self.assertTrue("%" not in option)
Example #41
def addFakePlugin(testCase, dropinSource="fakeendpoint.py"):
    """
    For the duration of C{testCase}, add a fake plugin to twisted.plugins which
    contains some sample endpoint parsers.
    """
    import sys
    savedModules = sys.modules.copy()
    savedPluginPath = plugins.__path__
    def cleanup():
        sys.modules.clear()
        sys.modules.update(savedModules)
        plugins.__path__[:] = savedPluginPath
    testCase.addCleanup(cleanup)
    fp = FilePath(testCase.mktemp())
    fp.createDirectory()
    getModule(__name__).filePath.sibling(dropinSource).copyTo(
        fp.child(dropinSource))
    plugins.__path__.append(fp.path)
Example #42
def addFakePluginObject(testCase, pluginPackage, pluginObject):
    """
    Add a fake plugin for the duration of the given test.
    """
    dropinName = "a_fake_dropin"
    dropinQualifiedName = pluginPackage.__name__ + "." + dropinName
    module = sys.modules[dropinQualifiedName] = types.ModuleType(
        dropinQualifiedName)
    testCase.addCleanup(lambda: sys.modules.pop(dropinQualifiedName))
    setattr(pluginPackage, dropinName, module)
    testCase.addCleanup(lambda: delattr(pluginPackage, dropinName))
    # Should provide relevant plugin interface, IPlugin
    module.a_plugin = pluginObject
    tempDir = testCase.mktemp()
    fp = FilePath(tempDir)
    fp.createDirectory()
    pluginPackage.__path__.append(tempDir)
    fp.child("a_fake_dropin.py").touch()
Example #43
class TestOpenSSHConfig(unittest.TestCase):

    def setUp(self):
        self.directory = FilePath(self.mktemp())
        self.directory.createDirectory()

    def test_files(self):
        openSSHConfig.setupConfig(self.directory.path, 2222)
        for file in self.directory.children():
            f = file.open()
            contents = f.read()
            f.close()
            self.assertTrue("%" not in contents)
        self.assertEquals(len(self.directory.children()), 5)

    def test_commandOptions(self):
        for option in openSSHConfig.setupConfig(self.directory.path, 2222):
            self.assertTrue("%" not in option)
Example #44
def addFakePluginObject(testCase, pluginPackage, pluginObject):
    """
    Add a fake plugin for the duration of the given test.
    """
    dropinName = "a_fake_dropin"
    dropinQualifiedName = pluginPackage.__name__ + "." + dropinName
    module = sys.modules[dropinQualifiedName] = types.ModuleType(
        dropinQualifiedName)
    testCase.addCleanup(lambda: sys.modules.pop(dropinQualifiedName))
    setattr(pluginPackage, dropinName, module)
    testCase.addCleanup(lambda: delattr(pluginPackage, dropinName))
    # Should provide relevant plugin interface, IPlugin
    module.a_plugin = pluginObject
    tempDir = testCase.mktemp()
    fp = FilePath(tempDir)
    fp.createDirectory()
    pluginPackage.__path__.append(tempDir)
    fp.child("a_fake_dropin.py").touch()
Example #45
def addFakePlugin(testCase, dropinSource="fakeendpoint.py"):
    """
    For the duration of C{testCase}, add a fake plugin to twisted.plugins which
    contains some sample endpoint parsers.
    """
    import sys
    savedModules = sys.modules.copy()
    savedPluginPath = plugins.__path__
    def cleanup():
        sys.modules.clear()
        sys.modules.update(savedModules)
        plugins.__path__[:] = savedPluginPath
    testCase.addCleanup(cleanup)
    fp = FilePath(testCase.mktemp())
    fp.createDirectory()
    getModule(__name__).filePath.sibling(dropinSource).copyTo(
        fp.child(dropinSource))
    plugins.__path__.append(fp.path)
Example #46
    def test_path_absolute(self):
        """
        If the ``path`` argument is absolute, the command is executed in that directory,
        and that directory is logged.
        """
        path = FilePath(self.mktemp())
        path.createDirectory()
        cmd = [sys.executable, '-c', 'import os, sys; sys.stdout.write(os.getcwd())']
        self.setupStep(
            master.MasterShellCommand(command=cmd, path=path.path))
        self.expectLogfile('stdio', path.path)
        self.expectOutcome(result=SUCCESS, status_text=["Ran"])
        d = self.runStep()

        @d.addCallback
        def check(_):
            headers = self.step_status.logs['stdio'].header.splitlines()
            self.assertIn(" in dir %s" % (path.path,), headers)
        return d
Example #47
class FileSystemStore(object):
    """
    Chunk store that stores chunks in a directory on the local file
    system.

    @ivar computes: a C{dict} that maps chunk ids to L{Deferred}s for
        hash computations that are currently in progress.

    """
    implements(idistfs.IStore)

    def __init__(self, dir):
        self.dir = FilePath(dir)
        self.computes = dict()
        try:
            self.dir.createDirectory()
        except OSError, e:
            if e.errno == errno.EEXIST:
                return
            raise
Example #48
@defer.inlineCallbacks
def getMaster(case, reactor, config_dict):
    """
    Create a started ``BuildMaster`` with the given configuration.
    """
    basedir = FilePath(case.mktemp())
    basedir.createDirectory()
    master = BuildMaster(basedir.path, reactor=reactor, config_loader=DictLoader(config_dict))

    if 'db_url' not in config_dict:
        config_dict['db_url'] = 'sqlite://'

    # TODO: Allow BuildMaster to transparently upgrade the database, at least for tests.
    master.config.db['db_url'] = config_dict['db_url']
    yield master.db.setup(check_version=False)
    yield master.db.model.upgrade()
    master.db.setup = lambda: None

    yield master.startService()

    defer.returnValue(master)
Example #49
 def test_packageMissingPath(self):
     """
     A package can delete its __path__ for some reason;
     C{modules.PythonPath} should be able to deal with it.
     """
     mypath = FilePath(self.mktemp())
     mypath.createDirectory()
     pp = modules.PythonPath(sysPath=[mypath.path])
     subpath = mypath.child("abcd")
     subpath.createDirectory()
     subpath.child("__init__.py").setContent('del __path__\n')
     sys.path.append(mypath.path)
     __import__("abcd")
     try:
         l = list(pp.walkModules())
         self.assertEqual(len(l), 1)
         self.assertEqual(l[0].name, 'abcd')
     finally:
         del sys.modules['abcd']
         sys.path.remove(mypath.path)
Example #50
 def test_packageMissingPath(self):
     """
     A package can delete its __path__ for some reason;
     C{modules.PythonPath} should be able to deal with it.
     """
     mypath = FilePath(self.mktemp())
     mypath.createDirectory()
     pp = modules.PythonPath(sysPath=[mypath.path])
     subpath = mypath.child("abcd")
     subpath.createDirectory()
     subpath.child("__init__.py").setContent(b"del __path__\n")
     sys.path.append(mypath.path)
     __import__("abcd")
     try:
         l = list(pp.walkModules())
         self.assertEqual(len(l), 1)
         self.assertEqual(l[0].name, "abcd")
     finally:
         del sys.modules["abcd"]
         sys.path.remove(mypath.path)
Example #51
    def test_absolute_args(self):
        """
        When invoked as `build-vagrant-box`, :class:`BuildOption` takes the
        path relative to the top-level, and the box name from the passed
        argument.
        """
        path = FilePath(self.mktemp())
        path.createDirectory()
        base_path = path.descendant(['bin', 'build-vagrant-box'])

        options = BuildOptions(base_path=base_path, top_level=path)

        options.parseOptions(['--box', 'box-name'])

        self.assertEqual(options, {
            'box': 'box-name',
            'path': path.descendant(['vagrant', 'box-name']),
            'build-server': 'http://build.clusterhq.com/',
            'branch': None,
            'flocker-version': flocker_version,
        })
Example #52
    def test_absolute_args(self):
        """
        When invoked as `build-vagrant-box`, :class:`BuildOption` takes the
        path relative to the top-level, and the box name from the passed
        argument.
        """
        path = FilePath(self.mktemp())
        path.createDirectory()
        base_path = path.descendant(['bin', 'build-vagrant-box'])

        options = BuildOptions(base_path=base_path, top_level=path)

        options.parseOptions(['--box', 'box-name'])

        self.assertEqual(
            options, {
                'box': 'box-name',
                'path': path.descendant(['vagrant', 'box-name']),
                'build-server': 'http://build.clusterhq.com/',
                'branch': None,
                'flocker-version': flocker_version,
            })
Example #53
    def setUp(self):
        f = FilePath(self.mktemp())
        f.createDirectory()
        self.serverOptions, self.clientOptions = _openSSHConfig.setupConfig(
            f.path, 2222)

        class MyPP(protocol.ProcessProtocol):
            def __init__(self):
                self.readyDeferred = defer.Deferred()
                self.deferred = defer.Deferred()

            def processEnded(self, reason):
                self.deferred.callback("None")

            def errReceived(self, data):  # because openSSH prints on stderr
                if "Server listening" in data:
                    self.readyDeferred.callback("Ready")

        self.pp = MyPP()
        self.server = execCommand(self.pp,
                                  "/usr/sbin/sshd %s" % (self.serverOptions, ))
        self.ssht = SSHTester()
Example #54
    def makeProject(self, version, baseDirectory=None):
        """
        Make a Twisted-style project in the given base directory.

        @param baseDirectory: The directory to create files in
            (as a L{FilePath}).
        @param version: The version information for the project.
        @return: L{Project} pointing to the created project.
        """
        if baseDirectory is None:
            baseDirectory = FilePath(self.mktemp())
            baseDirectory.createDirectory()
        segments = version.package.split('.')
        directory = baseDirectory
        for segment in segments:
            directory = directory.child(segment)
            if not directory.exists():
                directory.createDirectory()
            directory.child('__init__.py').setContent('')
        directory.child('topfiles').createDirectory()
        directory.child('topfiles').child('README').setContent(version.base())
        replaceProjectVersion(directory.child('_version.py').path, version)
        return Project(directory)