Example #1
def main(argv):
    # input
    posts = FilePath(argv[1])

    # output
    blog = FilePath(argv[2])

    # Since Sphinx gets confused by image paths with "special" characters in
    # them, generate new names for all the image paths and a mapping from the
    # old names to the new names.
    images = FilePath(argv[3])

    imagepaths = []
    for post in images.children():
        if post.isdir():
            imagepaths.append(post)
            safe = post.sibling(fixpath(post.basename()))
            if post != safe and not safe.isdir():
                post.moveTo(safe)
                safe.linkTo(post)

    entries = []
    for post in posts.children():
        data = post.getContent().decode("utf-8")
        ignored, header, body = data.split("---", 2)
        meta = dict(
            tuple(text.strip() for text in line.split(":", 1))
            for line in header.splitlines() if line.strip())
        date = datetime.strptime(meta["date"], "%Y/%m/%d %H:%M:%S")

        parent = blog.preauthChild(
            ("%d/%02d/%02d" % (date.year, date.month, date.day)).encode("utf-8"))
        title = fixpath(meta["title"].strip().lower().encode("utf-8")).decode("utf-8")
        entry = parent.child((title + ".rst").encode("utf-8"))

        header = HEADER_TEMPLATE % dict(
            author=meta["author"].strip(), categories="none",
            tags=meta["categories"].strip(), title=meta["title"].strip(),
            underbar="=" * len(meta["title"].strip()))

        for path in imagepaths:
            body = body.replace(
                u"/" + path.basename().decode("utf-8") + u"/",
                u"/" + fixpath(path.basename()).decode("utf-8") + u"/")

        if not parent.isdir():
            parent.makedirs()

        entry.setContent((header + html2rst(body)).encode("utf-8"))

        entries.append(entry)

    entries.sort()
    entries.reverse()

    sitemap = SITEMAP % dict(
        entries="".join([
                "\n   " + "/".join(entry.segmentsFrom(blog))
                for entry in entries]))
    blog.child(b"master.rst").setContent(sitemap.encode("utf-8"))

    FilePath(b"conf.py").copyTo(blog.child(b"conf.py"))
Example #2
    def test_list_with_missing_image(self):
        """
        ``DockerClient.list()`` can list containers whose image is missing.

        The resulting output may be inaccurate, but that's OK: this only
        happens for non-running containers, which at worst we are going to
        restart anyway.
        """
        path = FilePath(self.mktemp())
        path.makedirs()
        path.child(b"Dockerfile.in").setContent(
            b"FROM busybox\nCMD /bin/true\n")
        image_name = DockerImageBuilder(test=self, source_dir=path,
                                        cleanup=False).build()
        name = random_name(self)
        d = self.start_container(unit_name=name, image_name=image_name,
                                 expected_states=(u'inactive',))

        def stopped_container_exists(_):
            # Remove the image:
            docker_client = Client()
            docker_client.remove_image(image_name, force=True)

            # Should be able to still list the container:
            client = self.make_client()
            listed = client.list()
            listed.addCallback(lambda results: self.assertIn(
                (name, "inactive"),
                [(unit.name, unit.activation_state) for unit in results]))
            return listed
        d.addCallback(stopped_container_exists)
        return d
Example #3
    def test_json_to_yaml(self):
        """
        Running `crossbar convert` on a JSON config file will convert it
        to YAML.
        """
        cbdir = FilePath(self.mktemp())
        cbdir.makedirs()
        config_file = cbdir.child("config.json")
        config_file.setContent(b"""{
   "foo": {
      "bar": "spam",
      "baz": {
         "foo": "cat"
      }
   }
}""")

        cli.run("crossbar",
                ["convert", "--config={}".format(config_file.path)])

        self.assertIn(
            ("YAML formatted configuration written"),
            self.stdout.getvalue())

        with open(cbdir.child("config.yaml").path) as f:
            self.assertEqual(f.read(), """foo:
  bar: spam
  baz:
    foo: cat
""")
Example #4
def setupConfig(directoryPath, port):
    f = FilePath(directoryPath)
    hostkey = f.child('hostkey')
    hostkey.setContent(serverPrivKey)
    os.chmod(hostkey.path, 0o600)
    knownHosts = f.child('clientID.pub')
    knownHosts.setContent(clientPubKey)
    clientID = f.child('clientID')
    clientID.setContent(clientPrivKey)
    os.chmod(clientID.path, 0o600)

    hostname = 'localhost.sftp.experimental'

    sshdConfigFile = f.child("sshd_config")
    sshdConfigFile.setContent(sshd_config % {
            'port': port,
            'hostkeyFile': hostkey.path,
            'clientPubkeyFile': knownHosts.path})

    sshConfigFile = f.child("ssh_config")
    sshConfigFile.setContent(ssh_config % {
            'port': port,
            'clientPrivkeyFile': clientID.path,
            'hostname': hostname})

    serverOptions = "-de -f %s" % sshdConfigFile.path
    clientOptions = "-F %s %s" % (sshConfigFile.path, hostname)

    return (serverOptions, clientOptions)
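A hedged sketch of how these option strings might be consumed; the harness
that actually spawns the daemons is not shown in this example, so the paths
and invocation details below are assumptions:

import shlex
import subprocess

serverOptions, clientOptions = setupConfig('/tmp/sftp-test', 2222)
# "-de -f <sshd_config>" keeps sshd in the foreground with our config;
# "-F <ssh_config> <hostname>" points the client at the matching config.
server = subprocess.Popen(['/usr/sbin/sshd'] + shlex.split(serverOptions))
client = subprocess.call(['ssh'] + shlex.split(clientOptions) + ['true'])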
Example #5
File: test_util.py Project: iffy/ppd
 def test_multipleActions(self):
     """
     Multiple actions can be specified for each rule.  Each action should
     happen.
     """
     tmpdir = FilePath(self.mktemp())
     rules = [
         {
             'pattern': {
                 'foo': '*',
             },
             'actions': [
                 {'merge_yaml': '{foo}.yml'},
                 {'merge_yaml': '{foo}2.yml'},
             ]
         },
     ]
     dumper = RuleBasedFileDumper(tmpdir.path, rules)
     dumper.dumpObject({
         'foo': 'thefoo',
     })
     self.assertTrue(tmpdir.child('thefoo.yml').exists(),
         "Should have matched and acted on the first rule first action")
     self.assertTrue(tmpdir.child('thefoo2.yml').exists(),
         "Should have matched and acted on the first rule second action")
Example #6
def renderXHTML(markup, tempDir=None, css2xslfo=css2xslfo, fop=fop):
    """
    Render an I{XHTML} document to a I{PDF} document.

    @type  markup: L{str}
    @param markup: I{XHTML} document, encoded as UTF-8, that includes
        stylesheet information.

    @rtype: L{Deferred} firing with L{str}
    @return: Deferred that fires with the generated I{PDF} byte data.
    """
    def _removeTemp(ignored):
        tempDir.remove()

    if tempDir is None:
        tempDir = FilePath(tempfile.mkdtemp())
    xhtmlPath = tempDir.child('input.html')
    xhtmlPath.setContent(markup)
    xslfoPath = tempDir.child('output.fo')
    pdfPath = tempDir.child('output.pdf')

    configPath = FOP_CONFIG
    if not configPath.exists():
        configPath = None

    d = css2xslfo(xhtmlPath, xslfoPath)
    d.addCallback(lambda ignored: fop(xslfoPath, pdfPath, configPath))
    d.addCallback(defertee, _removeTemp)
    return d
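defertee is not defined in this example. A minimal pass-through helper
consistent with its use above (run a side effect, then hand the original
result down the Deferred chain) might look like:

def defertee(result, f, *args, **kwargs):
    # Run the side effect (here: removing the temporary directory) and
    # return the original result unchanged for later callbacks.
    f(result, *args, **kwargs)
    return result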
Example #7
 def test_load_error_on_unreadable_key_file(self):
     """
     A ``PathError`` is raised if the key file path given to
     ``CertificateAuthority.from_path`` cannot be opened for reading.
     """
     path = FilePath(self.mktemp())
     path.makedirs()
     crt_path = path.child(b"cluster.crt")
     crt_file = crt_path.open(b'w')
     crt_file.write(b"dummy")
     crt_file.close()
     key_path = path.child(b"cluster.key")
     key_file = key_path.open(b'w')
     key_file.write(b"dummy")
     key_file.close()
     # make file unreadable
     key_path.chmod(0o100)
     e = self.assertRaises(
         PathError, CertificateAuthority.from_path, path
     )
     expected = (
         "Private key file {path} could not be opened. "
         "Check file permissions."
     ).format(path=key_path.path)
     self.assertEqual(str(e), expected)
Example #8
File: test_util.py Project: iffy/ppd
 def test_catchAll(self):
     """
     Everything should match a catchall rule.
     """
     tmpdir = FilePath(self.mktemp())
     rules = [
         {
             'pattern': {
                 'foo': '*',
             },
             'actions': [
                 {'merge_yaml': '{foo}.yml'},
             ]
         },
         {
             'pattern': 'all',
             'actions': [
                 {'merge_yaml': 'extra.yml'},
             ]
         }
     ]
     dumper = RuleBasedFileDumper(tmpdir.path, rules)
     dumper.dumpObject({
         'foo': 'thefoo',
     })
     self.assertTrue(tmpdir.child('thefoo.yml').exists(),
         "Should have matched and acted on the first rule")
     dumper.dumpObject({
         'bar': 'hey',
     })
     self.assertTrue(tmpdir.child('extra.yml').exists(),
         "Should have matched and acted on the second rule")
     self.assertEqual(len(tmpdir.children()), 2, "Should only have made "
         "the 2 expected files")
Example #9
 def test_saveKeysha256(self):
     """
     L{_saveKey} will generate key fingerprint in
     L{FingerprintFormats.SHA256-BASE64} format if explicitly specified.
     """
     base = FilePath(self.mktemp())
     base.makedirs()
     filename = base.child('id_rsa').path
     key = Key.fromString(privateRSA_openssh)
     _saveKey(key, {'filename': filename, 'pass': '******',
         'format': 'sha256-base64'})
     self.assertEqual(
         self.stdout.getvalue(),
         "Your identification has been saved in %s\n"
         "Your public key has been saved in %s.pub\n"
         "The key fingerprint in <FingerprintFormats=SHA256_BASE64> is:\n"
         "ryaugIFT0B8ItuszldMEU7q14rG/wj9HkRosMeBWkts=\n" % (
             filename,
             filename))
     self.assertEqual(
         key.fromString(
             base.child('id_rsa').getContent(), None, 'passphrase'),
         key)
     self.assertEqual(
         Key.fromString(base.child('id_rsa.pub').getContent()),
         key.public())
Example #10
 def test_saveKeyECDSA(self):
     """
     L{_saveKey} writes the private and public parts of a key to two
     different files and writes a report of this to standard out.
     Test with ECDSA key.
     """
     base = FilePath(self.mktemp())
     base.makedirs()
     filename = base.child('id_ecdsa').path
     key = Key.fromString(privateECDSA_openssh)
     _saveKey(key, {'filename': filename, 'pass': '******',
         'format': 'md5-hex'})
     self.assertEqual(
         self.stdout.getvalue(),
         "Your identification has been saved in %s\n"
         "Your public key has been saved in %s.pub\n"
         "The key fingerprint in <FingerprintFormats=MD5_HEX> is:\n"
         "e2:3b:e8:1c:f8:c9:c7:de:8b:c0:00:68:2e:c9:2c:8a\n" % (
             filename,
             filename))
     self.assertEqual(
         key.fromString(
             base.child('id_ecdsa').getContent(), None, 'passphrase'),
         key)
     self.assertEqual(
         Key.fromString(base.child('id_ecdsa.pub').getContent()),
         key.public())
Example #11
	def _makeTree(self):
		parent = FilePath(self.mktemp())
		parent.makedirs()
		sub = parent.child('sub')
		sub.makedirs()
		subsub = sub.child('sub sub')
		subsub.makedirs()

		parent.child('one.png').setContent("one")
		sub.child("two.png").setContent("two")
		subsub.child("three.png").setContent("three")

		t = {}
		t['md5one'] = hashlib.md5("one").hexdigest()
		t['md5two'] = hashlib.md5("two").hexdigest()
		t['md5three'] = hashlib.md5("three").hexdigest()
		t['md5replacement'] = hashlib.md5("replacement").hexdigest()

		temp = sub.child('style.css')
		original = """\
div { background-image: url(http://127.0.0.1/not-modified.png); }
td { background-image: url(https://127.0.0.1/not-modified.png); }
p { background-image: url(../one.png); }
q { background-image: url(two.png); }
b { background-image: url(sub%20sub/three.png); }
i { background-image: url(/sub/sub%20sub/three.png); }
"""
		temp.setContent(original)
		t['md5original'] = hashlib.md5(original).hexdigest()

		return parent, t
Example #12
    def make_site(self):
        p = FilePath(self.mktemp().decode("ascii"))
        p.makedirs()
        chargebee_secret = p.child(b"chargebee-key.secret")
        chargebee_secret.setContent(b"foo")
        stripe_publishable = p.child(b"stripe-key.publishable")
        stripe_publishable.setContent(b"bar")
        stripe_secret = p.child(b"stripe-key.secret")
        stripe_secret.setContent(b"baz")

        options = SiteOptions(self.reactor)
        options.parseOptions([
            b"--chargebee-domain", b"localhost",
            b"--chargebee-site-name", b"some-site",
            b"--chargebee-secret-api-key-path", chargebee_secret.path,
            b"--chargebee-plan-id", b"foo-bar",
            b"--chargebee-gateway-account-id", b"gw_baz",
            b"--stripe-publishable-api-key-path", stripe_publishable.path,
            b"--site-logs-path", b"httpd.log",
            b"--secure-port", b"tcp:0",
            b"--subscription-manager", b"http://127.0.0.1:8888/",
            b"--cross-domain", self.origin,
            b"--wormhole-result-path", self.mktemp(),
            b"--signup-failure-address", u"*****@*****.**",
        ])
        return site_for_options(self.reactor, options)
Example #13
File: test_cli.py Project: iffy/grace
    def test_switch(self):
        """
        Switch should change the destination of a running forwarder, and the
        new destination should be visible via ``ls``.
        """
        runner = Runner()

        # I'm getting AF_UNIX path too long errors using self.mktemp()
        base = FilePath(tempfile.mkdtemp())
        log.msg('tmpdir: %r' % base.path)
        root = base.child('root')
        src = base.child('src')
        dst = base.child('dst')
        
        _ = yield runner.start(root.path, 'unix:'+src.path, 'unix:'+dst.path)
        
        pidfile = root.child('grace.pid')
        pid = pidfile.getContent()
        self.addCleanup(self.kill, pid)
        r = yield runner.switch(root.path, 'unix:'+src.path, 'unix:/foo')
        r = yield runner.ls(root.path)
        self.assertEqual(r, [
            {
                'src': 'unix:'+src.path,
                'dst': 'unix:/foo',
                'conns': 0,
                'active': True,
            }
        ], "Should have switched")
Example #14
    def test_renderFiltered(self):
        """
        L{static.DirectoryLister} takes an optional C{dirs} argument that
        filters the list of directories and files printed.
        """
        path = FilePath(self.mktemp())
        path.makedirs()
        path.child('dir1').makedirs()
        path.child('dir2').makedirs()
        path.child('dir3').makedirs()
        lister = static.DirectoryLister(path.path, dirs=["dir1", "dir3"])
        data = lister.render(self._request('foo'))
        body = """<tr class="odd">
    <td><a href="dir1/">dir1/</a></td>
    <td></td>
    <td>[Directory]</td>
    <td></td>
</tr>
<tr class="even">
    <td><a href="dir3/">dir3/</a></td>
    <td></td>
    <td>[Directory]</td>
    <td></td>
</tr>"""
        self.assertIn(body, data)
Example #15
    def test_null_environment(self):
        """
        A container that does not include any environment variables contains
        an empty ``environment`` in the returned ``Unit``.
        """
        docker_dir = FilePath(self.mktemp())
        docker_dir.makedirs()
        docker_dir.child(b"Dockerfile").setContent(
            b"FROM scratch\n" b"MAINTAINER [email protected]\n" b'CMD ["/bin/doesnotexist"]'
        )
        name = random_name(self)
        image = DockerImageBuilder(test=self, source_dir=docker_dir)
        d = image.build()

        def image_built(image_name):
            client = self.make_client()
            self.create_container(client, name, image_name)
            self.addCleanup(client.remove, name)
            return client.list()

        d.addCallback(image_built)

        def got_list(units):
            unit = [unit for unit in units if unit.name == name][0]
            self.assertIsNone(unit.environment)

        d.addCallback(got_list)
        return d
Example #16
File: test_cli.py Project: iffy/grace
    def test_start_waits(self):
        """
        Start should not return until it's actually working.
        """
        runner = Runner()
        
        base = FilePath(tempfile.mkdtemp())
        root = base.child('root')
        src = base.child('src')
        dst = base.child('dst')
        
        _ = yield runner.start(root.path, 'unix:'+src.path, 'unix:'+dst.path)

        self.assertTrue(root.child('grace.pid').exists(), "Should have a pid")
        pid = root.child('grace.pid').getContent().strip()
        self.addCleanup(self.kill, pid)
        self.assertTrue(root.child('grace.socket').exists(), "Should have a "
                        "socket")
        
        self.assertTrue(root.exists(), "Should have made the root dir")
        tac = root.child('grace.tac')
        self.assertTrue(tac.exists(), "Should have made grace.tac")
        self.assertEqual(tac.getContent(),
            getTac(('unix:'+src.path, 'unix:'+dst.path)),
            "Should have made the tac file using getTac")
Example #17
File: test_cli.py Project: iffy/grace
    def test_stop(self):
        """
        Stop will stop a running process.
        """
        runner = Runner()

        # I'm getting AF_UNIX path too long errors using self.mktemp()
        base = FilePath(tempfile.mkdtemp())
        log.msg('tmpdir: %r' % base.path)
        root = base.child('root')
        src = base.child('src')
        dst = base.child('dst')
        
        _ = yield runner.start(root.path, 'unix:'+src.path, 'unix:'+dst.path)
        
        pidfile = root.child('grace.pid')
        pid = pidfile.getContent()
        self.addCleanup(self.kill, pid)
        _ = yield runner.stop(root.path)

        # tail the log until you see Server Shut Down
        # XXX stop should maybe do the same... so that it doesn't return until
        # the process has actually stopped.
        logfile = root.child('grace.log')
        self.assertTrue(logfile.exists())
        _ = yield self.tailUntil(logfile.path, 'Server Shut Down.')

        self.assertFalse(pidfile.exists(), "pidfile should be gone: %r" % pidfile.path)
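tailUntil is not shown in this example. A minimal sketch under the
assumption that it simply polls the log file until the phrase appears,
built on twisted.internet.task.deferLater:

from twisted.internet import reactor, task

def tailUntil(path, phrase, interval=0.1):
    # Re-read the file on a short timer until `phrase` shows up; each
    # deferLater call reschedules the check and keeps the chain alive.
    def check():
        with open(path) as f:
            if phrase in f.read():
                return None
        return task.deferLater(reactor, interval, check)
    return task.deferLater(reactor, 0, check)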
Example #18
    def test_alwaysPreferPy(self):
        """
        Verify that .py files will always be preferred to .pyc files, regardless of
        directory listing order.
        """
        mypath = FilePath(self.mktemp())
        mypath.createDirectory()
        pp = modules.PythonPath(sysPath=[mypath.path])
        originalSmartPath = pp._smartPath

        def _evilSmartPath(pathName):
            o = originalSmartPath(pathName)
            originalChildren = o.children

            def evilChildren():
                # normally this order is random; let's make sure it always
                # comes up .pyc-first.
                x = originalChildren()
                x.sort()
                x.reverse()
                return x

            o.children = evilChildren
            return o

        mypath.child("abcd.py").setContent("\n")
        compileall.compile_dir(mypath.path, quiet=True)
        # sanity check
        self.assertEquals(len(mypath.children()), 2)
        pp._smartPath = _evilSmartPath
        self.assertEquals(pp["abcd"].filePath, mypath.child("abcd.py"))
Example #19
File: test_lore.py Project: 0004c/VTK
    def test_getProcessor(self):

        base = FilePath(self.mktemp())
        base.makedirs()
        input = base.child("simple3.html")
        FilePath(__file__).sibling("simple3.html").copyTo(input)

        options = { 'template': sp('template.tpl'), 'ext': '.xhtml', 'baseurl': 'burl',
                    'filenameMapping': None }
        p = process.getProcessor(default, "html", options)
        p(input.path, self.linkrel)
        self.assertXMLEqual(
            """\
<?xml version="1.0" ?><!DOCTYPE html  PUBLIC '-//W3C//DTD XHTML 1.0 Transitional//EN'  'http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd'><html lang="en" xmlns="http://www.w3.org/1999/xhtml">
  <head><title>Twisted Documentation: My Test Lore Input</title></head>
  <body bgcolor="white">
    <h1 class="title">My Test Lore Input</h1>
    <div class="content">
<span/>
<p>A Body.</p>
</div>
    <a href="index.xhtml">Index</a>
  </body>
</html>""",
            base.child("simple3.xhtml").getContent())
Example #20
File: test_dvol.py Project: lalyos/dvol
class VoluminousTests(TestCase):
    def setUp(self):
        self.tmpdir = FilePath(self.mktemp())
        self.tmpdir.makedirs()

    def test_create_volume(self):
        dvol = VoluminousOptions()
        dvol.parseOptions(ARGS + ["-p", self.tmpdir.path, "init", "foo"])
        self.assertTrue(self.tmpdir.child("foo").exists())
        self.assertTrue(self.tmpdir.child("foo").child("branches")
                .child("master").exists())
        self.assertEqual(dvol.voluminous.getOutput()[-1],
                "Created volume foo\nCreated branch foo/master")
        # Verify operation with `list`
        dvol.parseOptions(ARGS + ["-p", self.tmpdir.path, "list"])
        header, rest = self._parse_list_output(dvol)
        expected_volumes = [["*", "foo", "master"]]
        self.assertEqual(
            sorted(expected_volumes),
            sorted(rest),
        )

    def test_create_volume_already_exists(self):
        dvol = VoluminousOptions()
        # Create the repository twice, second time should have the error
        expected_output = "Error: volume foo already exists"
        dvol.parseOptions(ARGS + ["-p", self.tmpdir.path, "init", "foo"])
        try:
            dvol.parseOptions(ARGS + ["-p", self.tmpdir.path, "init", "foo"])
            self.assertEqual(dvol.voluminous.getOutput()[-1], expected_output)
        except CalledProcessErrorWithOutput as error:
            self.assertIn(expected_output, error.original.output)
            self.assertTrue(error.original.returncode != 0)
Example #21
    def test_inputNewVersionWithDefault(self):
        """
        L{inputNewVersionWithDefault} should prompt for a new version number,
        using C{raw_input}, finding the current version number in a I{NEWS.txt}
        file in the grandparent of the C{initPath} of the project it is passed
        and supplying that as a default.
        """
        projectPath = FilePath(self.mktemp()).child('FakeProject')
        projectPath.makedirs()
        projectPath.child('NEWS.txt').setContent('0.9.99')

        packagePath = projectPath.child('fakeproject')
        initPath = packagePath.child('__init__.py')
        project = Project(name="FakeProject", initPath=initPath,
                          package=None, version=None)

        def checkPrompt(prompt):
            self.assertEqual(prompt, "New version for FakeProject (default 0.9.99)? ")
            return ""

        self.assertEqual(
            inputNewVersionWithDefault(project, raw_input=checkPrompt),
            (0, 9, 99))
        self.assertEqual(
            inputNewVersionWithDefault(project, raw_input=lambda _: "6.7.89"),
            (6, 7, 89))
Example #22
File: test_ckeygen.py Project: 0004c/VTK
 def test_saveKey(self):
     """
     L{_saveKey} writes the private and public parts of a key to two
     different files and writes a report of this to standard out.
     """
     base = FilePath(self.mktemp())
     base.makedirs()
     filename = base.child('id_rsa').path
     key = Key.fromString(privateRSA_openssh)
     _saveKey(
         key.keyObject,
         {'filename': filename, 'pass': '******'})
     self.assertEqual(
         self.stdout.getvalue(),
         "Your identification has been saved in %s\n"
         "Your public key has been saved in %s.pub\n"
         "The key fingerprint is:\n"
         "3d:13:5f:cb:c9:79:8a:93:06:27:65:bc:3d:0b:8f:af\n" % (
             filename,
             filename))
     self.assertEqual(
         key.fromString(
             base.child('id_rsa').getContent(), None, 'passphrase'),
         key)
     self.assertEqual(
         Key.fromString(base.child('id_rsa.pub').getContent()),
         key.public())
Example #23
    def test_add_with_environment(self):
        """
        ``DockerClient.add`` accepts an environment object whose ID and
        variables are used when starting a docker image.
        """
        docker_dir = FilePath(self.mktemp())
        docker_dir.makedirs()
        docker_dir.child(b"Dockerfile").setContent(
            b"FROM busybox\n" b'CMD ["/bin/sh",  "-c", ' b'"while true; do env && echo WOOT && sleep 1; done"]'
        )
        expected_variables = frozenset({"key1": "value1", "key2": "value2"}.items())
        unit_name = random_name(self)

        image = DockerImageBuilder(test=self, source_dir=docker_dir)
        d = image.build()

        def image_built(image_name):
            return self.start_container(
                unit_name=unit_name, image_name=image_name, environment=Environment(variables=expected_variables)
            )

        d.addCallback(image_built)

        def started(_):
            output = ""
            while True:
                output += Client().logs(self.namespacing_prefix + unit_name)
                if "WOOT" in output:
                    break
            assertContainsAll(output, test_case=self, needles=["{}={}\n".format(k, v) for k, v in expected_variables])

        d.addCallback(started)
        return d
Example #24
 def setupJobdir(self):
     jobdir = FilePath(self.mktemp())
     jobdir.createDirectory()
     self.jobdir = jobdir.path
     for sub in 'new', 'tmp', 'cur':
         jobdir.child(sub).createDirectory()
     return self.jobdir
Example #25
 def test_load_error_on_unreadable_key_file(self):
     """
     A ``PathError`` is raised if the key file path given to
     ``RootCredential.from_path`` cannot be opened for reading.
     """
     path = FilePath(self.mktemp())
     path.makedirs()
     crt_path = path.child(AUTHORITY_CERTIFICATE_FILENAME)
     crt_file = crt_path.open(b'w')
     crt_file.write(b"dummy")
     crt_file.close()
     key_path = path.child(AUTHORITY_KEY_FILENAME)
     key_file = key_path.open(b'w')
     key_file.write(b"dummy")
     key_file.close()
     # make file unreadable
     key_path.chmod(0o100)
     e = self.assertRaises(
         PathError, RootCredential.from_path, path
     )
     expected = (
         "Unable to load certificate authority file. "
         "Permission denied {path}"
     ).format(path=key_path.path)
     self.assertEqual(str(e), expected)
Example #26
def flocker_deploy(test_case, deployment_config, application_config):
    """
    Run ``flocker-deploy`` with given configuration files.

    :param test_case: The ``TestCase`` running this unit test.
    :param dict deployment_config: The desired deployment configuration.
    :param dict application_config: The desired application configuration.
    """
    # This is duplicate code, see
    # https://clusterhq.atlassian.net/browse/FLOC-1903
    control_node = environ.get("FLOCKER_ACCEPTANCE_CONTROL_NODE")
    certificate_path = environ["FLOCKER_ACCEPTANCE_API_CERTIFICATES_PATH"]
    if control_node is None:
        raise SkipTest("Set control node address using "
                       "FLOCKER_ACCEPTANCE_CONTROL_NODE environment variable.")

    temp = FilePath(test_case.mktemp())
    temp.makedirs()

    deployment = temp.child(b"deployment.yml")
    deployment.setContent(safe_dump(deployment_config))

    application = temp.child(b"application.yml")
    application.setContent(safe_dump(application_config))
    check_call([b"flocker-deploy", b"--certificates-directory",
               certificate_path, control_node, deployment.path,
               application.path])
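A hedged usage sketch from inside a test method, reusing the minimal
version-1 configuration shapes that appear in Example #28 below (the
surrounding test class is assumed):

    def test_deploy_empty_configs(self):
        # The smallest valid inputs: version 1 with no nodes or applications.
        flocker_deploy(
            self,
            deployment_config={u"version": 1, u"nodes": {}},
            application_config={u"version": 1, u"applications": {}},
        )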
Example #27
 def test_load_error_on_unreadable_key_file(self):
     """
     A ``PathError`` is raised if the key file path given to
     ``UserCredential.from_path`` cannot be opened for reading.
     """
     path = FilePath(self.mktemp())
     path.makedirs()
     crt_path = path.child(self.cert_file_name)
     crt_file = crt_path.open(b'w')
     crt_file.write(b"dummy")
     crt_file.close()
     key_path = path.child(self.key_file_name)
     key_file = key_path.open(b'w')
     key_file.write(b"dummy")
     key_file.close()
     # make file unreadable
     key_path.chmod(0o100)
     e = self.assertRaises(
         PathError, cls.from_path,
         path, **kwargs
     )
     expected = (
         "Private key file could not be opened. "
         "Permission denied {path}"
     ).format(path=key_path.path)
     self.assertEqual(str(e), expected)
Example #28
    def test_deferred_result(self):
        """
        ``DeployScript.main`` returns a ``Deferred`` on success.
        """
        temp = FilePath(self.mktemp())
        temp.makedirs()

        application_config_path = temp.child(b"app.yml")
        application_config_path.setContent(safe_dump({
            u"version": 1,
            u"applications": {},
        }))

        deployment_config_path = temp.child(b"deploy.yml")
        deployment_config_path.setContent(safe_dump({
            u"version": 1,
            u"nodes": {},
        }))

        options = DeployOptions()
        options.parseOptions([
            deployment_config_path.path, application_config_path.path])

        script = DeployScript()
        dummy_reactor = object()

        self.assertEqual(
            None,
            self.successResultOf(script.main(dummy_reactor, options))
        )
Example #29
def setup_config(test):
    """
    Create a configuration file and certificates for a dataset agent in a
    temporary directory.

    Sets ``config`` attribute on the test instance with the path to the
    config file.

    :param test: A ``TestCase`` instance.
    """
    ca_set, _ = get_credential_sets()
    scratch_directory = FilePath(test.mktemp())
    scratch_directory.makedirs()
    test.config = scratch_directory.child('dataset-config.yml')
    test.config.setContent(
        yaml.safe_dump({
            u"control-service": {
                u"hostname": u"10.0.0.1",
                u"port": 1234,
            },
            u"dataset": {
                u"backend": u"zfs",
            },
            u"version": 1,
        }))
    ca_set.copy_to(scratch_directory, node=True)
    test.ca_set = ca_set
    test.non_existent_file = scratch_directory.child('missing-config.yml')
Example #30
    def test_create(self):
        """
        You can create a directory from a template
        """
        t_root = FilePath(self.mktemp())
        t_root.makedirs()
        
        d1 = t_root.child('dir1')
        d1.makedirs()
        f1 = d1.child('foo')
        f1.setContent('foo content')
        d2 = d1.child('dir2')
        d2.makedirs()
        f2 = d2.child('bar')
        f2.setContent('bar content')

        dst = FilePath(self.mktemp())
        d = Directory(dst.path)
        # fake template root
        d.template_root = t_root
        
        d.create('dir1')
        self.assertTrue(dst.exists())
        self.assertEqual(dst.child('foo').getContent(), 'foo content')
        self.assertTrue(dst.child('dir2').exists())
        self.assertEqual(dst.child('dir2').child('bar').getContent(),
                         'bar content')
Example #31
def publish_artifacts_main(args, base_path, top_level):
    """
    Publish release artifacts.

    :param list args: The arguments passed to the scripts.
    :param FilePath base_path: The executable being run.
    :param FilePath top_level: The top-level of the flocker repository.
    """
    options = UploadOptions()

    try:
        options.parseOptions(args)
    except UsageError as e:
        sys.stderr.write("%s: %s\n" % (base_path.basename(), e))
        raise SystemExit(1)
    except NotARelease:
        sys.stderr.write("%s: Can't publish artifacts for a non-release.\n" %
                         (base_path.basename(), ))
        raise SystemExit(1)
    except DocumentationRelease:
        sys.stderr.write("%s: Can't publish artifacts for a documentation "
                         "release.\n" % (base_path.basename(), ))
        raise SystemExit(1)

    dispatcher = ComposedDispatcher(
        [boto_dispatcher, yum_dispatcher, base_dispatcher])

    scratch_directory = FilePath(tempfile.mkdtemp(prefix=b'flocker-upload-'))
    scratch_directory.child('packages').createDirectory()
    scratch_directory.child('python').createDirectory()
    scratch_directory.child('pip').createDirectory()
    scratch_directory.child('vagrant').createDirectory()
    scratch_directory.child('homebrew').createDirectory()

    box_type = "flocker-tutorial"
    vagrant_prefix = 'vagrant/tutorial/'

    box_name = "{box_type}-{version}.box".format(
        box_type=box_type,
        version=options['flocker-version'],
    )

    box_url = "https://{bucket}.s3.amazonaws.com/{key}".format(
        bucket=options['target'],
        key=vagrant_prefix + box_name,
    )

    try:
        sync_perform(
            dispatcher=dispatcher,
            effect=sequence([
                upload_packages(
                    scratch_directory=scratch_directory.child('packages'),
                    target_bucket=options['target'],
                    version=options['flocker-version'],
                    build_server=options['build-server'],
                    top_level=top_level,
                ),
                upload_python_packages(
                    scratch_directory=scratch_directory.child('python'),
                    target_bucket=options['target'],
                    top_level=top_level,
                    output=sys.stdout,
                    error=sys.stderr,
                ),
                upload_pip_index(
                    scratch_directory=scratch_directory.child('pip'),
                    target_bucket=options['target'],
                ),
                Effect(
                    CopyS3Keys(
                        source_bucket=DEV_ARCHIVE_BUCKET,
                        source_prefix=vagrant_prefix,
                        destination_bucket=options['target'],
                        destination_prefix=vagrant_prefix,
                        keys=[box_name],
                    )),
                publish_vagrant_metadata(
                    version=options['flocker-version'],
                    box_url=box_url,
                    scratch_directory=scratch_directory.child('vagrant'),
                    box_name=box_type,
                    target_bucket=options['target'],
                ),
            ]),
        )

        publish_homebrew_recipe(
            homebrew_repo_url=options['homebrew-tap'],
            version=options['flocker-version'],
            source_bucket=options['target'],
            scratch_directory=scratch_directory.child('homebrew'),
            top_level=top_level,
        )

    finally:
        scratch_directory.remove()
Example #32
class TestGlobalConfig(SyncTestCase):
    def setUp(self):
        super(TestGlobalConfig, self).setUp()
        self.setup_tempdir()

    def setup_example(self):
        self.setup_tempdir()

    def setup_tempdir(self):
        self.temp = FilePath(self.mktemp())
        self.node_dir = FilePath(self.mktemp())
        self.tahoe_dir = self.useFixture(NodeDirectory(self.node_dir))

    @given(
        path_segments_without_dotfiles(), )
    def test_create(self, dirname):
        """
        ``create_global_configuration`` accepts a path that doesn't exist to which
        to write the configuration.
        """
        confdir = self.temp.child(dirname)
        config = create_global_configuration(confdir, u"tcp:1234",
                                             self.node_dir,
                                             u"tcp:localhost:1234")
        self.assertThat(
            config,
            MatchesStructure(api_endpoint=Equals(u"tcp:1234"), ),
        )

    def test_create_existing_dir(self):
        """
        ``create_global_configuration`` raises ``ValueError`` if the configuration
        path passed to it already exists.
        """
        self.temp.makedirs()
        with ExpectedException(ValueError, ".*{}.*".format(self.temp.path)):
            create_global_configuration(self.temp, u"tcp:1234", self.node_dir,
                                        u"tcp:localhost:1234")

    def test_load_db(self):
        """
        ``load_global_configuration`` can read the global configuration written by
        ``create_global_configuration``.
        """
        create_global_configuration(self.temp, u"tcp:1234", self.node_dir,
                                    u"tcp:localhost:1234")
        config = load_global_configuration(self.temp)
        self.assertThat(
            config,
            MatchesStructure(
                api_endpoint=Equals(u"tcp:1234"),
                tahoe_client_url=Equals(
                    DecodedURL.from_text(u"http://127.0.0.1:9876/")),
            ))

    def test_load_db_no_such_directory(self):
        """
        ``load_global_configuration`` raises ``ValueError`` if passed a path which
        does not exist.
        """
        non_dir = self.temp.child("non-existent")
        with ExpectedException(ValueError, ".*{}.*".format(non_dir.path)):
            load_global_configuration(non_dir)

    def test_rotate_api_key(self):
        """
        ``GlobalConfigDatabase.rotate_api_token`` replaces the current API token
        with a new one.
        """
        config = create_global_configuration(self.temp, u"tcp:1234",
                                             self.node_dir,
                                             u"tcp:localhost:1234")
        pre = config.api_token
        config.rotate_api_token()
        self.assertThat(config.api_token, NotEquals(pre))

    def test_change_api_endpoint(self):
        """
        An assignment that changes the value of
        ``GlobalConfigDatabase.api_endpoint`` results in the new value being
        available when the database is loaded again with
        ``load_global_configuration``.
        """
        config = create_global_configuration(self.temp, u"tcp:1234",
                                             self.node_dir,
                                             u"tcp:localhost:1234")
        config.api_endpoint = "tcp:42"
        config2 = load_global_configuration(self.temp)
        self.assertThat(config2.api_endpoint, Equals(config.api_endpoint))
        self.assertThat(config2.api_endpoint, Equals("tcp:42"))
Example #33
class MagicFolderConfigRemoteSnapshotTests(SyncTestCase):
    """
    Tests for the ``MagicFolderConfig`` APIs that deal with remote snapshots.
    """
    def setUp(self):
        super(MagicFolderConfigRemoteSnapshotTests, self).setUp()
        self.author = create_local_author(u"alice")

    def setup_example(self):
        self.temp = FilePath(self.mktemp())
        self.stash = self.temp.child("stash")
        self.stash.makedirs()
        self.magic = self.temp.child(b"magic")
        self.magic.makedirs()

        self.db = MagicFolderConfig.initialize(
            u"some-folder",
            SQLite3DatabaseLocation.memory(),
            self.author,
            self.stash,
            u"URI:DIR2-RO:aaa:bbb",
            u"URI:DIR2:ccc:ddd",
            self.magic,
            60,
        )

    @given(
        remote_snapshots(), )
    def test_remotesnapshot_roundtrips(self, snapshot):
        """
        The capability for a ``RemoteSnapshot`` added with
        ``MagicFolderConfig.store_remotesnapshot`` can be read back with
        ``MagicFolderConfig.get_remotesnapshot``.
        """
        self.db.store_remotesnapshot(snapshot.name, snapshot)
        loaded = self.db.get_remotesnapshot(snapshot.name)
        self.assertThat(
            snapshot.capability,
            Equals(loaded),
        )

    @given(
        path_segments(), )
    def test_remotesnapshot_not_found(self, path):
        """
        ``MagicFolderConfig.get_remotesnapshot`` raises ``KeyError`` if there is
        no known remote snapshot for the given path.
        """
        with ExpectedException(KeyError, escape(repr(path))):
            self.db.get_remotesnapshot(path)

    @given(
        # Get two RemoteSnapshots with the same path.
        path_segments().flatmap(
            lambda path: lists(
                remote_snapshots(names=just(path)),
                min_size=2,
                max_size=2,
            ), ), )
    def test_replace_remotesnapshot(self, snapshots):
        """
        A ``RemoteSnapshot`` for a given path can be replaced by a new
        ``RemoteSnapshot`` for the same path.
        """
        path = snapshots[0].name
        self.db.store_remotesnapshot(path, snapshots[0])
        self.db.store_remotesnapshot(path, snapshots[1])
        loaded = self.db.get_remotesnapshot(path)
        self.assertThat(
            snapshots[1].capability,
            Equals(loaded),
        )
Example #34
class StoreLocalSnapshotTests(SyncTestCase):
    """
    Tests for the ``MagicFolderConfig`` APIs which store and load
    ``LocalSnapshot`` objects.
    """
    def setUp(self):
        super(StoreLocalSnapshotTests, self).setUp()
        self.author = create_local_author(u"alice")

    def setup_example(self):
        self.temp = FilePath(self.mktemp())
        self.stash = self.temp.child("stash")
        self.stash.makedirs()
        self.magic = self.temp.child(b"magic")
        self.magic.makedirs()

        self.db = MagicFolderConfig.initialize(
            u"some-folder",
            SQLite3DatabaseLocation.memory(),
            self.author,
            self.stash,
            u"URI:DIR2-RO:aaa:bbb",
            u"URI:DIR2:ccc:ddd",
            self.magic,
            60,
        )

    @given(
        content1=binary(min_size=1),
        content2=binary(min_size=1),
        filename=magic_folder_filenames(),
        stash_subdir=path_segments(),
    )
    def test_serialize_store_deserialize_snapshot(self, content1, content2,
                                                  filename, stash_subdir):
        """
        Create a snapshot with no parents, store it, then store a second
        snapshot of the same file with the first as its parent; reading it
        back reconstructs that parent relationship.
        """
        data1 = BytesIO(content1)

        snapshots = []

        d = create_snapshot(
            name=filename,
            author=self.author,
            data_producer=data1,
            snapshot_stash_dir=self.stash,
            parents=[],
        )
        d.addCallback(snapshots.append)

        self.assertThat(
            d,
            succeeded(Always()),
        )

        self.db.store_local_snapshot(snapshots[0])

        # now modify the same file and create a new local snapshot
        data2 = BytesIO(content2)
        d = create_snapshot(
            name=filename,
            author=self.author,
            data_producer=data2,
            snapshot_stash_dir=self.stash,
            parents=[snapshots[0]],
        )
        d.addCallback(snapshots.append)

        # serialize and store the snapshot in db.
        # It should rewrite the previously written row.
        self.db.store_local_snapshot(snapshots[1])

        # now read back the serialized snapshot from db
        reconstructed_local_snapshot = self.db.get_local_snapshot(filename)

        self.assertThat(
            reconstructed_local_snapshot,
            MatchesStructure(name=Equals(filename),
                             parents_local=HasLength(1)))

        # the initial snapshot does not have parent snapshots
        self.assertThat(reconstructed_local_snapshot.parents_local[0],
                        MatchesStructure(parents_local=HasLength(0), ))

    @given(
        local_snapshots(), )
    def test_delete_localsnapshot(self, snapshot):
        """
        After a local snapshot is deleted from the database,
        ``MagicFolderConfig.get_local_snapshot`` raises ``KeyError`` for that
        snapshot's path.
        """
        self.db.store_local_snapshot(snapshot)
        self.db.delete_localsnapshot(snapshot.name)
        with ExpectedException(KeyError, escape(repr(snapshot.name))):
            self.db.get_local_snapshot(snapshot.name)
Example #35
    class CredentialTests(SynchronousTestCase):
        """
        Base test case for credential tests.
        """
        def setUp(self):
            self.start_date = datetime.datetime.utcnow()
            self.expiry_date = self.start_date + datetime.timedelta(
                seconds=EXPIRY_20_YEARS)
            self.cert_file_name = expected_file_name + b".crt"
            self.key_file_name = expected_file_name + b".key"
            self.path = FilePath(self.mktemp())
            self.path.makedirs()
            self.ca = RootCredential.initialize(
                self.path, b"mycluster", begin=self.start_date
            )
            self.credential = cls.initialize(
                self.path, self.ca, begin=self.start_date, **kwargs
            )
            for k, v in kwargs.items():
                setattr(self, k, v)

        def test_certificate_matches_public_key(self):
            """
            A certificate's public key matches the public key it is
            meant to be paired with.
            """
            self.assertTrue(
                self.credential.credential.keypair.keypair.matches(
                    self.credential.credential.certificate.getPublicKey())
            )

        def test_certificate_matches_private_key(self):
            """
            A certificate matches the private key it is meant to
            be paired with.
            """
            priv = self.credential.credential.keypair.keypair.original
            pub = self.credential.credential.certificate
            pub = pub.getPublicKey().original
            pub_asn1 = crypto.dump_privatekey(crypto.FILETYPE_ASN1, pub)
            priv_asn1 = crypto.dump_privatekey(crypto.FILETYPE_ASN1, priv)
            pub_der = asn1.DerSequence()
            pub_der.decode(pub_asn1)
            priv_der = asn1.DerSequence()
            priv_der.decode(priv_asn1)
            pub_modulus = pub_der[1]
            priv_modulus = priv_der[1]
            self.assertEqual(pub_modulus, priv_modulus)

        def test_written_keypair_reloads(self):
            """
            A keypair written by ``UserCredential.initialize`` can be
            successfully reloaded in to an identical ``ControlCertificate``
            instance.
            """
            self.assertEqual(
                self.credential,
                cls.from_path(self.path, **kwargs)
            )

        def test_create_error_on_non_existent_path(self):
            """
            A ``PathError`` is raised if the path given to
            ``UserCredential.initialize`` does not exist.
            """
            path = FilePath(self.mktemp())
            e = self.assertRaises(
                PathError, cls.initialize,
                path, self.ca, **kwargs
            )
            expected = (b"Unable to write certificate file. "
                        b"No such file or directory {path}").format(
                            path=path.child(self.cert_file_name).path)
            self.assertEqual(str(e), expected)

        def test_load_error_on_non_existent_path(self):
            """
            A ``PathError`` is raised if the path given to
            ``UserCredential.from_path`` does not exist.
            """
            path = FilePath(self.mktemp())
            e = self.assertRaises(
                PathError, cls.from_path,
                path, **kwargs
            )
            expected = (b"Certificate file could not be opened. "
                        b"No such file or directory {path}").format(
                            path=path.child(self.cert_file_name).path)
            self.assertEqual(str(e), expected)

        def test_load_error_on_non_existent_certificate_file(self):
            """
            A ``PathError`` is raised if the certificate file path given to
            ``UserCredential.from_path`` does not exist.
            """
            path = FilePath(self.mktemp())
            path.makedirs()
            e = self.assertRaises(
                PathError, cls.from_path,
                path, **kwargs
            )
            expected = ("Certificate file could not be opened. "
                        "No such file or directory "
                        "{path}").format(
                path=path.child(self.cert_file_name).path)
            self.assertEqual(str(e), expected)

        def test_load_error_on_non_existent_key_file(self):
            """
            A ``PathError`` is raised if the key file path given to
            ``UserCredential.from_path`` does not exist.
            """
            path = FilePath(self.mktemp())
            path.makedirs()
            crt_path = path.child(self.cert_file_name)
            crt_file = crt_path.open(b'w')
            crt_file.write(self.credential.credential.certificate.dumpPEM())
            crt_file.close()
            e = self.assertRaises(
                PathError, cls.from_path,
                path, **kwargs
            )
            expected = ("Private key file could not be opened. "
                        "No such file or directory "
                        "{path}").format(
                            path=path.child(self.key_file_name).path)
            self.assertEqual(str(e), expected)

        @not_root
        @skip_on_broken_permissions
        def test_load_error_on_unreadable_certificate_file(self):
            """
            A ``PathError`` is raised if the certificate file path given to
            ``UserCredential.from_path`` cannot be opened for reading.
            """
            path = FilePath(self.mktemp())
            path.makedirs()
            crt_path = path.child(self.cert_file_name)
            crt_file = crt_path.open(b'w')
            crt_file.write(b"dummy")
            crt_file.close()
            # make file unreadable
            crt_path.chmod(0o100)
            key_path = path.child(self.key_file_name)
            key_file = key_path.open(b'w')
            key_file.write(b"dummy")
            key_file.close()
            # make file unreadable
            key_path.chmod(0o100)
            e = self.assertRaises(
                PathError, cls.from_path,
                path, **kwargs
            )
            expected = (
                "Certificate file could not be opened. "
                "Permission denied {path}"
            ).format(path=crt_path.path)
            self.assertEqual(str(e), expected)

        @not_root
        @skip_on_broken_permissions
        def test_load_error_on_unreadable_key_file(self):
            """
            A ``PathError`` is raised if the key file path given to
            ``UserCredential.from_path`` cannot be opened for reading.
            """
            path = FilePath(self.mktemp())
            path.makedirs()
            crt_path = path.child(self.cert_file_name)
            crt_file = crt_path.open(b'w')
            crt_file.write(self.credential.credential.certificate.dumpPEM())
            crt_file.close()
            key_path = path.child(self.key_file_name)
            key_file = key_path.open(b'w')
            key_file.write(b"dummy")
            key_file.close()
            # make file unreadable
            key_path.chmod(0o100)
            e = self.assertRaises(
                PathError, cls.from_path,
                path, **kwargs
            )
            expected = (
                "Private key file could not be opened. "
                "Permission denied {path}"
            ).format(path=key_path.path)
            self.assertEqual(str(e), expected)

        def test_certificate_ou_matches_ca(self):
            """
            A certificate written by ``UserCredential.initialize`` has the
            issuing authority's organizational unit as its organizational
            unit name.
            """
            cert = self.credential.credential.certificate.original
            issuer = cert.get_issuer()
            subject = cert.get_subject()
            self.assertEqual(
                issuer.OU,
                subject.OU
            )

        def test_certificate_is_signed_by_ca(self):
            """
            A certificate written by ``UserCredential.initialize`` is signed by
            the certificate authority.
            """
            cert = self.credential.credential.certificate.original
            issuer = cert.get_issuer()
            self.assertEqual(
                issuer.CN,
                self.ca.credential.certificate.getSubject().CN
            )

        def test_certificate_expiration(self):
            """
            A certificate written by ``UserCredential.initialize`` has an
            expiry date 20 years from the date of signing.
            """
            cert = self.credential.credential.certificate.original
            date_str = cert.get_notAfter()
            expected_expiry = self.expiry_date.strftime("%Y%m%d%H%M%SZ")
            self.assertEqual(date_str, expected_expiry)

        def test_certificate_is_rsa_4096_sha_256(self):
            """
            A certificate written by ``UserCredential.initialize`` is an RSA
            4096 bit, SHA-256 format.
            """
            cert = self.credential.credential.certificate.original
            key = self.credential.credential.certificate
            key = key.getPublicKey().original
            self.assertEqual(
                (crypto.TYPE_RSA, 4096, b'sha256WithRSAEncryption'),
                (key.type(), key.bits(), cert.get_signature_algorithm())
            )

        def test_keypair_correct_umask(self):
            """
            A keypair file written by ``NodeCredential.initialize`` has
            the correct permissions (0600).
            """
            key_path = self.path.child(self.key_file_name)
            st = os.stat(key_path.path)
            self.assertEqual('0o600', oct(st.st_mode & 0o777))

        def test_certificate_correct_permission(self):
            """
            A certificate file written by ``NodeCredential.initialize`` has
            the correct access mode set (0600).
            """
            cert_path = self.path.child(self.cert_file_name)
            st = os.stat(cert_path.path)
            self.assertEqual('0o600', oct(st.st_mode & 0o777))

        def test_written_keypair_exists(self):
            """
            ``NodeCredential.initialize`` writes a PEM file to the
            specified path.
            """
            self.assertEqual(
                (True, True),
                (self.path.child(self.cert_file_name).exists(),
                 self.path.child(self.key_file_name).exists())
            )
Example #36
class Alpha(object):
    """
    Minecraft Alpha world serializer.

    This serializer supports the classic folder and file layout used in
    Minecraft Alpha and early versions of Minecraft Beta.
    """

    implements(ISerializer)
    classProvides(ISerializerFactory)

    name = "alpha"

    def __init__(self, url):
        parsed = urlparse(url)
        if parsed.scheme != "file":
            raise Exception("I am not okay with scheme %s" % parsed.scheme)

        self.folder = FilePath(parsed.path)
        if not self.folder.exists():
            self.folder.makedirs()
            log.msg("Creating new world in %s" % self.folder)

        self._entity_loaders = {
            "Item": self._load_item_from_tag,
        }

        self._entity_savers = {
            "Item": self._save_item_to_tag,
        }

        self._tile_loaders = {
            "Chest": self._load_chest_from_tag,
            "Furnace": self._load_furnace_from_tag,
            "MobSpawner": self._load_mobspawner_from_tag,
            "Sign": self._load_sign_from_tag,
        }

        self._tile_savers = {
            "Chest": self._save_chest_to_tag,
            "Furnace": self._save_furnace_to_tag,
            "MobSpawner": self._save_mobspawner_to_tag,
            "Sign": self._save_sign_to_tag,
        }

    # Disk I/O helpers. Highly useful for keeping these few lines in one
    # place.

    def _read_tag(self, fp):
        if fp.exists() and fp.getsize():
            return NBTFile(fileobj=fp.open("r"))
        return None

    def _write_tag(self, fp, tag):
        tag.write_file(fileobj=fp.open("w"))

    # Entity serializers.

    def _load_entity_from_tag(self, tag):
        location = Location()

        position = tag["Pos"].tags
        rotation = tag["Rotation"].tags
        location.x = position[0].value
        location.y = position[1].value
        location.z = position[2].value
        location.yaw = rotation[0].value
        location.pitch = rotation[1].value
        location.grounded = bool(tag["OnGround"])

        entity = entities[tag["id"].value](location=location)

        self._entity_loaders[entity.name](entity, tag)

        return entity

    def _save_entity_to_tag(self, entity):
        tag = NBTFile()
        tag.name = ""

        tag["id"] = TAG_String(entity.name)

        position = [entity.location.x, entity.location.y, entity.location.z]
        tag["Pos"] = TAG_List(type=TAG_Double)
        tag["Pos"].tags = [TAG_Double(i) for i in position]

        rotation = [entity.location.yaw, entity.location.pitch]
        tag["Rotation"] = TAG_List(type=TAG_Double)
        tag["Rotation"].tags = [TAG_Double(i) for i in rotation]

        tag["OnGround"] = TAG_Byte(int(entity.location.grounded))

        self._entity_savers[entity.name](entity, tag)

        return tag

    def _load_item_from_tag(self, item, tag):
        item.item = tag["Item"]["id"].value, tag["Item"]["Damage"].value
        item.quantity = tag["Item"]["Count"].value

    def _save_item_to_tag(self, item, tag):
        tag["Item"] = TAG_Compound()
        tag["Item"]["id"] = TAG_Short(item.item[0])
        tag["Item"]["Damage"] = TAG_Short(item.item[1])
        tag["Item"]["Count"] = TAG_Short(item.quantity)

    # Tile serializers. Tiles are blocks and entities at the same time, in the
    # worst way. Each of these helpers will be called during chunk serialize
    # and deserialize automatically; they never need to be called directly.

    def _load_tile_from_tag(self, tag):
        """
        Load a tile from a tag.

        This method will gladly raise exceptions which must be handled by the
        caller.
        """

        tile = tiles[tag["id"].value](tag["x"].value, tag["y"].value,
                                      tag["z"].value)

        self._tile_loaders[tile.name](tile, tag)

        return tile

    def _save_tile_to_tag(self, tile):
        tag = NBTFile()
        tag.name = ""

        tag["id"] = TAG_String(tile.name)

        tag["x"] = TAG_Int(tile.x)
        tag["y"] = TAG_Int(tile.y)
        tag["z"] = TAG_Int(tile.z)

        self._tile_savers[tile.name](tile, tag)

        return tag

    def _load_chest_from_tag(self, chest, tag):
        self._load_inventory_from_tag(chest.inventory, tag["Items"])

    def _save_chest_to_tag(self, chest, tag):
        tag["Items"] = self._save_inventory_to_tag(chest.inventory)

    def _load_furnace_from_tag(self, furnace, tag):
        furnace.burntime = tag["BurnTime"].value
        furnace.cooktime = tag["CookTime"].value

        self._load_inventory_from_tag(furnace.inventory, tag["Items"])

    def _save_furnace_to_tag(self, furnace, tag):
        tag["BurnTime"] = TAG_Short(furnace.burntime)
        tag["CookTime"] = TAG_Short(furnace.cooktime)

        tag["Items"] = self._save_inventory_to_tag(furnace.inventory)

    def _load_mobspawner_from_tag(self, ms, tag):
        ms.mob = tag["EntityId"].value
        ms.delay = tag["Delay"].value

    def _save_mobspawner_to_tag(self, ms, tag):
        tag["EntityId"] = TAG_String(ms.mob)
        tag["Delay"] = TAG_Short(ms.delay)

    def _load_sign_from_tag(self, sign, tag):
        sign.text1 = tag["Text1"].value
        sign.text2 = tag["Text2"].value
        sign.text3 = tag["Text3"].value
        sign.text4 = tag["Text4"].value

    def _save_sign_to_tag(self, sign, tag):
        tag["Text1"] = TAG_String(sign.text1)
        tag["Text2"] = TAG_String(sign.text2)
        tag["Text3"] = TAG_String(sign.text3)
        tag["Text4"] = TAG_String(sign.text4)

    # Chunk serializers. These are split out in order to facilitate reuse in
    # the Beta serializer.

    def _load_chunk_from_tag(self, chunk, tag):
        """
        Load a chunk from a tag.

        We cannot instantiate chunks, ever, so pass it in from above.
        """

        level = tag["Level"]

        # These are designed to raise if there are any issues, but still be
        # speedy.
        chunk.blocks = fromstring(level["Blocks"].value,
                                  dtype=uint8).reshape(chunk.blocks.shape)
        chunk.heightmap = fromstring(level["HeightMap"].value,
                                     dtype=uint8).reshape(
                                         chunk.heightmap.shape)
        chunk.blocklight = array(unpack_nibbles(
            level["BlockLight"].value)).reshape(chunk.blocklight.shape)
        chunk.metadata = array(unpack_nibbles(level["Data"].value)).reshape(
            chunk.metadata.shape)
        chunk.skylight = array(unpack_nibbles(
            level["SkyLight"].value)).reshape(chunk.skylight.shape)

        chunk.populated = bool(level["TerrainPopulated"])

        if "Entities" in level:
            for tag in level["Entities"].tags:
                try:
                    entity = self._load_entity_from_tag(tag)
                    chunk.entities.add(entity)
                except KeyError:
                    print "Unknown entity %s" % tag["id"].value
                    print "Tag for entity:"
                    print tag.pretty_tree()

        if "TileEntities" in level:
            for tag in level["TileEntities"].tags:
                try:
                    tile = self._load_tile_from_tag(tag)
                    chunk.tiles[tile.x, tile.y, tile.z] = tile
                except KeyError:
                    print "Unknown tile entity %s" % tag["id"].value

        chunk.dirty = not chunk.populated

    def _save_chunk_to_tag(self, chunk):
        tag = NBTFile()
        tag.name = ""

        level = TAG_Compound()
        tag["Level"] = level

        level["Blocks"] = TAG_Byte_Array()
        level["HeightMap"] = TAG_Byte_Array()
        level["BlockLight"] = TAG_Byte_Array()
        level["Data"] = TAG_Byte_Array()
        level["SkyLight"] = TAG_Byte_Array()

        level["Blocks"].value = chunk.blocks.tostring()
        level["HeightMap"].value = chunk.heightmap.tostring()
        level["BlockLight"].value = pack_nibbles(chunk.blocklight)
        level["Data"].value = pack_nibbles(chunk.metadata)
        level["SkyLight"].value = pack_nibbles(chunk.skylight)

        level["TerrainPopulated"] = TAG_Byte(chunk.populated)

        level["Entities"] = TAG_List(type=TAG_Compound)
        for entity in chunk.entities:
            try:
                entitytag = self._save_entity_to_tag(entity)
                level["Entities"].tags.append(entitytag)
            except KeyError:
                print "Unknown entity %s" % entity.name

        level["TileEntities"] = TAG_List(type=TAG_Compound)
        for tile in chunk.tiles.itervalues():
            try:
                tiletag = self._save_tile_to_tag(tile)
                level["TileEntities"].tags.append(tiletag)
            except KeyError:
                print "Unknown tile entity %s" % tile.name

        return tag

    def _load_inventory_from_tag(self, inventory, tag):
        """
        Load an inventory from a tag.

        Due to quirks of inventory, we cannot instantiate the inventory here;
        instead, act on an inventory passed in from above.
        """
        items = [None] * len(inventory)

        for item in tag.tags:
            slot = item["Slot"].value
            items[slot] = (item["id"].value, item["Damage"].value,
                           item["Count"].value)

        inventory.load_from_list(items)

    def _save_inventory_to_tag(self, inventory):
        tag = TAG_List(type=TAG_Compound)

        for i, item in enumerate(
                chain(inventory.crafted, inventory.crafting, inventory.armor,
                      inventory.storage, inventory.holdables)):
            if item is not None:
                d = TAG_Compound()
                id, damage, count = item
                d["id"] = TAG_Short(id)
                d["Damage"] = TAG_Short(damage)
                d["Count"] = TAG_Byte(count)
                d["Slot"] = TAG_Byte(i)
                tag.tags.append(d)

        return tag

    def _save_level_to_tag(self, level):
        tag = NBTFile()
        tag.name = ""

        tag["Data"] = TAG_Compound()
        tag["Data"]["RandomSeed"] = TAG_Long(level.seed)
        tag["Data"]["SpawnX"] = TAG_Int(level.spawn[0])
        tag["Data"]["SpawnY"] = TAG_Int(level.spawn[1])
        tag["Data"]["SpawnZ"] = TAG_Int(level.spawn[2])

        return tag

    # ISerializer API.

    def load_chunk(self, chunk):
        first, second, filename = names_for_chunk(chunk.x, chunk.z)
        fp = self.folder.child(first).child(second)
        if not fp.exists():
            fp.makedirs()
        fp = fp.child(filename)

        tag = self._read_tag(fp)
        if not tag:
            return

        self._load_chunk_from_tag(chunk, tag)

    def save_chunk(self, chunk):
        tag = self._save_chunk_to_tag(chunk)

        first, second, filename = names_for_chunk(chunk.x, chunk.z)
        fp = self.folder.child(first).child(second)
        if not fp.exists():
            fp.makedirs()
        fp = fp.child(filename)

        self._write_tag(fp, tag)

    def load_level(self, level):
        tag = self._read_tag(self.folder.child("level.dat"))
        if not tag:
            return

        level.spawn = (tag["Data"]["SpawnX"].value,
                       tag["Data"]["SpawnY"].value,
                       tag["Data"]["SpawnZ"].value)

        level.seed = tag["Data"]["RandomSeed"].value

    def save_level(self, level):
        tag = self._save_level_to_tag(level)

        self._write_tag(self.folder.child("level.dat"), tag)

    def load_player(self, player):
        fp = self.folder.child("players").child("%s.dat" % player.username)
        tag = self._read_tag(fp)
        if not tag:
            return

        player.location.x, player.location.y, player.location.z = [
            i.value for i in tag["Pos"].tags
        ]

        player.location.yaw = tag["Rotation"].tags[0].value
        player.location.pitch = tag["Rotation"].tags[1].value

        if "Inventory" in tag:
            self._load_inventory_from_tag(player.inventory, tag["Inventory"])

    def save_player(self, player):
        tag = NBTFile()
        tag.name = ""

        tag["Pos"] = TAG_List(type=TAG_Double)
        tag["Pos"].tags = [
            TAG_Double(i)
            for i in (player.location.x, player.location.y, player.location.z)
        ]

        tag["Rotation"] = TAG_List(type=TAG_Double)
        tag["Rotation"].tags = [
            TAG_Double(i) for i in (player.location.yaw, player.location.pitch)
        ]

        tag["Inventory"] = self._save_inventory_to_tag(player.inventory)

        fp = self.folder.child("players")
        if not fp.exists():
            fp.makedirs()
        fp = fp.child("%s.dat" % player.username)
        self._write_tag(fp, tag)

    def get_plugin_data_path(self, name):
        return self.folder.child(name + '.dat')

    def load_plugin_data(self, name):
        path = self.get_plugin_data_path(name)
        if not path.exists():
            return ""
        else:
            f = path.open("r")
            return f.read()

    def save_plugin_data(self, name, value):
        path = self.get_plugin_data_path(name)
        path.setContent(value)
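
The chunk serializers above depend on pack_nibbles/unpack_nibbles, which are
imported from elsewhere in the codebase. A minimal sketch of that packing,
assuming the Alpha format stores two 4-bit values per byte with the low nibble
first (the real helpers may differ in ordering and validation):

def pack_nibbles_sketch(values):
    # Pack pairs of 4-bit integers into single bytes; assumes an even number
    # of values, which the chunk arrays above always have.
    return "".join(
        chr((values[i] & 0x0f) | ((values[i + 1] & 0x0f) << 4))
        for i in range(0, len(values), 2))

def unpack_nibbles_sketch(data):
    # Reverse of the above: split each byte back into its two nibbles.
    out = []
    for byte in data:
        value = ord(byte)
        out.append(value & 0x0f)
        out.append(value >> 4)
    return out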
Example #37
class DirDbmTests(unittest.TestCase):
    def setUp(self):
        self.path = FilePath(self.mktemp())
        self.dbm = dirdbm.open(self.path.path)
        self.items = ((b"abc", b"foo"), (b"/lalal", b"\000\001"), (b"\000\012",
                                                                   b"baz"))

    def test_all(self):
        k = b64decode("//==")
        self.dbm[k] = b"a"
        self.dbm[k] = b"a"
        self.assertEqual(self.dbm[k], b"a")

    def test_rebuildInteraction(self):
        s = dirdbm.Shelf("dirdbm.rebuild.test")
        s[b"key"] = b"value"
        rebuild.rebuild(dirdbm)

    def test_dbm(self):
        d = self.dbm

        # Insert keys
        keys = []
        values = set()
        for k, v in self.items:
            d[k] = v
            keys.append(k)
            values.add(v)
        keys.sort()

        # Check they exist
        for k, v in self.items:
            self.assertIn(k, d)
            self.assertEqual(d[k], v)

        # Check non existent key
        try:
            d[b"XXX"]
        except KeyError:
            pass
        else:
            assert 0, "didn't raise KeyError on non-existent key"

        # Check keys(), values() and items()
        dbkeys = d.keys()
        dbvalues = set(d.values())
        dbitems = set(d.items())
        dbkeys.sort()
        items = set(self.items)
        self.assertEqual(
            keys,
            dbkeys,
            f".keys() output didn't match: {repr(keys)} != {repr(dbkeys)}",
        )
        self.assertEqual(
            values,
            dbvalues,
            ".values() output didn't match: {} != {}".format(
                repr(values), repr(dbvalues)),
        )
        self.assertEqual(
            items,
            dbitems,
            f"items() didn't match: {repr(items)} != {repr(dbitems)}",
        )

        copyPath = self.mktemp()
        d2 = d.copyTo(copyPath)

        copykeys = d.keys()
        copyvalues = set(d.values())
        copyitems = set(d.items())
        copykeys.sort()

        self.assertEqual(
            dbkeys,
            copykeys,
            ".copyTo().keys() didn't match: {} != {}".format(
                repr(dbkeys), repr(copykeys)),
        )
        self.assertEqual(
            dbvalues,
            copyvalues,
            ".copyTo().values() didn't match: %s != %s" %
            (repr(dbvalues), repr(copyvalues)),
        )
        self.assertEqual(
            dbitems,
            copyitems,
            ".copyTo().items() didn't match: %s != %s" %
            (repr(dbkeys), repr(copyitems)),
        )

        d2.clear()
        self.assertTrue(
            len(d2.keys()) == len(d2.values()) == len(d2.items()) == len(d2) ==
            0,
            ".clear() failed",
        )
        self.assertNotEqual(len(d), len(d2))
        shutil.rmtree(copyPath)

        # Delete items
        for k, v in self.items:
            del d[k]
            self.assertNotIn(
                k, d, "key is still in database, even though we deleted it")
        self.assertEqual(len(d.keys()), 0, "database has keys")
        self.assertEqual(len(d.values()), 0, "database has values")
        self.assertEqual(len(d.items()), 0, "database has items")
        self.assertEqual(len(d), 0, "database has items")

    def test_modificationTime(self):
        import time

        # The mtime value for files comes from a different place than the
        # gettimeofday() system call. On linux, gettimeofday() can be
        # slightly ahead (due to clock drift which gettimeofday() takes into
        # account but which open()/write()/close() do not), and if we are
        # close to the edge of the next second, time.time() can give a value
        # which is larger than the mtime which results from a subsequent
        # write(). I consider this a kernel bug, but it is beyond the scope
        # of this test. Thus we keep the range of acceptability to 3 seconds.
        # -warner
        self.dbm[b"k"] = b"v"
        self.assertTrue(
            abs(time.time() - self.dbm.getModificationTime(b"k")) <= 3)
        self.assertRaises(KeyError, self.dbm.getModificationTime, b"nokey")

    def test_recovery(self):
        """
        DirDBM: test recovery from directory after a faked crash
        """
        k = self.dbm._encode(b"key1")
        with self.path.child(k + b".rpl").open(mode="wb") as f:
            f.write(b"value")

        k2 = self.dbm._encode(b"key2")
        with self.path.child(k2).open(mode="wb") as f:
            f.write(b"correct")
        with self.path.child(k2 + b".rpl").open(mode="wb") as f:
            f.write(b"wrong")

        with self.path.child("aa.new").open(mode="wb") as f:
            f.write(b"deleted")

        dbm = dirdbm.DirDBM(self.path.path)
        self.assertEqual(dbm[b"key1"], b"value")
        self.assertEqual(dbm[b"key2"], b"correct")
        self.assertFalse(self.path.globChildren("*.new"))
        self.assertFalse(self.path.globChildren("*.rpl"))

    def test_nonStringKeys(self):
        """
        L{dirdbm.DirDBM} operations only support string keys: other types
        should raise a L{TypeError}.
        """
        self.assertRaises(TypeError, self.dbm.__setitem__, 2, "3")
        try:
            self.assertRaises(TypeError, self.dbm.__setitem__, "2", 3)
        except unittest.FailTest:
            # dirdbm.Shelf.__setitem__ supports non-string values
            self.assertIsInstance(self.dbm, dirdbm.Shelf)
        self.assertRaises(TypeError, self.dbm.__getitem__, 2)
        self.assertRaises(TypeError, self.dbm.__delitem__, 2)
        self.assertRaises(TypeError, self.dbm.has_key, 2)
        self.assertRaises(TypeError, self.dbm.__contains__, 2)
        self.assertRaises(TypeError, self.dbm.getModificationTime, 2)

    def test_failSet(self):
        """
        Failure path when setting an item.
        """
        def _writeFail(path, data):
            path.setContent(data)
            raise OSError("fail to write")

        self.dbm[b"failkey"] = b"test"
        self.patch(self.dbm, "_writeFile", _writeFail)
        self.assertRaises(IOError, self.dbm.__setitem__, b"failkey", b"test2")
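
test_recovery above exercises dirdbm's crash-recovery convention. A sketch of
that convention as inferred from the test fixtures, not taken from dirdbm
itself: "*.new" files are interrupted writes to discard, while a "<key>.rpl"
replacement wins only when the plain key file is missing.

import os

def recover_sketch(directory):
    # Discard interrupted writes first.
    for name in os.listdir(directory):
        if name.endswith(".new"):
            os.remove(os.path.join(directory, name))
    # Promote a replacement only if the original did not survive the crash;
    # an intact original is authoritative and the stale replacement is dropped.
    for name in os.listdir(directory):
        if name.endswith(".rpl"):
            path = os.path.join(directory, name)
            original = path[:-len(".rpl")]
            if os.path.exists(original):
                os.remove(path)
            else:
                os.rename(path, original)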
Example #38
class SSHUserAuthClientTests(TestCase):
    """
    Tests for L{SSHUserAuthClient}.

    @type rsaPublic: L{Key}
    @ivar rsaPublic: A public RSA key.
    """

    def setUp(self):
        self.rsaPublic = Key.fromString(keydata.publicRSA_openssh)
        self.tmpdir = FilePath(self.mktemp())
        self.tmpdir.makedirs()
        self.rsaFile = self.tmpdir.child('id_rsa')
        self.rsaFile.setContent(keydata.privateRSA_openssh)
        self.tmpdir.child('id_rsa.pub').setContent(keydata.publicRSA_openssh)


    def test_signDataWithAgent(self):
        """
        When connected to an agent, L{SSHUserAuthClient} can use it to
        request signatures of particular data with a particular L{Key}.
        """
        client = SSHUserAuthClient(b"user", ConchOptions(), None)
        agent = SSHAgentClient()
        transport = StringTransport()
        agent.makeConnection(transport)
        client.keyAgent = agent
        cleartext = b"Sign here"
        client.signData(self.rsaPublic, cleartext)
        self.assertEqual(
            transport.value(),
            b"\x00\x00\x00\x8b\r\x00\x00\x00u" + self.rsaPublic.blob() +
            b"\x00\x00\x00\t" + cleartext +
            b"\x00\x00\x00\x00")


    def test_agentGetPublicKey(self):
        """
        L{SSHUserAuthClient} looks up public keys from the agent using the
        L{SSHAgentClient} class.  L{SSHAgentClient.getPublicKey} returns a
        L{Key} object with one of the public keys in the agent.  If no more
        keys are present, it returns L{None}.
        """
        agent = SSHAgentClient()
        agent.blobs = [self.rsaPublic.blob()]
        key = agent.getPublicKey()
        self.assertTrue(key.isPublic())
        self.assertEqual(key, self.rsaPublic)
        self.assertIsNone(agent.getPublicKey())


    def test_getPublicKeyFromFile(self):
        """
        L{SSHUserAuthClient.getPublicKey()} is able to get a public key from
        the first file described by its options' C{identitys} list, and return
        the corresponding public L{Key} object.
        """
        options = ConchOptions()
        options.identitys = [self.rsaFile.path]
        client = SSHUserAuthClient(b"user",  options, None)
        key = client.getPublicKey()
        self.assertTrue(key.isPublic())
        self.assertEqual(key, self.rsaPublic)


    def test_getPublicKeyAgentFallback(self):
        """
        If an agent is present, but doesn't return a key,
        L{SSHUserAuthClient.getPublicKey} continues with the normal key lookup.
        """
        options = ConchOptions()
        options.identitys = [self.rsaFile.path]
        agent = SSHAgentClient()
        client = SSHUserAuthClient(b"user",  options, None)
        client.keyAgent = agent
        key = client.getPublicKey()
        self.assertTrue(key.isPublic())
        self.assertEqual(key, self.rsaPublic)


    def test_getPublicKeyBadKeyError(self):
        """
        If L{keys.Key.fromFile} raises a L{keys.BadKeyError},
        L{SSHUserAuthClient.getPublicKey} tries again to get a public key by
        calling itself recursively.
        """
        options = ConchOptions()
        self.tmpdir.child('id_dsa.pub').setContent(keydata.publicDSA_openssh)
        dsaFile = self.tmpdir.child('id_dsa')
        dsaFile.setContent(keydata.privateDSA_openssh)
        options.identitys = [self.rsaFile.path, dsaFile.path]
        self.tmpdir.child('id_rsa.pub').setContent(b'not a key!')
        client = SSHUserAuthClient(b"user",  options, None)
        key = client.getPublicKey()
        self.assertTrue(key.isPublic())
        self.assertEqual(key, Key.fromString(keydata.publicDSA_openssh))
        self.assertEqual(client.usedFiles, [self.rsaFile.path, dsaFile.path])


    def test_getPrivateKey(self):
        """
        L{SSHUserAuthClient.getPrivateKey} will load a private key from the
        last used file populated by L{SSHUserAuthClient.getPublicKey}, and
        return a L{Deferred} which fires with the corresponding private L{Key}.
        """
        rsaPrivate = Key.fromString(keydata.privateRSA_openssh)
        options = ConchOptions()
        options.identitys = [self.rsaFile.path]
        client = SSHUserAuthClient(b"user",  options, None)
        # Populate the list of used files
        client.getPublicKey()

        def _cbGetPrivateKey(key):
            self.assertFalse(key.isPublic())
            self.assertEqual(key, rsaPrivate)

        return client.getPrivateKey().addCallback(_cbGetPrivateKey)


    def test_getPrivateKeyPassphrase(self):
        """
        L{SSHUserAuthClient} can get a private key from a file, and return a
        Deferred called back with a private L{Key} object, even if the key is
        encrypted.
        """
        rsaPrivate = Key.fromString(keydata.privateRSA_openssh)
        passphrase = b'this is the passphrase'
        self.rsaFile.setContent(rsaPrivate.toString('openssh', passphrase))
        options = ConchOptions()
        options.identitys = [self.rsaFile.path]
        client = SSHUserAuthClient(b"user",  options, None)
        # Populate the list of used files
        client.getPublicKey()

        def _getPassword(prompt):
            self.assertEqual(
                prompt,
                "Enter passphrase for key '%s': " % (self.rsaFile.path,))
            return nativeString(passphrase)

        def _cbGetPrivateKey(key):
            self.assertFalse(key.isPublic())
            self.assertEqual(key, rsaPrivate)

        self.patch(client, '_getPassword', _getPassword)
        return client.getPrivateKey().addCallback(_cbGetPrivateKey)


    def test_getPassword(self):
        """
        Get the password using
        L{twisted.conch.client.default.SSHUserAuthClient.getPassword}
        """
        class FakeTransport:
            def __init__(self, host):
                self.transport = self
                self.host = host
            def getPeer(self):
                return self

        options = ConchOptions()
        client = SSHUserAuthClient(b"user",  options, None)
        client.transport = FakeTransport("127.0.0.1")

        def getpass(prompt):
            self.assertEqual(prompt, "[email protected]'s password: "******"""
        Get the password using
        L{twisted.conch.client.default.SSHUserAuthClient.getPassword}
        using a different prompt.
        """
        options = ConchOptions()
        client = SSHUserAuthClient(b"user",  options, None)
        prompt = b"Give up your password"

        def getpass(p):
            self.assertEqual(p, nativeString(prompt))
            return 'bad password'

        self.patch(default.getpass, 'getpass', getpass)
        d = client.getPassword(prompt)
        d.addCallback(self.assertEqual, b'bad password')
        return d

    test_getPasswordPrompt.skip = windowsSkip or ttySkip


    def test_getPasswordConchError(self):
        """
        Get the password using
        L{twisted.conch.client.default.SSHUserAuthClient.getPassword}
        and trigger a L{twisted.conch.error.ConchError}.
        """
        options = ConchOptions()
        client = SSHUserAuthClient(b"user",  options, None)

        def getpass(prompt):
            raise KeyboardInterrupt("User pressed CTRL-C")

        self.patch(default.getpass, 'getpass', getpass)
        stdout, stdin = sys.stdout, sys.stdin
        d = client.getPassword(b'?')
        @d.addErrback
        def check_sys(fail):
            self.assertEqual(
                [stdout, stdin], [sys.stdout, sys.stdin])
            return fail
        self.assertFailure(d, ConchError)

    test_getPasswordConchError.skip = windowsSkip or ttySkip


    def test_getGenericAnswers(self):
        """
        L{twisted.conch.client.default.SSHUserAuthClient.getGenericAnswers}
        """
        options = ConchOptions()
        client = SSHUserAuthClient(b"user",  options, None)

        def getpass(prompt):
            self.assertEqual(prompt, "pass prompt")
            return "getpass"

        self.patch(default.getpass, 'getpass', getpass)

        def raw_input(prompt):
            self.assertEqual(prompt, "raw_input prompt")
            return "raw_input"

        self.patch(default, 'raw_input', raw_input)
        d = client.getGenericAnswers(
            b"Name", b"Instruction", [
                (b"pass prompt", False), (b"raw_input prompt", True)])
        d.addCallback(
            self.assertListEqual, ["getpass", "raw_input"])
        return d

    test_getGenericAnswers.skip = windowsSkip or ttySkip
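
The byte string asserted in test_signDataWithAgent encodes an SSH agent sign
request. As a decoding aid, a minimal sketch of that framing, illustrative
rather than Twisted's implementation: a 4-byte big-endian length prefix,
message type 13 (SSH_AGENTC_SIGN_REQUEST), a length-prefixed key blob,
length-prefixed data, and a 4-byte flags field.

import struct

def agent_sign_request_sketch(blob, data, flags=0):
    # b'\r' is message type 13, SSH_AGENTC_SIGN_REQUEST.
    body = (b'\r'
            + struct.pack('>I', len(blob)) + blob
            + struct.pack('>I', len(data)) + data
            + struct.pack('>I', flags))
    # The complete message is itself length-prefixed.
    return struct.pack('>I', len(body)) + body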
Example #39
class OpenSSHFactoryTests(TestCase):
    """
    Tests for L{OpenSSHFactory}.
    """
    if getattr(os, "geteuid", None) is None:
        skip = "geteuid/seteuid not available"
    elif OpenSSHFactory is None:
        skip = "Cannot run without PyCrypto or PyASN1"

    def setUp(self):
        self.factory = OpenSSHFactory()
        self.keysDir = FilePath(self.mktemp())
        self.keysDir.makedirs()
        self.factory.dataRoot = self.keysDir.path

        self.keysDir.child("ssh_host_foo").setContent("foo")
        self.keysDir.child("bar_key").setContent("foo")
        self.keysDir.child("ssh_host_one_key").setContent(
            keydata.privateRSA_openssh)
        self.keysDir.child("ssh_host_two_key").setContent(
            keydata.privateDSA_openssh)
        self.keysDir.child("ssh_host_three_key").setContent(
            "not a key content")

        self.keysDir.child("ssh_host_one_key.pub").setContent(
            keydata.publicRSA_openssh)

        self.mockos = MockOS()
        self.patch(os, "seteuid", self.mockos.seteuid)
        self.patch(os, "setegid", self.mockos.setegid)

    def test_getPublicKeys(self):
        """
        L{OpenSSHFactory.getPublicKeys} should return the available public keys
        in the data directory.
        """
        keys = self.factory.getPublicKeys()
        self.assertEquals(len(keys), 1)
        keyTypes = keys.keys()
        self.assertEqual(keyTypes, ['ssh-rsa'])

    def test_getPrivateKeys(self):
        """
        L{OpenSSHFactory.getPrivateKeys} should return the available private
        keys in the data directory.
        """
        keys = self.factory.getPrivateKeys()
        self.assertEquals(len(keys), 2)
        keyTypes = keys.keys()
        self.assertEqual(set(keyTypes), set(['ssh-rsa', 'ssh-dss']))
        self.assertEquals(self.mockos.seteuidCalls, [])
        self.assertEquals(self.mockos.setegidCalls, [])

    def test_getPrivateKeysAsRoot(self):
        """
        L{OpenSSHFactory.getPrivateKeys} should switch to root if the keys
        aren't readable by the current user.
        """
        keyFile = self.keysDir.child("ssh_host_two_key")
        # Fake permission error by changing the mode
        keyFile.chmod(0000)
        self.addCleanup(keyFile.chmod, 0777)
        # And restore the right mode when seteuid is called
        savedSeteuid = os.seteuid

        def seteuid(euid):
            keyFile.chmod(0777)
            return savedSeteuid(euid)

        self.patch(os, "seteuid", seteuid)
        keys = self.factory.getPrivateKeys()
        self.assertEquals(len(keys), 2)
        keyTypes = keys.keys()
        self.assertEqual(set(keyTypes), set(['ssh-rsa', 'ssh-dss']))
        self.assertEquals(self.mockos.seteuidCalls, [0, os.geteuid()])
        self.assertEquals(self.mockos.setegidCalls, [0, os.getegid()])
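
The fixtures in setUp imply a naming convention for host key files. A
simplified sketch of the private-key scan these tests exercise, inferred from
the fixtures rather than taken from OpenSSHFactory itself:

import os

from twisted.conch.ssh.keys import BadKeyError, Key

def scan_private_keys_sketch(data_root):
    keys = {}
    for name in os.listdir(data_root):
        # Only files following the "ssh_host_*_key" convention qualify.
        if name.startswith('ssh_host_') and name.endswith('_key'):
            try:
                key = Key.fromFile(os.path.join(data_root, name))
            except (BadKeyError, IOError):
                continue  # unreadable or malformed keys are skipped
            keys[key.sshType()] = key
    return keys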
Example #40
class FileUploadResource(Resource):
    """
    Twisted Web resource that handles file uploads over `HTTP/POST` requests.
    """

    log = make_logger()

    def __init__(self,
                 upload_directory,
                 temp_directory,
                 form_fields,
                 upload_session,
                 options=None):
        """

        :param upload_directory: The target directory where uploaded files will be stored.
        :type upload_directory: str
        :param temp_directory: A temporary directory where chunks of a file being uploaded are stored.
        :type temp_directory: str
        :param form_fields: Names of HTML form fields used for uploading.
        :type form_fields: dict
        :param upload_session: An instance of `ApplicationSession` used for publishing progress events.
        :type upload_session: obj
        :param options: Options for file upload.
        :type options: dict or None
        """

        Resource.__init__(self)
        self._uploadRoot = FilePath(upload_directory)
        self._tempDirRoot = FilePath(temp_directory)
        self._form_fields = form_fields
        self._fileupload_session = upload_session
        self._options = options or {}
        self._max_file_size = self._options.get('max_file_size',
                                                10 * 1024 * 1024)
        self._fileTypes = self._options.get('file_types', None)
        self._file_permissions = self._options.get('file_permissions', None)

        # track uploaded files / chunks
        self._uploads = {}

        self.log.info('Upload Resource started.')

        # Scan the temp dir for previously uploaded chunks and fill the
        # _uploads dict with them so that existing uploads can be resumed.
        # All other remains are purged.
        for fileTempDir in self._tempDirRoot.children():
            fileTempName = fileTempDir.basename()
            if fileTempDir.isdir():
                self._uploads[fileTempName] = {
                    'chunk_list': [],
                    'origin': 'startup'
                }
                for chunk in fileTempDir.listdir():
                    if chunk[:6] == 'chunk_':
                        self._uploads[fileTempName]['chunk_list'].append(
                            int(chunk[6:]))
                    else:
                        fileTempDir.child(chunk).remove()
                # if no chunks detected then remove remains completely
                if len(self._uploads[fileTempName]['chunk_list']) == 0:
                    fileTempDir.remove()
                    self._uploads.pop(fileTempName, None)
            else:  # fileTempDir is a file remaining from a single chunk upload
                fileTempDir.remove()

        self.log.debug("Scanned pending uploads: {uploads}",
                       uploads=self._uploads)

    def render_POST(self, request):
        headers = {
            x.decode('iso-8859-1'): y.decode('iso-8859-1')
            for x, y in request.getAllHeaders().items()
        }

        origin = headers['host']

        postFields = cgi.FieldStorage(fp=request.content,
                                      headers=headers,
                                      environ={"REQUEST_METHOD": "POST"})

        f = self._form_fields

        filename = postFields[f['file_name']].value
        totalSize = int(postFields[f['total_size']].value)
        totalChunks = int(postFields[f['total_chunks']].value)
        chunkSize = int(postFields[f['chunk_size']].value)
        chunkNumber = int(postFields[f['chunk_number']].value)
        fileContent = postFields[f['content']].value

        if 'chunk_extra' in f and f['chunk_extra'] in postFields:
            chunk_extra = json.loads(postFields[f['chunk_extra']].value)
        else:
            chunk_extra = {}

        if 'finish_extra' in f and f['finish_extra'] in postFields:
            finish_extra = json.loads(postFields[f['finish_extra']].value)
        else:
            finish_extra = {}

        fileId = filename

        # # prepare user specific upload areas
        # # NOT YET IMPLEMENTED
        # #
        # if 'auth_id' in f and f['auth_id'] in postFields:
        #     auth_id = postFields[f['auth_id']].value
        #     mydir = os.path.join(self._uploadRoot, auth_id)
        #     my_temp_dir = os.path.join(self._tempDirRoot, auth_id)
        #
        #     # check if auth_id is a valid directory_name
        #     #
        #     if auth_id != auth_id.encode('ascii', 'ignore'):
        #         msg = "The requestor auth_id must be an ascii string."
        #         if self._debug:
        #             log.msg(msg)
        #         # 415 Unsupported Media Type
        #         request.setResponseCode(415, msg)
        #         return msg
        # else:
        #     auth_id = 'anonymous'

        # create user specific folder

        # mydir = self._uploadRoot
        # my_temp_dir = self._tempDirRoot

        # if not os.path.exists(mydir):
        #     os.makedirs(mydir)
        # if not os.path.exists(my_temp_dir):
        #     os.makedirs(my_temp_dir)

        # prepare the on_progress publisher
        if 'on_progress' in f and f['on_progress'] in postFields and self._fileupload_session != {}:
            topic = postFields[f['on_progress']].value

            if 'session' in f and f['session'] in postFields:
                session = int(postFields[f['session']].value)
                publish_options = PublishOptions(eligible=[session])
            else:
                publish_options = None

            def fileupload_publish(payload):
                self._fileupload_session.publish(topic,
                                                 payload,
                                                 options=publish_options)
        else:

            def fileupload_publish(payload):
                pass

        # Register upload right at the start to avoid overlapping upload conflicts
        #
        if fileId not in self._uploads:
            self._uploads[fileId] = {'chunk_list': [], 'origin': origin}
            chunk_is_first = True
            self.log.debug(
                'Started upload of file: file_name={file_name}, total_size={total_size}, total_chunks={total_chunks}, chunk_size={chunk_size}, chunk_number={chunk_number}',
                file_name=fileId,
                total_size=totalSize,
                total_chunks=totalChunks,
                chunk_size=chunkSize,
                chunk_number=chunkNumber)
        else:
            chunk_is_first = False
            # If the chunks were read at crossbar startup, any client may claim and resume the pending upload!
            #
            upl = self._uploads[fileId]
            if upl['origin'] == 'startup':
                self.log.debug(
                    'Will try to resume upload of file: file_name={file_name}, total_size={total_size}, total_chunks={total_chunks}, chunk_size={chunk_size}, chunk_number={chunk_number}',
                    file_name=fileId,
                    total_size=totalSize,
                    total_chunks=totalChunks,
                    chunk_size=chunkSize,
                    chunk_number=chunkNumber)
                upl['origin'] = origin
            else:
                # check if another session is uploading this file already
                #
                if upl['origin'] != origin:
                    msg = "File being uploaded is already uploaded in a different session."
                    self.log.debug(msg)
                    # 409 Conflict
                    request.setResponseCode(409, msg.encode('utf8'))
                    return msg.encode('utf8')
                else:
                    # check if the chunk is being uploaded in this very session already
                    # this should never happen !
                    if chunkNumber in upl['chunk_list']:
                        msg = "Chunk beeing uploaded is already uploading."
                        self.log.debug(msg)
                        # Don't throw a conflict. This may be a wanted behaviour.
                        # Even if an upload would be resumable, you don't have to resume.
                        # 409 Conflict
                        # request.setResponseCode(409, msg.encode('utf8'))
                        # return msg.encode('utf8')

        # check file size
        #
        if totalSize > self._max_file_size:
            msg = "Size {} of file to be uploaded exceeds maximum {}".format(
                totalSize, self._max_file_size)
            self.log.debug(msg)
            # 413 Request Entity Too Large
            request.setResponseCode(413, msg.encode('utf8'))
            return msg.encode('utf8')

        # check file extensions
        #
        extension = os.path.splitext(filename)[1]
        if self._fileTypes and extension not in self._fileTypes:
            msg = "Type '{}' of file to be uploaded is in allowed types {}".format(
                extension, self._fileTypes)
            self.log.debug(msg)
            # 415 Unsupported Media Type
            request.setResponseCode(415, msg.encode('utf8'))
            return msg.encode('utf8')

        # TODO: check mime type
        #
        fileTempDir = self._tempDirRoot.child(fileId)
        chunkName = fileTempDir.child('chunk_' + str(chunkNumber))
        _chunkName = fileTempDir.child('#kfhf3kz412uru578e38viokbjhfvz4w__' +
                                       'chunk_' + str(chunkNumber))

        def mergeFile():
            # every chunk has to check if it is the last chunk written, except in a single chunk scenario
            if totalChunks > 1 and len(
                    self._uploads[fileId]['chunk_list']) >= totalChunks:
                # last chunk
                self.log.debug(
                    'Finished file upload after chunk {chunk_number} with chunk_list {chunk_list}',
                    chunk_number=chunkNumber,
                    chunk_list=self._uploads)

                # Merge all files into one file and remove the temp files
                # TODO: How to avoid the extra file IO?
                finalFileName = self._uploadRoot.child(fileId)
                _finalFileName = fileTempDir.child(
                    '#kfhf3kz412uru578e38viokbjhfvz4w__' + fileId)
                with open(_finalFileName.path, 'wb') as _finalFile:
                    for cn in range(1, totalChunks + 1):
                        with open(
                                fileTempDir.child('chunk_' + str(cn)).path,
                                'rb') as ff:
                            _finalFile.write(ff.read())

                _finalFileName.moveTo(finalFileName)

                if self._file_permissions:
                    perm = int(self._file_permissions, 8)
                    try:
                        finalFileName.chmod(perm)
                    except Exception as e:
                        msg = "file upload resource - could not change file permissions of uploaded file"
                        self.log.debug(msg)
                        self.log.debug(e)
                        self._uploads.pop(fileId, None)
                        request.setResponseCode(500, msg.encode('utf8'))
                        return msg.encode('utf8')
                    else:
                        self.log.debug(
                            "Changed permissions on {file_name} to {permissions}",
                            file_name=finalFileName,
                            permissions=self._file_permissions)

                # remove the file temp folder
                fileTempDir.remove()

                self._uploads.pop(fileId, None)

                # publish file upload progress to file_progress_URI
                fileupload_publish({
                    "id": fileId,
                    "chunk": chunkNumber,
                    "name": filename,
                    "total": totalSize,
                    "remaining": 0,
                    "status": "finished",
                    "progress": 1.,
                    "finish_extra": finish_extra,
                    "chunk_extra": chunk_extra
                })

        if chunk_is_first:
            # first chunk of file

            # publish file upload start
            #
            fileupload_publish({
                "id": fileId,
                "chunk": chunkNumber,
                "name": filename,
                "total": totalSize,
                "remaining": totalSize,
                "status": "started",
                "progress": 0.,
                "chunk_extra": chunk_extra
            })

            if totalChunks == 1:
                # only one chunk overall -> write file directly
                finalFileName = self._uploadRoot.child(fileId)
                _finalFileName = self._tempDirRoot.child(
                    '#kfhf3kz412uru578e38viokbjhfvz4w__' + fileId)

                with open(_finalFileName.path, 'wb') as _finalFile:
                    _finalFile.write(fileContent)

                if self._file_permissions:
                    perm = int(self._file_permissions, 8)
                    try:
                        _finalFileName.chmod(perm)
                    except Exception as e:
                        # finalFileName.remove()
                        msg = "Could not change file permissions of uploaded file"
                        self.log.debug(msg)
                        self.log.debug(e)
                        request.setResponseCode(500, msg.encode('utf8'))
                        return msg.encode('utf8')
                    else:
                        self.log.debug(
                            "Changed permissions on {file_name} to {permissions}",
                            file_name=finalFileName,
                            permissions=self._file_permissions)

                _finalFileName.moveTo(finalFileName)
                if chunkNumber not in self._uploads[fileId]['chunk_list']:
                    self._uploads[fileId]['chunk_list'].append(chunkNumber)

                self._uploads.pop(fileId, None)

                # publish file upload progress to file_progress_URI
                fileupload_publish({
                    "id": fileId,
                    "chunk": chunkNumber,
                    "name": filename,
                    "total": totalSize,
                    "remaining": 0,
                    "status": "finished",
                    "progress": 1.,
                    "finish_extra": finish_extra,
                    "chunk_extra": chunk_extra
                })

            else:
                # first of more chunks
                # fileTempDir.remove()  # any potential conflict should have been resolved above. This should not be necessary!
                if not os.path.isdir(fileTempDir.path):
                    fileTempDir.makedirs()

                with open(_chunkName.path, 'wb') as chunk:
                    chunk.write(fileContent)
                _chunkName.moveTo(chunkName)  # atomic file system operation
                self.log.debug('chunk_' + str(chunkNumber) +
                               ' written and moved to ' + chunkName.path)
                # publish file upload progress
                #
                fileupload_publish({
                    "id":
                    fileId,
                    "chunk":
                    chunkNumber,
                    "name":
                    filename,
                    "total":
                    totalSize,
                    "remaining":
                    totalSize - chunkSize,
                    "status":
                    "progress",
                    "progress":
                    round(float(chunkSize) / float(totalSize), 3),
                    "chunk_extra":
                    chunk_extra
                })
                if chunkNumber not in self._uploads[fileId]['chunk_list']:
                    self._uploads[fileId]['chunk_list'].append(chunkNumber)
                mergeFile()
            # clean the temp dir once per file upload
            self._remove_stale_uploads()

        else:
            # intermediate chunk
            if not os.path.isdir(fileTempDir.path):
                fileTempDir.makedirs()

            with open(_chunkName.path, 'wb') as chunk:
                chunk.write(fileContent)
            _chunkName.moveTo(chunkName)
            self.log.debug('chunk_' + str(chunkNumber) +
                           ' written and moved to ' + chunkName.path)

            if chunkNumber not in self._uploads[fileId]['chunk_list']:
                self._uploads[fileId]['chunk_list'].append(chunkNumber)

            received = sum(
                fileTempDir.child(f).getsize() for f in fileTempDir.listdir())

            fileupload_publish({
                "id":
                fileId,
                "chunk":
                chunkNumber,
                "name":
                filename,
                "total":
                totalSize,
                "remaining":
                totalSize - received,
                "status":
                "progress",
                "progress":
                round(float(received) / float(totalSize), 3),
                "chunk_extra":
                chunk_extra
            })
            mergeFile()
        # no errors encountered -> respond success
        request.setResponseCode(200)
        return b''

    def _remove_stale_uploads(self):
        """
        This only works if there is a temp folder exclusive to crossbar file
        uploads. If the system temp folder is used, then crossbar creates a
        "crossbar-uploads" directory there and uses that as the temp folder
        for uploads. Without regular cleanup, an attacker could fill up the
        OS file system.
        """
        for fileTempDir in self._tempDirRoot.children():
            self.log.debug('REMOVE STALE UPLOADS ' +
                           str(fileTempDir.basename()))
            if fileTempDir.isdir() and (
                    fileTempDir.basename()) not in self._uploads:
                fileTempDir.remove()

    def render_GET(self, request):
        """
        This method can be used to check whether a chunk has been uploaded already.
        It returns with HTTP status code `200` if yes and `404` if not.
        The request needs to contain the file identifier and the chunk number to check for.
        """
        for param in ['file_name', 'chunk_number']:
            if not self._form_fields[param].encode(
                    'iso-8859-1') in request.args:
                msg = "file upload resource - missing request query parameter '{}', configured from '{}'".format(
                    self._form_fields[param], param)
                self.log.debug(msg)
                # 400 Bad Request
                request.setResponseCode(400, msg.encode('utf8'))
                return msg.encode('utf8')

        file_name = request.args[self._form_fields['file_name'].encode(
            'iso-8859-1')][0].decode('utf8')
        chunk_number = int(
            request.args[self._form_fields['chunk_number'].encode(
                'iso-8859-1')][0].decode('utf8'))

        # a complete upload will be repeated; an incomplete upload will be resumed
        if file_name in self._uploads and chunk_number in self._uploads[
                file_name]['chunk_list']:
            self.log.debug(
                "Skipping chunk upload {file_name} of chunk {chunk_number}",
                file_name=file_name,
                chunk_number=chunk_number)
            msg = b"chunk of file already uploaded"
            request.setResponseCode(200, msg)
            return msg
        else:
            msg = b"chunk of file not yet uploaded"
            request.setResponseCode(404, msg)
            return msg
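
For orientation, a client-side sketch of the multipart POST that render_POST
consumes. The field names below mirror the keys this resource reads through
its form_fields mapping; real deployments configure their own names, and the
requests library is an assumption, not part of crossbar.

import requests

def upload_in_chunks(url, filename, data, chunk_size=1024 * 1024):
    total_chunks = max(1, (len(data) + chunk_size - 1) // chunk_size)
    for number in range(1, total_chunks + 1):  # chunk numbers are 1-based
        chunk = data[(number - 1) * chunk_size:number * chunk_size]
        fields = {
            'file_name': filename,
            'total_size': str(len(data)),
            'total_chunks': str(total_chunks),
            'chunk_size': str(chunk_size),
            'chunk_number': str(number),
        }
        response = requests.post(
            url, data=fields, files={'content': (filename, chunk)})
        response.raise_for_status()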
Example #41
def run(args=None):
    """
    Parses the configuration options in C{args} and runs C{dpkg-buildpackage}
    to create a .deb file.

    @param args: List of strings representing the C{tap2deb} configuration
        options.
    @type args: L{list}
    """
    try:
        config = MyOptions()
        config.parseOptions(args)
    except usage.error as ue:
        sys.exit("%s: %s" % (sys.argv[0], ue))

    tapFile = config['tapfile']
    baseTapFile = os.path.basename(config['tapfile'])
    protocol = (config['protocol'] or os.path.splitext(baseTapFile)[0])
    debFile = config['debfile'] or 'twisted-' + protocol
    version = config['set-version']
    maintainer = config['maintainer']
    description = config['description'] or (
        'A Twisted-based server for %(protocol)s' % vars())
    longDescription = config['long_description'] or\
        'Automatically created by tap2deb'
    twistdOption = type_dict[config['type']]
    date = now()
    directory = debFile + '-' + version
    pythonVersion = '%s.%s' % sys.version_info[:2]
    buildDir = FilePath('.build').child(directory)

    if buildDir.exists():
        buildDir.remove()

    debianDir = buildDir.child('debian')
    debianDir.child('source').makedirs()
    shutil.copy(tapFile, buildDir.path)

    debianDir.child('README.Debian').setContent(
        '''This package was auto-generated by tap2deb\n''')

    debianDir.child('conffiles').setContent('''\
/etc/init.d/%(debFile)s
/etc/default/%(debFile)s
/etc/%(baseTapFile)s
''' % vars())

    debianDir.child('default').setContent('''\
pidfile=/var/run/%(debFile)s.pid
rundir=/var/lib/%(debFile)s/
file=/etc/%(tapFile)s
logfile=/var/log/%(debFile)s.log
 ''' % vars())

    debianDir.child('init.d').setContent('''\
#!/bin/sh

PATH=/sbin:/bin:/usr/sbin:/usr/bin

pidfile=/var/run/%(debFile)s.pid \
rundir=/var/lib/%(debFile)s/ \
file=/etc/%(tapFile)s \
logfile=/var/log/%(debFile)s.log

[ -r /etc/default/%(debFile)s ] && . /etc/default/%(debFile)s

test -x /usr/bin/twistd%(pythonVersion)s || exit 0
test -r $file || exit 0
test -r /usr/share/%(debFile)s/package-installed || exit 0


case "$1" in
    start)
        echo -n "Starting %(debFile)s: twistd"
        start-stop-daemon --start --quiet --exec /usr/bin/twistd%(pythonVersion)s -- \
                          --pidfile=$pidfile \
                          --rundir=$rundir \
                          --%(twistdOption)s=$file \
                          --logfile=$logfile
        echo "."
    ;;

    stop)
        echo -n "Stopping %(debFile)s: twistd"
        start-stop-daemon --stop --quiet  \
            --pidfile $pidfile
        echo "."
    ;;

    restart)
        $0 stop
        $0 start
    ;;

    force-reload)
        $0 restart
    ;;

    *)
        echo "Usage: /etc/init.d/%(debFile)s {start|stop|restart|force-reload}" >&2
        exit 1
    ;;
esac

exit 0
''' % vars())

    debianDir.child('init.d').chmod(0755)

    debianDir.child('postinst').setContent('''\
#!/bin/sh
update-rc.d %(debFile)s defaults >/dev/null
invoke-rc.d %(debFile)s start
''' % vars())

    debianDir.child('prerm').setContent('''\
#!/bin/sh
invoke-rc.d %(debFile)s stop
''' % vars())

    debianDir.child('postrm').setContent('''\
#!/bin/sh
if [ "$1" = purge ]; then
        update-rc.d %(debFile)s remove >/dev/null
fi
''' % vars())

    debianDir.child('changelog').setContent('''\
%(debFile)s (%(version)s) unstable; urgency=low

  * Created by tap2deb

 -- %(maintainer)s  %(date)s

''' % vars())

    debianDir.child('control').setContent('''\
Source: %(debFile)s
Section: net
Priority: extra
Maintainer: %(maintainer)s
Build-Depends-Indep: debhelper
Standards-Version: 3.5.6

Package: %(debFile)s
Architecture: all
Depends: python%(pythonVersion)s-twisted
Description: %(description)s
 %(longDescription)s
''' % vars())

    debianDir.child('copyright').setContent('''\
This package was auto-debianized by %(maintainer)s on
%(date)s

It was auto-generated by tap2deb

Upstream Author(s): 
Moshe Zadka <*****@*****.**> -- tap2deb author

Copyright:

Insert copyright here.
''' % vars())

    debianDir.child('dirs').setContent('''\
etc/init.d
etc/default
var/lib/%(debFile)s
usr/share/doc/%(debFile)s
usr/share/%(debFile)s
''' % vars())

    debianDir.child('rules').setContent('''\
#!/usr/bin/make -f

export DH_COMPAT=1

build: build-stamp
build-stamp:
	dh_testdir
	touch build-stamp

clean:
	dh_testdir
	dh_testroot
	rm -f build-stamp install-stamp
	dh_clean

install: install-stamp
install-stamp: build-stamp
	dh_testdir
	dh_testroot
	dh_clean -k
	dh_installdirs

	# Add here commands to install the package into debian/tmp.
	cp %(baseTapFile)s debian/tmp/etc/
	cp debian/init.d debian/tmp/etc/init.d/%(debFile)s
	cp debian/default debian/tmp/etc/default/%(debFile)s
	cp debian/copyright debian/tmp/usr/share/doc/%(debFile)s/
	cp debian/README.Debian debian/tmp/usr/share/doc/%(debFile)s/
	touch debian/tmp/usr/share/%(debFile)s/package-installed
	touch install-stamp

binary-arch: build install

binary-indep: build install
	dh_testdir
	dh_testroot
	dh_strip
	dh_compress
	dh_installchangelogs
	dh_fixperms
	dh_installdeb
	dh_shlibdeps
	dh_gencontrol
	dh_md5sums
	dh_builddeb

source diff:
	@echo >&2 'source and diff are obsolete - use dpkg-source -b'; false

binary: binary-indep binary-arch
.PHONY: build clean binary-indep binary-arch binary install
''' % vars())

    debianDir.child('rules').chmod(0755)

    args = ["dpkg-buildpackage", "-rfakeroot"]
    if config['unsigned']:
        args = args + ['-uc', '-us']

    # Build deb
    job = subprocess.Popen(args,
                           stdout=subprocess.PIPE,
                           stderr=subprocess.STDOUT,
                           cwd=buildDir.path)
    stdout, _ = job.communicate()
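
A usage sketch, assuming the long option names accepted by MyOptions mirror
the config keys read above; the values are purely illustrative:

if __name__ == '__main__':
    # Build an unsigned .deb from an existing .tap file.
    run(['--tapfile', 'myservice.tap',
         '--maintainer', 'Jane Doe <jane@example.com>',
         '--unsigned'])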
Example #42
class DeveloperSetupTests(unittest.TestCase):
    """
    These tests verify things about the plugin system without actually
    interacting with the deployed 'twisted.plugins' package, instead creating a
    temporary package.
    """
    def setUp(self):
        """
        Create a complex environment with multiple entries on sys.path, akin to
        a developer's environment who has a development (trunk) checkout of
        Twisted, a system installed version of Twisted (for their operating
        system's tools) and a project which provides Twisted plugins.
        """
        self.savedPath = sys.path[:]
        self.savedModules = sys.modules.copy()
        self.fakeRoot = FilePath(self.mktemp())
        self.fakeRoot.createDirectory()
        self.systemPath = self.fakeRoot.child('system_path')
        self.devPath = self.fakeRoot.child('development_path')
        self.appPath = self.fakeRoot.child('application_path')
        self.systemPackage = _createPluginDummy(self.systemPath,
                                                pluginFileContents('system'),
                                                True, 'plugindummy_builtin')
        self.devPackage = _createPluginDummy(self.devPath,
                                             pluginFileContents('dev'), True,
                                             'plugindummy_builtin')
        self.appPackage = _createPluginDummy(self.appPath,
                                             pluginFileContents('app'), False,
                                             'plugindummy_app')

        # Now we're going to do the system installation.
        sys.path.extend([x.path for x in [self.systemPath, self.appPath]])
        # Run all the way through the plugins list to cause the
        # L{plugin.getPlugins} generator to write cache files for the system
        # installation.
        self.getAllPlugins()
        self.sysplug = self.systemPath.child('plugindummy').child('plugins')
        self.syscache = self.sysplug.child('dropin.cache')
        # Make sure there's a nice big difference in modification times so that
        # we won't re-build the system cache.
        now = time.time()
        os.utime(
            self.sysplug.child('plugindummy_builtin.py').path,
            (now - 5000, ) * 2)
        os.utime(self.syscache.path, (now - 2000, ) * 2)
        # For extra realism, let's make sure that the system path is no longer
        # writable.
        self.lockSystem()
        self.resetEnvironment()

    def lockSystem(self):
        """
        Lock the system directories, as if they were unwritable by this user.
        """
        os.chmod(self.sysplug.path, 0o555)
        os.chmod(self.syscache.path, 0o555)

    def unlockSystem(self):
        """
        Unlock the system directories, as if they were writable by this user.
        """
        os.chmod(self.sysplug.path, 0o777)
        os.chmod(self.syscache.path, 0o777)

    def getAllPlugins(self):
        """
        Get all the plugins loadable from our dummy package, and return their
        short names.
        """
        # Import the module we just added to our path.  (Local scope because
        # this package doesn't exist outside of this test.)
        import plugindummy.plugins
        x = list(plugin.getPlugins(ITestPlugin, plugindummy.plugins))
        return [plug.__name__ for plug in x]

    def resetEnvironment(self):
        """
        Change the environment to what it should be just as the test is
        starting.
        """
        self.unsetEnvironment()
        sys.path.extend(
            [x.path for x in [self.devPath, self.systemPath, self.appPath]])

    def unsetEnvironment(self):
        """
        Change the Python environment back to what it was before the test was
        started.
        """
        invalidateImportCaches()
        sys.modules.clear()
        sys.modules.update(self.savedModules)
        sys.path[:] = self.savedPath

    def tearDown(self):
        """
        Reset the Python environment to what it was before this test ran, and
        restore permissions on files which were marked read-only so that the
        directory may be cleanly cleaned up.
        """
        self.unsetEnvironment()
        # Normally we wouldn't "clean up" the filesystem like this (leaving
        # things for post-test inspection), but if we left the permissions the
        # way they were, we'd be leaving files around that the buildbots
        # couldn't delete, and that would be bad.
        self.unlockSystem()

    def test_developmentPluginAvailability(self):
        """
        Plugins added in the development path should be loadable, even when
        the (now non-importable) system path contains its own idea of the
        list of plugins for a package.  Conversely, plugins added in the
        system path should not be available.
        """
        # Run 3 times: uncached, cached, and then cached again to make sure we
        # didn't overwrite / corrupt the cache on the cached try.
        for x in range(3):
            names = self.getAllPlugins()
            names.sort()
            self.assertEqual(names, ['app', 'dev'])

    def test_freshPyReplacesStalePyc(self):
        """
        Verify that if a stale .pyc file on the PYTHONPATH is replaced by a
        fresh .py file, the plugins in the new .py are picked up rather than
        the stale .pyc, even if the .pyc is still around.
        """
        mypath = self.appPackage.child("stale.py")
        mypath.setContent(pluginFileContents('one'))
        # Make it super stale
        x = time.time() - 1000
        os.utime(mypath.path, (x, x))
        pyc = mypath.sibling('stale.pyc')
        # compile it
        # On python 3, don't use the __pycache__ directory; the intention
        # of scanning for .pyc files is for configurations where you want
        # to intentionally include them, which means we _don't_ scan for
        # them inside cache directories.
        extra = dict(legacy=True)
        compileall.compile_dir(self.appPackage.path, quiet=1, **extra)
        os.utime(pyc.path, (x, x))
        # Eliminate the other option.
        mypath.remove()
        # Make sure it's the .pyc path getting cached.
        self.resetEnvironment()
        # Sanity check.
        self.assertIn('one', self.getAllPlugins())
        self.failIfIn('two', self.getAllPlugins())
        self.resetEnvironment()
        mypath.setContent(pluginFileContents('two'))
        self.failIfIn('one', self.getAllPlugins())
        self.assertIn('two', self.getAllPlugins())

    def test_newPluginsOnReadOnlyPath(self):
        """
        Verify that a failure to write the dropin.cache file on a read-only
        path will not affect the list of plugins returned.

        Note: this test should pass on both Linux and Windows, but may not
        provide useful coverage on Windows due to the different meaning of
        "read-only directory".
        """
        self.unlockSystem()
        self.sysplug.child('newstuff.py').setContent(pluginFileContents('one'))
        self.lockSystem()

        # Take the developer path out, so that the system plugins are actually
        # examined.
        sys.path.remove(self.devPath.path)

        # Start observing log events to see the warning
        events = []
        addObserver(events.append)
        self.addCleanup(removeObserver, events.append)

        self.assertIn('one', self.getAllPlugins())

        # Make sure something was logged about the cache.
        expected = "Unable to write to plugin cache %s: error number %d" % (
            self.syscache.path, errno.EPERM)
        for event in events:
            if expected in textFromEventDict(event):
                break
        else:
            self.fail("Did not observe unwriteable cache warning in log "
                      "events: %r" % (events, ))
Example #43
class SSHPublicKeyDatabaseTests(TestCase):
    """
    Tests for L{SSHPublicKeyDatabase}.
    """

    if pwd is None:
        skip = "Cannot run without pwd module"
    elif SSHPublicKeyDatabase is None:
        skip = "Cannot run without PyCrypto"

    def setUp(self):
        self.checker = SSHPublicKeyDatabase()
        self.sshDir = FilePath(self.mktemp())
        self.sshDir.makedirs()

        self.key1 = base64.encodestring("foobar")
        self.key2 = base64.encodestring("eggspam")
        self.content = "t1 %s foo\nt2 %s egg\n" % (self.key1, self.key2)

        self.mockos = MockOS()
        self.mockos.path = self.sshDir.path
        self.patch(os.path, "expanduser", self.mockos.expanduser)
        self.patch(pwd, "getpwnam", self.mockos.getpwnam)
        self.patch(os, "seteuid", self.mockos.seteuid)
        self.patch(os, "setegid", self.mockos.setegid)

    def _testCheckKey(self, filename):
        self.sshDir.child(filename).setContent(self.content)
        user = UsernamePassword("user", "password")
        user.blob = "foobar"
        self.assertTrue(self.checker.checkKey(user))
        user.blob = "eggspam"
        self.assertTrue(self.checker.checkKey(user))
        user.blob = "notallowed"
        self.assertFalse(self.checker.checkKey(user))

    def test_checkKey(self):
        """
        L{SSHPublicKeyDatabase.checkKey} should retrieve the content of the
        authorized_keys file and check the keys against that file.
        """
        self._testCheckKey("authorized_keys")
        self.assertEquals(self.mockos.seteuidCalls, [])
        self.assertEquals(self.mockos.setegidCalls, [])

    def test_checkKey2(self):
        """
        L{SSHPublicKeyDatabase.checkKey} should retrieve the content of the
        authorized_keys2 file and check the keys against that file.
        """
        self._testCheckKey("authorized_keys2")
        self.assertEquals(self.mockos.seteuidCalls, [])
        self.assertEquals(self.mockos.setegidCalls, [])

    def test_checkKeyAsRoot(self):
        """
        If the key file is readable, L{SSHPublicKeyDatabase.checkKey} should
        switch its uid/gid to the ones of the authenticated user.
        """
        keyFile = self.sshDir.child("authorized_keys")
        keyFile.setContent(self.content)
        # Fake permission error by changing the mode
        keyFile.chmod(0o000)
        self.addCleanup(keyFile.chmod, 0o777)
        # And restore the right mode when seteuid is called
        savedSeteuid = os.seteuid

        def seteuid(euid):
            keyFile.chmod(0o777)
            return savedSeteuid(euid)

        self.patch(os, "seteuid", seteuid)
        user = UsernamePassword("user", "password")
        user.blob = "foobar"
        self.assertTrue(self.checker.checkKey(user))
        self.assertEquals(self.mockos.seteuidCalls, [0, 1, 0, os.getuid()])
        self.assertEquals(self.mockos.setegidCalls, [2, os.getgid()])
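
# Note: ``base64.encodestring`` in setUp above is Python-2-only (deprecated
# since 3.1 and removed in 3.9).  A sketch of the equivalent fixture data on
# Python 3, using ``base64.encodebytes``:
import base64

key1 = base64.encodebytes(b"foobar").decode("ascii")
key2 = base64.encodebytes(b"eggspam").decode("ascii")
content = "t1 %s foo\nt2 %s egg\n" % (key1, key2)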
Example #44
def _run(package,
         path,
         newversion,
         patch,
         rc,
         post,
         dev,
         create,
         _date=None,
         _getcwd=None,
         _print=print):

    if not _getcwd:
        _getcwd = os.getcwd

    if not _date:
        _date = datetime.date.today()

    if type(package) != str:
        package = package.encode('utf8')

    if not path:
        path = _findPath(_getcwd(), package)
    else:
        path = FilePath(path)

    if newversion and (patch or dev or rc or post):
        raise ValueError("Only give --newversion")

    if dev and (patch or rc or post):
        raise ValueError("Only give --dev")

    if create and (dev or patch or rc or post or newversion):
        raise ValueError("Only give --create")

    if newversion:
        from pkg_resources import parse_version
        existing = _existing_version(path)
        st_version = parse_version(newversion)._version

        release = list(st_version.release)

        if len(release) == 1:
            release.append(0)
        if len(release) == 2:
            release.append(0)

        v = Version(
            package,
            *release,
            release_candidate=st_version.pre[1] if st_version.pre else None,
            post=st_version.post[1] if st_version.post else None,
            dev=st_version.dev[1] if st_version.dev else None)

    elif create:
        v = Version(package, _date.year - _YEAR_START, _date.month, 0)
        existing = v

    elif rc and not patch:
        existing = _existing_version(path)

        if existing.release_candidate:
            v = Version(package, existing.major, existing.minor,
                        existing.micro, existing.release_candidate + 1)
        else:
            v = Version(package, _date.year - _YEAR_START, _date.month, 0, 1)

    elif patch:
        if rc:
            rc = 1
        else:
            rc = None

        existing = _existing_version(path)
        v = Version(package, existing.major, existing.minor,
                    existing.micro + 1, rc)

    elif post:
        existing = _existing_version(path)

        if existing.post is None:
            _post = 0
        else:
            _post = existing.post + 1

        v = Version(package,
                    existing.major,
                    existing.minor,
                    existing.micro,
                    post=_post)

    elif dev:
        existing = _existing_version(path)

        if existing.dev is None:
            _dev = 0
        else:
            _dev = existing.dev + 1

        v = Version(package,
                    existing.major,
                    existing.minor,
                    existing.micro,
                    existing.release_candidate,
                    dev=_dev)

    else:
        existing = _existing_version(path)

        if existing.release_candidate:
            v = Version(package, existing.major, existing.minor,
                        existing.micro)
        else:
            raise ValueError(
                "You need to issue a rc before updating the major/minor")

    NEXT_repr = repr(Version(package, "NEXT", 0, 0)).split("#")[0]
    NEXT_repr_bytes = NEXT_repr.encode('utf8')

    version_repr = repr(v).split("#")[0]
    version_repr_bytes = version_repr.encode('utf8')

    existing_version_repr = repr(existing).split("#")[0]
    existing_version_repr_bytes = existing_version_repr.encode('utf8')

    _print("Updating codebase to %s" % (v.public()))

    for x in path.walk():

        if not x.isfile():
            continue

        original_content = x.getContent()
        content = original_content

        # Replace previous release_candidate calls to the new one
        if existing.release_candidate:
            content = content.replace(existing_version_repr_bytes,
                                      version_repr_bytes)
            content = content.replace(
                (package.encode('utf8') + b" " +
                 existing.public().encode('utf8')),
                (package.encode('utf8') + b" " + v.public().encode('utf8')))

        # Replace NEXT Version calls with the new one
        content = content.replace(NEXT_repr_bytes, version_repr_bytes)
        content = content.replace(NEXT_repr_bytes.replace(b"'", b'"'),
                                  version_repr_bytes)

        # Replace <package> NEXT with <package> <public>
        content = content.replace(
            package.encode('utf8') + b" NEXT",
            (package.encode('utf8') + b" " + v.public().encode('utf8')))

        if content != original_content:
            _print("Updating %s" % (x.path, ))
            with x.open('w') as f:
                f.write(content)

    _print("Updating %s/_version.py" % (path.path))
    with path.child("_version.py").open('w') as f:
        f.write((_VERSIONPY_TEMPLATE %
                 (package, package, version_repr)).encode('utf8'))
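
# To make the branching above concrete, an illustrative (not authoritative)
# table of version transitions, assuming _YEAR_START == 2000 and a current
# date of 2019-06-15; the exact rendering is incremental's:
#
#   existing      flag           result
#   ------------  -------------  -------------
#   17.5.0rc1     --rc           17.5.0rc2
#   17.5.0        --rc           19.6.0rc1
#   17.5.0        --patch        17.5.1
#   17.5.0        --patch --rc   17.5.1rc1
#   17.5.0        --post         17.5.0.post0
#   17.5.0rc1     (no flag)      17.5.0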
Example #45
class WarnAboutFunctionTests(SynchronousTestCase):
    """
    Tests for L{twisted.python.deprecate.warnAboutFunction} which allows the
    callers of a function to issue a C{DeprecationWarning} about that function.
    """
    def setUp(self):
        """
        Create a file that will have known line numbers when emitting warnings.
        """
        self.package = FilePath(
            self.mktemp().encode("utf-8")).child(b'twisted_private_helper')
        self.package.makedirs()
        self.package.child(b'__init__.py').setContent(b'')
        self.package.child(b'module.py').setContent(b'''
"A module string"

from twisted.python import deprecate

def testFunction():
    "A doc string"
    a = 1 + 2
    return a

def callTestFunction():
    b = testFunction()
    if b == 3:
        deprecate.warnAboutFunction(testFunction, "A Warning String")
''')
        # Python 3 doesn't accept bytes in sys.path:
        packagePath = self.package.parent().path.decode("utf-8")
        sys.path.insert(0, packagePath)
        self.addCleanup(sys.path.remove, packagePath)

        modules = sys.modules.copy()
        self.addCleanup(lambda:
                        (sys.modules.clear(), sys.modules.update(modules)))

    def test_warning(self):
        """
        L{deprecate.warnAboutFunction} emits a warning whose file and line
        number point to the beginning of the implementation of the function
        passed to it.
        """
        def aFunc():
            pass

        deprecate.warnAboutFunction(aFunc, 'A Warning Message')
        warningsShown = self.flushWarnings()
        filename = __file__
        if filename.lower().endswith('.pyc'):
            filename = filename[:-1]
        self.assertSamePath(FilePath(warningsShown[0]["filename"]),
                            FilePath(filename))
        self.assertEqual(warningsShown[0]["message"], "A Warning Message")

    def test_warningLineNumber(self):
        """
        L{deprecate.warnAboutFunction} emits a C{DeprecationWarning} with the
        number of a line within the implementation of the function passed to it.
        """
        from twisted_private_helper import module
        module.callTestFunction()
        warningsShown = self.flushWarnings()
        self.assertSamePath(
            FilePath(warningsShown[0]["filename"].encode("utf-8")),
            self.package.sibling(b'twisted_private_helper').child(
                b'module.py'))
        # Line number 9 is the last line in the testFunction in the helper
        # module.
        self.assertEqual(warningsShown[0]["lineno"], 9)
        self.assertEqual(warningsShown[0]["message"], "A Warning String")
        self.assertEqual(len(warningsShown), 1)

    def assertSamePath(self, first, second):
        """
        Assert that the two paths are the same, considering case normalization
        appropriate for the current platform.

        @type first: L{FilePath}
        @type second: L{FilePath}

        @raise C{self.failureType}: If the paths are not the same.
        """
        self.assertTrue(
            normcase(first.path) == normcase(second.path),
            "%r != %r" % (first, second))

    def test_renamedFile(self):
        """
        Even if the implementation of a deprecated function is moved around on
        the filesystem, the line number in the warning emitted by
        L{deprecate.warnAboutFunction} points to a line in the implementation of
        the deprecated function.
        """
        from twisted_private_helper import module
        # Clean up the state resulting from that import; we're not going to use
        # this module, so it should go away.
        del sys.modules['twisted_private_helper']
        del sys.modules[module.__name__]

        # Rename the source directory
        self.package.moveTo(self.package.sibling(b'twisted_renamed_helper'))

        # Make sure importlib notices we've changed importable packages:
        if invalidate_caches:
            invalidate_caches()

        # Import the newly renamed version
        from twisted_renamed_helper import module
        self.addCleanup(sys.modules.pop, 'twisted_renamed_helper')
        self.addCleanup(sys.modules.pop, module.__name__)

        module.callTestFunction()
        warningsShown = self.flushWarnings()
        warnedPath = FilePath(warningsShown[0]["filename"].encode("utf-8"))
        expectedPath = self.package.sibling(b'twisted_renamed_helper').child(
            b'module.py')
        self.assertSamePath(warnedPath, expectedPath)
        self.assertEqual(warningsShown[0]["lineno"], 9)
        self.assertEqual(warningsShown[0]["message"], "A Warning String")
        self.assertEqual(len(warningsShown), 1)

    def test_filteredWarning(self):
        """
        L{deprecate.warnAboutFunction} emits a warning that will be filtered if
        L{warnings.filterwarnings} is called with the module name of the
        deprecated function.
        """
        # Clean up anything *else* that might spuriously filter out the warning,
        # such as the "always" simplefilter set up by unittest._collectWarnings.
        # We'll also rely on trial to restore the original filters afterwards.
        del warnings.filters[:]

        warnings.filterwarnings(action="ignore",
                                module="twisted_private_helper")

        from twisted_private_helper import module
        module.callTestFunction()

        warningsShown = self.flushWarnings()
        self.assertEqual(len(warningsShown), 0)

    def test_filteredOnceWarning(self):
        """
        L{deprecate.warnAboutFunction} emits a warning that will be emitted
        only once per module if L{warnings.filterwarnings} is called with the
        module name of the deprecated function and an action of C{"module"}.
        """
        # Clean up anything *else* that might spuriously filter out the warning,
        # such as the "always" simplefilter set up by unittest._collectWarnings.
        # We'll also rely on trial to restore the original filters afterwards.
        del warnings.filters[:]

        warnings.filterwarnings(action="module",
                                module="twisted_private_helper")

        from twisted_private_helper import module
        module.callTestFunction()
        module.callTestFunction()

        warningsShown = self.flushWarnings()
        self.assertEqual(len(warningsShown), 1)
        message = warningsShown[0]['message']
        category = warningsShown[0]['category']
        filename = warningsShown[0]['filename']
        lineno = warningsShown[0]['lineno']
        msg = warnings.formatwarning(message, category, filename, lineno)
        self.assertTrue(
            msg.endswith("module.py:9: DeprecationWarning: A Warning String\n"
                         "  return a\n"),
            "Unexpected warning string: %r" % (msg, ))
Example #46
class ParserData(PClass):
    """
    Test data for parsing tests of PackerOutputParser.

    :ivar FilePath input: A file containing sample ``packer build
        -machine-readable`` output which will be fed into the parser.
    :ivar pmap output: The expected dictionary of the regional AMI values after
        parsing ``input``.
    """
    input = field(type=FilePath, mandatory=True)
    output = pmap_field(key_type=unicode, value_type=unicode, optional=False)


# These are sample outputs of running ``packer build -machine-readable`` with
# configuration files which result in Packer publishing AMI images to multiple
# or one Amazon region.
PACKER_OUTPUT_US_ALL = ParserData(
    input=PACKER_OUTPUTS.child('PACKER_OUTPUT_US_ALL'),
    output={
        u"us-east-1": u"ami-dc4410b6",
        u"us-west-1": u"ami-e098f380",
        u"us-west-2": u"ami-8c8f90ed",
    })

PACKER_OUTPUT_US_WEST_1 = ParserData(
    input=PACKER_OUTPUTS.child('PACKER_OUTPUT_US_WEST_1'),
    output={u"us-west-1": u"ami-e098f380"})

# This is an example of running ``packer build -machine-readable`` with a
# configuration file that has no builders.
PACKER_OUTPUT_NONE = ParserData(
    input=PACKER_OUTPUTS.child('PACKER_OUTPUT_NONE'),
    output={})
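
# For orientation, a hedged sketch of what such a fixture file contains.
# ``packer build -machine-readable`` emits CSV-ish lines of the form
# ``timestamp,target,type,data...``, and the artifact "id" line carries the
# region:AMI pairs.  The values below are invented:
#
#   1450125000,amazon-ebs,artifact,0,id,us-east-1:ami-dc4410b6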
Example #47
class PluginTests(unittest.TestCase):
    """
    Tests which verify the behavior of the current, active Twisted plugins
    directory.
    """
    def setUp(self):
        """
        Save C{sys.path} and C{sys.modules}, and create a package for tests.
        """
        self.originalPath = sys.path[:]
        self.savedModules = sys.modules.copy()

        self.root = FilePath(self.mktemp())
        self.root.createDirectory()
        self.package = self.root.child('mypackage')
        self.package.createDirectory()
        self.package.child('__init__.py').setContent(b"")

        FilePath(__file__).sibling('plugin_basic.py').copyTo(
            self.package.child('testplugin.py'))

        self.originalPlugin = "testplugin"

        sys.path.insert(0, self.root.path)
        import mypackage
        self.module = mypackage

    def tearDown(self):
        """
        Restore C{sys.path} and C{sys.modules} to their original values.
        """
        sys.path[:] = self.originalPath
        sys.modules.clear()
        sys.modules.update(self.savedModules)

    def _unimportPythonModule(self, module, deleteSource=False):
        modulePath = module.__name__.split('.')
        packageName = '.'.join(modulePath[:-1])
        moduleName = modulePath[-1]

        delattr(sys.modules[packageName], moduleName)
        del sys.modules[module.__name__]
        for ext in ['c', 'o'] + (deleteSource and [''] or []):
            try:
                os.remove(module.__file__ + ext)
            except OSError as ose:
                if ose.errno != errno.ENOENT:
                    raise

    def _clearCache(self):
        """
        Remove the plugin package's B{dropin.cache} file.
        """
        self.package.child('dropin.cache').remove()

    def _withCacheness(meth: Callable):
        """
        This is a paranoid test wrapper that calls C{meth} twice, clears the
        cache, and then calls it twice more.  It's supposed to ensure that the
        plugin system behaves correctly no matter what the state of the cache
        is.
        """
        @functools.wraps(meth)
        def wrapped(self):
            meth(self)
            meth(self)
            self._clearCache()
            meth(self)
            meth(self)

        return wrapped

    @_withCacheness
    def test_cache(self):
        """
        Check that the cache returned by L{plugin.getCache} holds the plugin
        B{testplugin}, and that this plugin has the properties we expect: it
        provides L{TestPlugin}, has the right name and description, and can be
        loaded successfully.
        """
        cache = plugin.getCache(self.module)

        dropin = cache[self.originalPlugin]
        self.assertEqual(dropin.moduleName,
                         'mypackage.%s' % (self.originalPlugin, ))
        self.assertIn("I'm a test drop-in.", dropin.description)

        # Note, not the preferred way to get a plugin by its interface.
        p1 = [p for p in dropin.plugins if ITestPlugin in p.provided][0]
        self.assertIs(p1.dropin, dropin)
        self.assertEqual(p1.name, "TestPlugin")

        # Check the content of the description comes from the plugin module
        # docstring
        self.assertEqual(p1.description.strip(),
                         "A plugin used solely for testing purposes.")
        self.assertEqual(p1.provided, [ITestPlugin, plugin.IPlugin])
        realPlugin = p1.load()
        # The plugin should match the class present in sys.modules
        self.assertIs(
            realPlugin,
            sys.modules['mypackage.%s' % (self.originalPlugin, )].TestPlugin)

        # And it should also match if we import it classically
        import mypackage.testplugin as tp
        self.assertIs(realPlugin, tp.TestPlugin)

    def test_cacheRepr(self):
        """
        L{CachedPlugin} has a helpful C{repr} which contains relevant
        information about it.
        """
        cachedDropin = plugin.getCache(self.module)[self.originalPlugin]
        cachedPlugin = list(p for p in cachedDropin.plugins
                            if p.name == 'TestPlugin')[0]
        self.assertEqual(
            repr(cachedPlugin),
            "<CachedPlugin 'TestPlugin'/'mypackage.testplugin' "
            "(provides 'ITestPlugin, IPlugin')>")

    @_withCacheness
    def test_plugins(self):
        """
        L{plugin.getPlugins} should return the list of plugins matching the
        specified interface (here, L{ITestPlugin2}), and these plugins
        should be instances of classes with a C{test} method, to be sure
        L{plugin.getPlugins} load classes correctly.
        """
        plugins = list(plugin.getPlugins(ITestPlugin2, self.module))

        self.assertEqual(len(plugins), 2)

        names = ['AnotherTestPlugin', 'ThirdTestPlugin']
        for p in plugins:
            names.remove(p.__name__)
            p.test()

    @_withCacheness
    def test_detectNewFiles(self):
        """
        Check that L{plugin.getPlugins} is able to detect plugins added at
        runtime.
        """
        FilePath(__file__).sibling('plugin_extra1.py').copyTo(
            self.package.child('pluginextra.py'))
        try:
            # Check that the current situation is clean
            self.failIfIn('mypackage.pluginextra', sys.modules)
            self.assertFalse(hasattr(sys.modules['mypackage'], 'pluginextra'),
                             "mypackage still has pluginextra module")

            plgs = list(plugin.getPlugins(ITestPlugin, self.module))

            # We should find 2 plugins: the one in testplugin, and the one in
            # pluginextra
            self.assertEqual(len(plgs), 2)

            names = ['TestPlugin', 'FourthTestPlugin']
            for p in plgs:
                names.remove(p.__name__)
                p.test1()
        finally:
            self._unimportPythonModule(sys.modules['mypackage.pluginextra'],
                                       True)

    @_withCacheness
    def test_detectFilesChanged(self):
        """
        Check that if the content of a plugin changes, L{plugin.getPlugins} is
        able to detect the newly added plugins.
        """
        FilePath(__file__).sibling('plugin_extra1.py').copyTo(
            self.package.child('pluginextra.py'))
        try:
            plgs = list(plugin.getPlugins(ITestPlugin, self.module))
            # Sanity check
            self.assertEqual(len(plgs), 2)

            FilePath(__file__).sibling('plugin_extra2.py').copyTo(
                self.package.child('pluginextra.py'))

            # Fake out Python.
            self._unimportPythonModule(sys.modules['mypackage.pluginextra'])

            # Make sure additions are noticed
            plgs = list(plugin.getPlugins(ITestPlugin, self.module))

            self.assertEqual(len(plgs), 3)

            names = ['TestPlugin', 'FourthTestPlugin', 'FifthTestPlugin']
            for p in plgs:
                names.remove(p.__name__)
                p.test1()
        finally:
            self._unimportPythonModule(sys.modules['mypackage.pluginextra'],
                                       True)

    @_withCacheness
    def test_detectFilesRemoved(self):
        """
        Check that when a dropin file is removed, L{plugin.getPlugins} doesn't
        return it anymore.
        """
        FilePath(__file__).sibling('plugin_extra1.py').copyTo(
            self.package.child('pluginextra.py'))
        try:
            # Generate a cache with pluginextra in it.
            list(plugin.getPlugins(ITestPlugin, self.module))

        finally:
            self._unimportPythonModule(sys.modules['mypackage.pluginextra'],
                                       True)
        plgs = list(plugin.getPlugins(ITestPlugin, self.module))
        self.assertEqual(1, len(plgs))

    @_withCacheness
    def test_nonexistentPathEntry(self):
        """
        Test that getCache skips over any entries in a plugin package's
        C{__path__} which do not exist.
        """
        path = self.mktemp()
        self.assertFalse(os.path.exists(path))
        # Add the test directory to the plugins path
        self.module.__path__.append(path)
        try:
            plgs = list(plugin.getPlugins(ITestPlugin, self.module))
            self.assertEqual(len(plgs), 1)
        finally:
            self.module.__path__.remove(path)

    @_withCacheness
    def test_nonDirectoryChildEntry(self):
        """
        Test that getCache skips over any entries in a plugin package's
        C{__path__} which refer to children of paths which are not directories.
        """
        path = FilePath(self.mktemp())
        self.assertFalse(path.exists())
        path.touch()
        child = path.child("test_package").path
        self.module.__path__.append(child)
        try:
            plgs = list(plugin.getPlugins(ITestPlugin, self.module))
            self.assertEqual(len(plgs), 1)
        finally:
            self.module.__path__.remove(child)

    def test_deployedMode(self):
        """
        The C{dropin.cache} file may not be writable: the cache should still be
        attainable, but an error should be logged to show that the cache
        couldn't be updated.
        """
        # Generate the cache
        plugin.getCache(self.module)

        cachepath = self.package.child('dropin.cache')

        # Add a new plugin
        FilePath(__file__).sibling('plugin_extra1.py').copyTo(
            self.package.child('pluginextra.py'))
        invalidateImportCaches()

        os.chmod(self.package.path, 0o500)
        # Change the right of dropin.cache too for windows
        os.chmod(cachepath.path, 0o400)
        self.addCleanup(os.chmod, self.package.path, 0o700)
        self.addCleanup(os.chmod, cachepath.path, 0o700)

        # Start observing log events to see the warning
        events = []
        addObserver(events.append)
        self.addCleanup(removeObserver, events.append)

        cache = plugin.getCache(self.module)
        # The new plugin should be reported
        self.assertIn('pluginextra', cache)
        self.assertIn(self.originalPlugin, cache)

        # Make sure something was logged about the cache.
        expected = "Unable to write to plugin cache %s: error number %d" % (
            cachepath.path, errno.EPERM)
        for event in events:
            if expected in textFromEventDict(event):
                break
        else:
            self.fail("Did not observe unwriteable cache warning in log "
                      "events: %r" % (events, ))
Example #48
def install(index, password, master='build.staging.clusterhq.com'):
    """
    Install a buildslave with vagrant installed.
    """
    config = get_vagrant_config()

    run("wget -O /etc/yum.repos.d/virtualbox.repo http://download.virtualbox.org/virtualbox/rpm/fedora/virtualbox.repo"
        )  # noqa

    run("wget -O /etc/yum.repos.d/s3tools.repo http://s3tools.org/repo/RHEL_6/s3tools.repo"
        )  # noqa

    run("""
UNAME_R=$(uname -r)
PV=${UNAME_R%.*}
KV=${PV%%-*}
SV=${PV##*-}
ARCH=$(uname -m)
yum install -y https://kojipkgs.fedoraproject.org//packages/kernel/${KV}/${SV}/${ARCH}/kernel-devel-${UNAME_R}.rpm  # noqa
""")

    run('yum install -y dkms')
    packages = [
        "https://dl.bintray.com/mitchellh/vagrant/vagrant_1.6.5_x86_64.rpm",
        "VirtualBox-4.3.x86_64",
        "mongodb",
        "git",
        "libffi-devel",
        "python",
        "python-devel",
        "openssl-devel",
        "s3cmd",
    ]
    run("yum install -y " + " ".join(packages))

    sudo("curl -O https://bootstrap.pypa.io/get-pip.py")
    sudo("python get-pip.py")
    sudo("pip install buildbot-slave==0.8.10 virtualenv==13.1.0")

    run("useradd buildslave")
    sudo(
        "buildslave create-slave /home/buildslave/fedora-vagrant %(master)s fedora-20/vagrant/%(index)s %(password)s"  # noqa
        % {
            'index': index,
            'password': password,
            'master': master
        },
        user='******')
    put(FilePath(__file__).sibling('start').path,
        '/home/buildslave/fedora-vagrant/start',
        mode=0o755)

    slave_home = FilePath('/home/buildslave')
    with settings(sudo_user='******',
                  shell_env={'HOME':
                             slave_home.path}), cd(slave_home.path):  # noqa
        sudo("vagrant plugin install vagrant-reload vagrant-vbguest")
    configure_acceptance()
    configure_s3cmd(config=config['boto'])

    put(
        FilePath(__file__).sibling('fedora-vagrant-slave.service').path,
        '/etc/systemd/system/fedora-vagrant-slave.service')
    put(
        FilePath(__file__).sibling('remove-old-boxes.py').path,
        slave_home.child('remove-old-boxes.py').path)

    run('systemctl start fedora-vagrant-slave')
    run('systemctl enable fedora-vagrant-slave')
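
# This example leans on Fabric 1.x's API plus some project-local helpers
# (get_vagrant_config, configure_acceptance, configure_s3cmd -- names taken
# from the calls above, their home module is a guess).  A sketch of the
# imports it would need:
from fabric.api import cd, put, run, settings, sudo

from twisted.python.filepath import FilePath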
Example #49
class BuildScriptsTests(TestCase):
    """
    Tests for L{dist.build_scripts_twisted}.
    """

    def setUp(self):
        self.source = FilePath(self.mktemp())
        self.target = FilePath(self.mktemp())
        self.source.makedirs()
        self.addCleanup(os.chdir, os.getcwd())
        os.chdir(self.source.path)


    def buildScripts(self):
        """
        Write 3 types of scripts and run the L{build_scripts_twisted}
        command.
        """
        self.writeScript(self.source, "script1",
                          ("#! /usr/bin/env python2.7\n"
                           "# bogus script w/ Python sh-bang\n"
                           "pass\n"))

        self.writeScript(self.source, "script2.py",
                        ("#!/usr/bin/python\n"
                         "# bogus script w/ Python sh-bang\n"
                         "pass\n"))

        self.writeScript(self.source, "shell.sh",
                        ("#!/bin/sh\n"
                         "# bogus shell script w/ sh-bang\n"
                         "exit 0\n"))

        expected = ['script1', 'script2.py', 'shell.sh']
        cmd = self.getBuildScriptsCmd(self.target,
                                     [self.source.child(fn).path
                                      for fn in expected])
        cmd.finalize_options()
        cmd.run()

        return self.target.listdir()


    def getBuildScriptsCmd(self, target, scripts):
        """
        Create a distutils L{Distribution} with a L{DummyCommand} and wrap it
        in L{build_scripts_twisted}.

        @type target: L{FilePath}
        """
        dist = Distribution()
        dist.scripts = scripts
        dist.command_obj["build"] = DummyCommand(
            build_scripts = target.path,
            force = 1,
            executable = sys.executable
        )
        return build_scripts_twisted(dist)


    def writeScript(self, dir, name, text):
        """
        Write the script to disk.
        """
        with open(dir.child(name).path, "w") as f:
            f.write(text)


    def test_notWindows(self):
        """
        L{build_scripts_twisted} does not rename scripts on non-Windows
        platforms.
        """
        self.patch(os, "name", "twisted")
        built = self.buildScripts()
        for name in ['script1', 'script2.py', 'shell.sh']:
            self.assertTrue(name in built)


    def test_windows(self):
        """
        L{build_scripts_twisted} renames scripts so they end with '.py' on
        the Windows platform.
        """
        self.patch(os, "name", "nt")
        built = self.buildScripts()
        for name in ['script1.py', 'script2.py', 'shell.sh.py']:
            self.assertTrue(name in built)
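
# Hypothetical sketch of the DummyCommand stand-in these tests rely on: a
# minimal object that simply exposes the attributes handed to it, mimicking
# an already-finalized distutils command.  The real test helper may differ.
class DummyCommand(object):
    def __init__(self, **kwargs):
        self.__dict__.update(kwargs)

    def ensure_finalized(self):
        # distutils asks commands to finalize before use; nothing to do here.
        pass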
Example #50
class TestContentDirectoryServer(unittest.TestCase):
    def setUp(self):
        self.tmp_content = FilePath(self.mktemp())
        f = self.tmp_content.child('content')
        audio = f.child('audio')
        f.child('images').makedirs()
        f.child('video').makedirs()
        album = audio.child('album-1')
        album.makedirs()
        album.child('track-1.mp3').touch()
        album.child('track-2.mp3').touch()
        album = audio.child('album-2')
        album.makedirs()
        album.child('track-1.ogg').touch()
        album.child('track-2.ogg').touch()
        self.coherence = Coherence({
            'unittest': 'yes',
            'logmode': 'critical',
            'no-subsystem_log': {
                'controlpoint': 'error',
                'action': 'info',
                'soap': 'error'
            },
            'controlpoint': 'yes'
        })
        self.uuid = str(UUID())
        self.coherence.add_plugin('FSStore',
                                  name='MediaServer-%d' % os.getpid(),
                                  content=self.tmp_content.path,
                                  uuid=self.uuid,
                                  enable_inotify=False)

    def tearDown(self):
        self.tmp_content.remove()

        def cleaner(r):
            self.coherence.clear()
            return r

        dl = self.coherence.shutdown()
        dl.addBoth(cleaner)
        return dl

    def test_Browse(self):
        """ tries to find the activated FSStore backend
            and browses its root.
        """
        d = Deferred()

        @wrapped(d)
        def the_result(mediaserver):
            cdc = mediaserver.client.content_directory
            self.assertEqual(self.uuid, mediaserver.udn)
            call = cdc.browse(process_result=False)
            call.addCallback(got_first_answer, cdc)

        @wrapped(d)
        def got_first_answer(r, cdc):
            self.assertEqual(int(r['TotalMatches']), 1)
            didl = DIDLLite.DIDLElement.fromString(r['Result'])
            item = didl.getItems()[0]
            self.assertEqual(item.childCount, 3)
            call = cdc.browse(object_id=item.id, process_result=False)
            call.addCallback(got_second_answer, item.childCount)

        @wrapped(d)
        def got_second_answer(r, childcount):
            self.assertEqual(int(r['TotalMatches']), childcount)
            d.callback(None)

        self.coherence.ctrl.add_query(
            DeviceQuery('uuid',
                        self.uuid,
                        the_result,
                        timeout=10,
                        oneshot=True))
        return d

    def test_Browse_Non_Existing_Object(self):
        d = Deferred()

        @wrapped(d)
        def the_result(mediaserver):
            cdc = mediaserver.client.content_directory
            self.assertEqual(self.uuid, mediaserver.udn)
            call = cdc.browse(object_id='9999.nothing', process_result=False)
            call.addCallback(got_first_answer)

        @wrapped(d)
        def got_first_answer(r):
            self.assertIs(r, None)
            d.callback(None)

        self.coherence.ctrl.add_query(
            DeviceQuery('uuid',
                        self.uuid,
                        the_result,
                        timeout=10,
                        oneshot=True))
        return d

    def test_Browse_Metadata(self):
        """ tries to find the activated FSStore backend
            and requests metadata for ObjectID 0.
        """
        d = Deferred()

        @wrapped(d)
        def the_result(mediaserver):
            self.assertEqual(self.uuid, mediaserver.udn)
            cdc = mediaserver.client.content_directory
            call = cdc.browse(object_id='0',
                              browse_flag='BrowseMetadata',
                              process_result=False)
            call.addCallback(got_first_answer)

        @wrapped(d)
        def got_first_answer(r):
            self.assertEqual(int(r['TotalMatches']), 1)
            didl = DIDLLite.DIDLElement.fromString(r['Result'])
            item = didl.getItems()[0]
            self.assertEqual(item.title, 'root')
            d.callback(None)

        self.coherence.ctrl.add_query(
            DeviceQuery('uuid',
                        self.uuid,
                        the_result,
                        timeout=10,
                        oneshot=True))
        return d

    def test_XBOX_Browse(self):
        """ tries to find the activated FSStore backend
            and browses all audio files.
        """
        d = Deferred()

        @wrapped(d)
        def the_result(mediaserver):
            mediaserver.client.overlay_headers = {
                'user-agent': 'Xbox/Coherence emulation'
            }
            cdc = mediaserver.client.content_directory
            self.assertEqual(self.uuid, mediaserver.udn)
            call = cdc.browse(object_id='4', process_result=False)
            call.addCallback(got_first_answer)

        @wrapped(d)
        def got_first_answer(r):
            """ we expect four audio files here """
            self.assertEqual(int(r['TotalMatches']), 4)
            d.callback(None)

        self.coherence.ctrl.add_query(
            DeviceQuery('uuid',
                        self.uuid,
                        the_result,
                        timeout=10,
                        oneshot=True))
        return d

    def test_XBOX_Browse_Metadata(self):
        """ tries to find the activated FSStore backend
            and requests metadata for ObjectID 0.
        """
        d = Deferred()

        @wrapped(d)
        def the_result(mediaserver):
            mediaserver.client.overlay_headers = {
                'user-agent': 'Xbox/Coherence emulation'
            }
            cdc = mediaserver.client.content_directory
            self.assertEqual(self.uuid, mediaserver.udn)
            call = cdc.browse(object_id='0',
                              browse_flag='BrowseMetadata',
                              process_result=False)
            call.addCallback(got_first_answer)

        @wrapped(d)
        def got_first_answer(r):
            """ we expect one item here """
            self.assertEqual(int(r['TotalMatches']), 1)
            didl = DIDLLite.DIDLElement.fromString(r['Result'])
            item = didl.getItems()[0]
            self.assertEqual(item.title, 'root')
            d.callback(None)

        self.coherence.ctrl.add_query(
            DeviceQuery('uuid',
                        self.uuid,
                        the_result,
                        timeout=10,
                        oneshot=True))
        return d

    def test_XBOX_Search(self):
        """ tries to find the activated FSStore backend
            and searches for all its audio files.
        """

        d = Deferred()

        @wrapped(d)
        def the_result(mediaserver):
            mediaserver.client.overlay_headers = {
                'user-agent': 'Xbox/Coherence emulation'
            }
            cdc = mediaserver.client.content_directory
            self.assertEqual(self.uuid, mediaserver.udn)
            call = cdc.search(container_id='4', criteria='')
            call.addCallback(got_first_answer)

        @wrapped(d)
        def got_first_answer(r):
            """ we expect four audio files here """
            self.assertEqual(len(r), 4)
            d.callback(None)

        self.coherence.ctrl.add_query(
            DeviceQuery('uuid',
                        self.uuid,
                        the_result,
                        timeout=10,
                        oneshot=True))
        return d
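
# Hypothetical sketch of the ``wrapped(d)`` decorator these tests assume:
# run the wrapped callback and, if it raises, route the failure into the
# test's Deferred so the test fails cleanly instead of hanging.
import functools


def wrapped(d):
    def decorate(fn):
        @functools.wraps(fn)
        def inner(*args, **kwargs):
            try:
                return fn(*args, **kwargs)
            except Exception:
                d.errback()  # wraps the active exception in a Failure
        return inner
    return decorate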
Example #51
class PluginMatchedAnnouncement(SyncTestCase):
    """
    Tests for handling by ``NativeStorageServer`` of storage server
    announcements that are handled by an ``IFoolscapStoragePlugin``.
    """
    @inlineCallbacks
    def make_node(self, introducer_furl, storage_plugin, plugin_config):
        """
        Create a client node with the given configuration.

        :param bytes introducer_furl: The introducer furl with which to
            configure the client.

        :param bytes storage_plugin: The name of a storage plugin to enable.

        :param dict[bytes, bytes] plugin_config: Configuration to supply to
            the enabled plugin.  May also be ``None`` for no configuration
            section (distinct from ``{}`` which creates an empty configuration
            section).
        """
        tempdir = TempDir()
        self.useFixture(tempdir)
        self.basedir = FilePath(tempdir.path)
        self.basedir.child(u"private").makedirs()
        self.useFixture(UseTestPlugins())

        self.node_fixture = self.useFixture(
            UseNode(
                plugin_config,
                storage_plugin,
                self.basedir,
                introducer_furl,
            ))
        self.config = self.node_fixture.config
        self.node = yield self.node_fixture.create_node()
        [self.introducer_client] = self.node.introducer_clients

    def publish(self, server_id, announcement, introducer_client):
        for subscription in introducer_client.subscribed_to:
            if subscription.service_name == u"storage":
                subscription.cb(server_id, announcement, *subscription.args,
                                **subscription.kwargs)

    def get_storage(self, server_id, node):
        storage_broker = node.get_storage_broker()
        native_storage_server = storage_broker.servers[server_id]
        return native_storage_server._storage

    def set_rref(self, server_id, node, rref):
        storage_broker = node.get_storage_broker()
        native_storage_server = storage_broker.servers[server_id]
        native_storage_server._rref = rref

    @inlineCallbacks
    def test_ignored_non_enabled_plugin(self):
        """
        An announcement that could be matched by a plugin that is not enabled is
        not matched.
        """
        yield self.make_node(
            introducer_furl=SOME_FURL,
            storage_plugin="tahoe-lafs-dummy-v1",
            plugin_config=None,
        )
        server_id = b"v0-abcdef"
        ann = {
            u"service-name": u"storage",
            u"storage-options": [{
                # notice how the announcement is for a different storage plugin
                # than the one that is enabled.
                u"name": u"tahoe-lafs-dummy-v2",
                u"storage-server-FURL": SOME_FURL,
            }],
        }
        self.publish(server_id, ann, self.introducer_client)
        storage = self.get_storage(server_id, self.node)
        self.assertIsInstance(storage, _NullStorage)

    @inlineCallbacks
    def test_enabled_plugin(self):
        """
        An announcement that could be matched by a plugin that is enabled with
        configuration is matched and the plugin's storage client is used.
        """
        plugin_config = {
            "abc": "xyz",
        }
        plugin_name = "tahoe-lafs-dummy-v1"
        yield self.make_node(
            introducer_furl=SOME_FURL,
            storage_plugin=plugin_name,
            plugin_config=plugin_config,
        )
        server_id = b"v0-abcdef"
        ann = {
            u"service-name": u"storage",
            u"storage-options": [{
                # and this announcement is for a plugin with a matching name
                u"name": plugin_name,
                u"storage-server-FURL": SOME_FURL,
            }],
        }
        self.publish(server_id, ann, self.introducer_client)
        storage = self.get_storage(server_id, self.node)
        self.assertTrue(verifyObject(
            IFoolscapStorageServer,
            storage,
        ))
        expected_rref = object()
        # Can't easily establish a real Foolscap connection so fake the result
        # of doing so...
        self.set_rref(server_id, self.node, expected_rref)
        self.expectThat(
            storage.storage_server,
            MatchesAll(
                IsInstance(DummyStorageClient),
                MatchesStructure(
                    get_rref=AfterPreprocessing(
                        lambda get_rref: get_rref(),
                        Is(expected_rref),
                    ),
                    configuration=Equals(plugin_config),
                    announcement=Equals({
                        u'name': plugin_name,
                        u'storage-server-FURL': u'pb://abcde@nowhere/fake',
                    }),
                ),
            ),
        )

    @inlineCallbacks
    def test_enabled_no_configuration_plugin(self):
        """
        An announcement that could be matched by a plugin that is enabled with no
        configuration is matched and the plugin's storage client is used.
        """
        plugin_name = "tahoe-lafs-dummy-v1"
        yield self.make_node(
            introducer_furl=SOME_FURL,
            storage_plugin=plugin_name,
            plugin_config=None,
        )
        server_id = b"v0-abcdef"
        ann = {
            u"service-name": u"storage",
            u"storage-options": [{
                # and this announcement is for a plugin with a matching name
                u"name": plugin_name,
                u"storage-server-FURL": SOME_FURL,
            }],
        }
        self.publish(server_id, ann, self.introducer_client)
        storage = self.get_storage(server_id, self.node)
        self.addDetail("storage", text_content(str(storage)))
        self.expectThat(
            storage.storage_server,
            MatchesAll(
                IsInstance(DummyStorageClient),
                MatchesStructure(configuration=Equals({})),
            ),
        )
Example #52
class PathModificationTests(TwistedModulesTestCase):
    """
    These tests share setup/cleanup behavior of creating a dummy package and
    stuffing some code in it.
    """

    _serialnum = itertools.count()  # used to generate serial numbers for
                                    # package names

    def setUp(self):
        self.pathExtensionName = self.mktemp()
        self.pathExtension = FilePath(self.pathExtensionName)
        self.pathExtension.createDirectory()
        self.packageName = "pyspacetests%d" % (next(self._serialnum), )
        self.packagePath = self.pathExtension.child(self.packageName)
        self.packagePath.createDirectory()
        self.packagePath.child("__init__.py").setContent(b"")
        self.packagePath.child("a.py").setContent(b"")
        self.packagePath.child("b.py").setContent(b"")
        self.packagePath.child("c__init__.py").setContent(b"")
        self.pathSetUp = False

    def _setupSysPath(self):
        assert not self.pathSetUp
        self.pathSetUp = True
        sys.path.append(self.pathExtensionName)

    def _underUnderPathTest(self, doImport=True):
        moddir2 = self.mktemp()
        fpmd = FilePath(moddir2)
        fpmd.createDirectory()
        fpmd.child("foozle.py").setContent(b"x = 123\n")
        self.packagePath.child("__init__.py").setContent(
            networkString("__path__.append({0})\n".format(repr(moddir2))))
        # Cut here
        self._setupSysPath()
        modinfo = modules.getModule(self.packageName)
        self.assertEqual(
            self.findByIteration(self.packageName + ".foozle",
                                 modinfo,
                                 importPackages=doImport),
            modinfo["foozle"],
        )
        self.assertEqual(modinfo["foozle"].load().x, 123)

    def test_underUnderPathAlreadyImported(self):
        """
        Verify that iterModules will honor the __path__ of already-loaded packages.
        """
        self._underUnderPathTest()

    def _listModules(self):
        pkginfo = modules.getModule(self.packageName)
        nfni = [
            modinfo.name.split(".")[-1] for modinfo in pkginfo.iterModules()
        ]
        nfni.sort()
        self.assertEqual(nfni, ["a", "b", "c__init__"])

    def test_listingModules(self):
        """
        Make sure the module list comes back as we expect from iterModules on a
        package, whether zipped or not.
        """
        self._setupSysPath()
        self._listModules()

    def test_listingModulesAlreadyImported(self):
        """
        Make sure the module list comes back as we expect from iterModules on a
        package, whether zipped or not, even if the package has already been
        imported.
        """
        self._setupSysPath()
        namedAny(self.packageName)
        self._listModules()

    def tearDown(self):
        # Intentionally using 'assert' here, this is not a test assertion, this
        # is just an "oh f**k what is going ON" assertion. -glyph
        if self.pathSetUp:
            HORK = "path cleanup failed: don't be surprised if other tests break"
            assert sys.path.pop() is self.pathExtensionName, HORK + ", 1"
            assert self.pathExtensionName not in sys.path, HORK + ", 2"
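
# Hypothetical sketch of the findByIteration helper the base class provides
# (used in _underUnderPathTest above): walk the module graph and return the
# entry whose fully-qualified name matches, failing the test otherwise.
def findByIteration(self, modname, where, importPackages=False):
    for modinfo in where.walkModules(importPackages=importPackages):
        if modinfo.name == modname:
            return modinfo
    self.fail("Unable to find module %r through iteration." % (modname,))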
Example #53
class DeleteLocalSnapshotTests(SyncTestCase):
    """
    Test the 'delete single snapshot' codepaths in MagicFolderConfig

    non-Hypothesis-using tests
    """
    def setUp(self):
        super(DeleteLocalSnapshotTests, self).setUp()
        self.author = create_local_author(u"alice")

        self.temp = FilePath(self.mktemp())
        self.stash = self.temp.child("stash")
        self.stash.makedirs()
        self.magic = self.temp.child("magic")
        self.magic.makedirs()

        self.db = MagicFolderConfig.initialize(
            u"some-folder",
            SQLite3DatabaseLocation.memory(),
            self.author,
            self.stash,
            # collective dircap
            random_dircap(readonly=True),
            # upload dircap
            random_dircap(),
            self.magic,
            60,
            60,
        )

        self.snap0 = LocalSnapshot(
            relpath="foo",
            author=self.author,
            metadata=dict(),
            content_path=FilePath("snap0 content"),
            parents_local=[],
            parents_remote=[],
        )
        self.db.store_local_snapshot(
            self.snap0,
            PathState(42, seconds_to_ns(42), seconds_to_ns(42)),
        )

        self.snap1 = LocalSnapshot(
            relpath="foo",
            author=self.author,
            metadata=dict(),
            content_path=FilePath("snap1 content"),
            parents_local=[self.snap0],
            parents_remote=[],
        )
        self.db.store_local_snapshot(
            self.snap1,
            PathState(42, seconds_to_ns(42), seconds_to_ns(42)),
        )

        self.snap2 = LocalSnapshot(
            relpath="foo",
            author=self.author,
            metadata=dict(),
            content_path=FilePath("snap2 content"),
            parents_local=[self.snap1],
            parents_remote=[],
        )
        self.db.store_local_snapshot(
            self.snap2,
            PathState(42, seconds_to_ns(42), seconds_to_ns(42)),
        )

    def test_delete_one_local_snapshot(self):
        """
        Given a chain of three snapshots, deleting the oldest one results
        in a proper chain of two snapshots.
        """
        # we have a "leaf" snapshot "snap2" with parent "snap1" and
        # grandparent "snap0" snap2->snap1->snap0

        # pretend we uploaded the oldest ancestor, the only one we
        # _can_ upload (semantically)
        remote0 = RemoteSnapshot(
            self.snap0.relpath,
            self.snap0.author,
            {
                "relpath": self.snap0.relpath,
                "modification_time": 1234,
            },
            capability=random_immutable(directory=True),
            parents_raw=[],
            content_cap=random_immutable(),
            metadata_cap=random_immutable(),
        )

        self.db.delete_local_snapshot(self.snap0, remote0)

        # we should still have a 3-snapshot chain, but there should be
        # only 2 local snapshots and one remote

        # start with the "leaf", the most-local snapshot
        dbsnap2 = self.db.get_local_snapshot(self.snap0.relpath)
        self.assertThat(dbsnap2.content_path,
                        Equals(FilePath("snap2 content")))
        self.assertThat(
            dbsnap2.parents_local,
            AfterPreprocessing(len, Equals(1)),
        )
        self.assertThat(
            dbsnap2.parents_remote,
            AfterPreprocessing(len, Equals(0)),
        )

        # the leaf had just one parent, which is local -- examine it
        dbsnap1 = dbsnap2.parents_local[0]
        self.assertThat(dbsnap1.content_path,
                        Equals(FilePath("snap1 content")))
        self.assertThat(
            dbsnap1.parents_local,
            AfterPreprocessing(len, Equals(0)),
        )
        self.assertThat(
            dbsnap1.parents_remote,
            AfterPreprocessing(len, Equals(1)),
        )

        # the "middle" parent (above) has no local parents and one
        # remote, which is correct .. the final parent should be the
        # one we replaced the local with.
        self.assertThat(
            dbsnap1.parents_remote[0],
            Equals(remote0.capability),
        )

    def test_delete_several_local_snapshots(self):
        """
        Given a chain of three snapshots, deleting them all results in no
        snapshots.
        """
        # we have a "leaf" snapshot "snap2" with parent "snap1" and
        # grandparent "snap0" snap2->snap1->snap0

        # pretend we uploaded the oldest ancestor, the only one we
        # _can_ upload (semantically)
        remote0 = RemoteSnapshot(
            self.snap0.relpath,
            self.snap0.author,
            {
                "relpath": self.snap0.relpath,
                "modification_time": 1234,
            },
            capability=random_immutable(directory=True),
            parents_raw=[],
            content_cap=random_immutable(),
            metadata_cap=random_immutable(),
        )

        self.db.delete_local_snapshot(self.snap0, remote0)
        self.db.delete_local_snapshot(self.snap1, remote0)
        self.db.delete_local_snapshot(self.snap2, remote0)

        with self.assertRaises(KeyError):
            self.db.get_local_snapshot(self.snap0.relpath)

    def test_delete_snapshot_twice(self):
        """
        Attempting to delete a snapshot that has already been deleted is an error.
        """
        remote0 = RemoteSnapshot(
            self.snap0.relpath,
            self.snap0.author,
            {
                "relpath": self.snap0.relpath,
                "modification_time": 1234,
            },
            capability=random_immutable(directory=True),
            parents_raw=[],
            content_cap=random_immutable(),
            metadata_cap=random_immutable(),
        )

        self.db.delete_local_snapshot(self.snap0, remote0)

        with self.assertRaises(ValueError):
            self.db.delete_local_snapshot(self.snap0, remote0)

    def test_delete_no_snapshots_for_relpath(self):
        """
        Attempting to delete an unknown relpath is an error.
        """
        snap = LocalSnapshot(
            relpath="non-existent",
            author=self.author,
            metadata=dict(),
            content_path=FilePath("snap0 content"),
            parents_local=[],
            parents_remote=[],
        )
        remote = RemoteSnapshot(
            snap.relpath,
            snap.author,
            {
                "relpath": snap.relpath,
                "modification_time": 1234,
            },
            capability="URI:DIR2-CHK:aaaa:aaaa",
            parents_raw=[],
            content_cap="URI:CHK:bbbb:bbbb",
            metadata_cap="URI:CHK:cccc:cccc",
        )

        with self.assertRaises(KeyError):
            self.db.delete_local_snapshot(snap, remote)
Example #54
class StoragePluginWebPresence(AsyncTestCase):
    """
    Tests for the web resources ``IFoolscapStorageServer`` plugins may expose.
    """
    @inlineCallbacks
    def setUp(self):
        super(StoragePluginWebPresence, self).setUp()

        self.useFixture(UseTestPlugins())

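        # Set up a port assigner so we can pick a free port for the node's
        # web server without racing other tests in this process.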
        self.port_assigner = SameProcessStreamEndpointAssigner()
        self.port_assigner.setUp()
        self.addCleanup(self.port_assigner.tearDown)
        self.storage_plugin = u"tahoe-lafs-dummy-v1"

        from twisted.internet import reactor
        _, port_endpoint = self.port_assigner.assign(reactor)

        tempdir = TempDir()
        self.useFixture(tempdir)
        self.basedir = FilePath(tempdir.path)
        self.basedir.child(u"private").makedirs()
        self.node_fixture = self.useFixture(
            UseNode(
                plugin_config={
                    "web": "1",
                },
                node_config={
                    "tub.location": "127.0.0.1:1",
                    "web.port": ensure_text(port_endpoint),
                },
                storage_plugin=self.storage_plugin,
                basedir=self.basedir,
                introducer_furl=SOME_FURL,
            ))
        self.node = yield self.node_fixture.create_node()
        self.webish = self.node.getServiceNamed(WebishServer.name)
        self.node.startService()
        self.addCleanup(self.node.stopService)
        self.port = self.webish.getPortnum()

    @inlineCallbacks
    def test_plugin_resource_path(self):
        """
        The plugin's resource is published at */storage-plugins/<plugin name>*.
        """
        url = u"http://127.0.0.1:{port}/storage-plugins/{plugin_name}".format(
            port=self.port,
            plugin_name=self.storage_plugin,
        ).encode("utf-8")
        result = yield do_http("get", url)
        self.assertThat(loads(result), Equals({"web": "1"}))

    @inlineCallbacks
    def test_plugin_resource_persistent_across_requests(self):
        """
        The plugin's resource is loaded once, then saved and re-used for
        subsequent requests.
        """
        url = URL(
            scheme=u"http",
            host=u"127.0.0.1",
            port=self.port,
            path=(
                u"storage-plugins",
                self.storage_plugin,
                u"counter",
            ),
        ).to_text().encode("utf-8")
        values = {
            loads((yield do_http("get", url)))[u"value"],
            loads((yield do_http("get", url)))[u"value"],
        }
        self.assertThat(
            values,
            # If the counter manages to go up then the state stuck around.
            Equals({1, 2}),
        )
Example #55
class GlobalConfigDatabaseMagicFolderTests(SyncTestCase):
    """
    Tests for the ``GlobalConfigDatabase`` APIs that deal with individual
    ``MagicFolderConfig`` instances.
    """
    def setUp(self):
        super(GlobalConfigDatabaseMagicFolderTests, self).setUp()
        self.setup_tempdir()

    def setup_example(self):
        self.setup_tempdir()

    def setup_tempdir(self):
        self.temp = FilePath(self.mktemp())
        self.node_dir = FilePath(self.mktemp())
        self.tahoe_dir = self.useFixture(NodeDirectory(self.node_dir))

    @given(
        folder_names(), )
    # These examples ensure that it is possible to create magic folders whose
    # names contain characters that are invalid on Windows.
    @example(u".")
    @example(u":")
    @example(u'"')
    def test_create_folder(self, folder_name):
        config = create_global_configuration(self.temp, u"tcp:1234",
                                             self.node_dir,
                                             u"tcp:localhost:1234")
        alice = create_local_author(u"alice")
        magic = self.temp.child("magic")
        magic.makedirs()
        magic_folder = config.create_magic_folder(
            folder_name,
            magic,
            alice,
            Capability.from_string(
                u"URI:DIR2-RO:ou5wvazwlyzmqw7yof5ifmgmau:xqzt6uoulu4f3m627jtadpofnizjt3yoewzeitx47vw6memofeiq"
            ),
            Capability.from_string(
                u"URI:DIR2:bgksdpr3lr2gvlvhydxjo2izea:dfdkjc44gg23n3fxcxd6ywsqvuuqzo4nrtqncrjzqmh4pamag2ia"
            ),
            60,
            60,
        )
        self.assertThat(
            magic_folder.author,
            Equals(alice),
        )

    def test_create_folder_duplicate(self):
        config = create_global_configuration(self.temp, u"tcp:1234",
                                             self.node_dir,
                                             u"tcp:localhost:1234")
        alice = create_local_author(u"alice")
        magic = self.temp.child("magic")
        magic.makedirs()
        config.create_magic_folder(
            u"foo",
            magic,
            alice,
            Capability.from_string(
                u"URI:DIR2-RO:ou5wvazwlyzmqw7yof5ifmgmau:xqzt6uoulu4f3m627jtadpofnizjt3yoewzeitx47vw6memofeiq"
            ),
            Capability.from_string(
                u"URI:DIR2:bgksdpr3lr2gvlvhydxjo2izea:dfdkjc44gg23n3fxcxd6ywsqvuuqzo4nrtqncrjzqmh4pamag2ia"
            ),
            60,
            60,
        )
        with ExpectedException(APIError,
                               "Already have a magic-folder named 'foo'"):
            config.create_magic_folder(
                u"foo",
                magic,
                alice,
                u"URI:DIR2-RO:ou5wvazwlyzmqw7yof5ifmgmau:xqzt6uoulu4f3m627jtadpofnizjt3yoewzeitx47vw6memofeiq",
                u"URI:DIR2:bgksdpr3lr2gvlvhydxjo2izea:dfdkjc44gg23n3fxcxd6ywsqvuuqzo4nrtqncrjzqmh4pamag2ia",
                60,
                60,
            )

    def test_create_folder_trailing_dot_space(self):
        """
        We can create folders that differ only in having a trailing dot or space in the name.

        Windows will strip a trailing dot or space from filenames, so test that
        we don't get state-directory collisions between names that differ only
        in a trailing dot or space.
        """
        config = create_global_configuration(self.temp, u"tcp:1234",
                                             self.node_dir,
                                             u"tcp:localhost:1234")
        alice = create_local_author(u"alice")
        magic = self.temp.child("magic")
        magic.makedirs()
        config.create_magic_folder(
            u"foo",
            magic,
            alice,
            Capability.from_string(
                u"URI:DIR2-RO:ou5wvazwlyzmqw7yof5ifmgmau:xqzt6uoulu4f3m627jtadpofnizjt3yoewzeitx47vw6memofeiq"
            ),
            Capability.from_string(
                u"URI:DIR2:bgksdpr3lr2gvlvhydxjo2izea:dfdkjc44gg23n3fxcxd6ywsqvuuqzo4nrtqncrjzqmh4pamag2ia"
            ),
            60,
            60,
        )
        config.create_magic_folder(
            u"foo.",
            magic,
            alice,
            Capability.from_string(
                u"URI:DIR2-RO:ou5wvazwlyzmqw7yof5ifmgmau:xqzt6uoulu4f3m627jtadpofnizjt3yoewzeitx47vw6memofeiq"
            ),
            Capability.from_string(
                u"URI:DIR2:bgksdpr3lr2gvlvhydxjo2izea:dfdkjc44gg23n3fxcxd6ywsqvuuqzo4nrtqncrjzqmh4pamag2ia"
            ),
            60,
            60,
        )
        config.create_magic_folder(
            u"foo ",
            magic,
            alice,
            Capability.from_string(
                u"URI:DIR2-RO:ou5wvazwlyzmqw7yof5ifmgmau:xqzt6uoulu4f3m627jtadpofnizjt3yoewzeitx47vw6memofeiq"
            ),
            Capability.from_string(
                u"URI:DIR2:bgksdpr3lr2gvlvhydxjo2izea:dfdkjc44gg23n3fxcxd6ywsqvuuqzo4nrtqncrjzqmh4pamag2ia"
            ),
            60,
            60,
        )

    def test_folder_nonexistant_magic_path(self):
        config = create_global_configuration(self.temp, u"tcp:1234",
                                             self.node_dir,
                                             u"tcp:localhost:1234")
        alice = create_local_author(u"alice")
        magic = self.temp.child("magic")
        with ExpectedException(APIError, ".*{}.*".format(escape(magic.path))):
            config.create_magic_folder(
                u"foo",
                magic,
                alice,
                u"URI:DIR2-RO:ou5wvazwlyzmqw7yof5ifmgmau:xqzt6uoulu4f3m627jtadpofnizjt3yoewzeitx47vw6memofeiq",
                u"URI:DIR2:bgksdpr3lr2gvlvhydxjo2izea:dfdkjc44gg23n3fxcxd6ywsqvuuqzo4nrtqncrjzqmh4pamag2ia",
                60,
                None,
            )

    def test_folder_state_already_exists(self):
        config = create_global_configuration(self.temp, u"tcp:1234",
                                             self.node_dir,
                                             u"tcp:localhost:1234")
        name = u"foo"
        alice = create_local_author(u"alice")
        magic = self.temp.child("magic")
        state = config._get_state_path(name)
        magic.makedirs()
        state.makedirs()  # the state path must not pre-exist; create it to trigger the error
        with ExpectedException(APIError, ".*{}.*".format(escape(state.path))):
            config.create_magic_folder(
                name,
                state,
                alice,
                u"URI:DIR2-RO:ou5wvazwlyzmqw7yof5ifmgmau:xqzt6uoulu4f3m627jtadpofnizjt3yoewzeitx47vw6memofeiq",
                u"URI:DIR2:bgksdpr3lr2gvlvhydxjo2izea:dfdkjc44gg23n3fxcxd6ywsqvuuqzo4nrtqncrjzqmh4pamag2ia",
                60,
                60,
            )

    def test_folder_get_path(self):
        """
        we can retrieve the stash-path from a magic-folder config
        """
        config = create_global_configuration(self.temp, u"tcp:1234",
                                             self.node_dir,
                                             u"tcp:localhost:1234")
        name = u"foo"
        alice = create_local_author(u"alice")
        magic = self.temp.child("magic")
        magic.makedirs()
        config.create_magic_folder(
            name,
            magic,
            alice,
            Capability.from_string(
                u"URI:DIR2-RO:ou5wvazwlyzmqw7yof5ifmgmau:xqzt6uoulu4f3m627jtadpofnizjt3yoewzeitx47vw6memofeiq"
            ),
            Capability.from_string(
                u"URI:DIR2:bgksdpr3lr2gvlvhydxjo2izea:dfdkjc44gg23n3fxcxd6ywsqvuuqzo4nrtqncrjzqmh4pamag2ia"
            ),
            60,
            60,
        )
        self.assertThat(config.list_magic_folders(), Contains(u"foo"))
        mf_config = config.get_magic_folder(u"foo")
        self.assertThat(
            mf_config.stash_path,
            Equals(config._get_state_path(name).child(u"stash")),
        )

    def test_folder_cache(self):
        """
        After calling `remove_magic_folder`, `get_magic_folder` raises `NoSuchMagicFolder`
        even if there is a live reference to the previous `MagicFolderConfig` instance.
        """
        config = create_global_configuration(self.temp, u"tcp:1234",
                                             self.node_dir,
                                             u"tcp:localhost:1234")
        name = u"foo"
        alice = create_local_author(u"alice")
        magic = self.temp.child("magic")
        magic.makedirs()
        config.create_magic_folder(
            name,
            magic,
            alice,
            Capability.from_string(
                u"URI:DIR2-RO:ou5wvazwlyzmqw7yof5ifmgmau:xqzt6uoulu4f3m627jtadpofnizjt3yoewzeitx47vw6memofeiq"
            ),
            Capability.from_string(
                u"URI:DIR2:bgksdpr3lr2gvlvhydxjo2izea:dfdkjc44gg23n3fxcxd6ywsqvuuqzo4nrtqncrjzqmh4pamag2ia"
            ),
            60,
            None,
        )

        # We grab a reference to the `MagicFolderConfig` so that the cache
        # doesn't get cleaned up by the object being collected. This simulates
        # the case of MagicFolder having circular references and pointers to
        # the MagicFolderConfig.
        folder_config = config.get_magic_folder(name)

        config.remove_magic_folder(name)
        with self.assertRaises(NoSuchMagicFolder):
            config.get_magic_folder(name)

        del folder_config

    def test_get_folder_nonexistent(self):
        """
        It is an error to retrieve a non-existent folder.
        """
        config = create_global_configuration(self.temp, u"tcp:1234",
                                             self.node_dir,
                                             u"tcp:localhost:1234")
        with ExpectedException(NoSuchMagicFolder):
            config.get_magic_folder(u"non-existent")

    @given(
        tuples(
            text(),
            characters(
                whitelist_categories=(
                    "Cc",
                    "Cs",
                    "Cn",
                ),
                whitelist_characters=("/", "\\"),
            ),
            text(),
        ).map("".join))
    def test_get_folder_illegal_characters(self, folder_name):
        config = create_global_configuration(self.temp, u"tcp:1234",
                                             self.node_dir,
                                             u"tcp:localhost:1234")
        alice = create_local_author(u"alice")
        magic = self.temp.child("magic")
        magic.makedirs()
        with ExpectedException(InvalidMagicFolderName):
            config.create_magic_folder(
                folder_name,
                magic,
                alice,
                u"URI:DIR2-RO:ou5wvazwlyzmqw7yof5ifmgmau:xqzt6uoulu4f3m627jtadpofnizjt3yoewzeitx47vw6memofeiq",
                u"URI:DIR2:bgksdpr3lr2gvlvhydxjo2izea:dfdkjc44gg23n3fxcxd6ywsqvuuqzo4nrtqncrjzqmh4pamag2ia",
                60,
                60,
            )
Example #56
class MagicFolderConfigCurrentSnapshotTests(SyncTestCase):
    """
    Tests for the ``MagicFolderConfig`` APIs that deal with current snapshots.
    """
    def setUp(self):
        super(MagicFolderConfigCurrentSnapshotTests, self).setUp()
        self.author = create_local_author(u"alice")

    def setup_example(self):
        self.temp = FilePath(self.mktemp())
        self.stash = self.temp.child("stash")
        self.stash.makedirs()
        self.magic = self.temp.child("magic")
        self.magic.makedirs()

        self.db = MagicFolderConfig.initialize(
            u"some-folder",
            SQLite3DatabaseLocation.memory(),
            self.author,
            self.stash,
            random_dircap(readonly=True),
            random_dircap(),
            self.magic,
            60,
            60,
        )

    @given(
        remote_snapshots(),
        path_states(),
    )
    def test_remotesnapshot_roundtrips(self, snapshot, path_state):
        """
        The capability for a ``RemoteSnapshot`` added with
        ``MagicFolderConfig.store_downloaded_snapshot`` can be read back with
        ``MagicFolderConfig.get_remotesnapshot``.
        """
        self.db.store_downloaded_snapshot(snapshot.relpath, snapshot,
                                          path_state)
        capability = self.db.get_remotesnapshot(snapshot.relpath)
        db_path_state = self.db.get_currentsnapshot_pathstate(snapshot.relpath)
        self.assertThat((capability, db_path_state),
                        Equals((snapshot.capability, path_state)))

    @given(
        relative_paths(),
        remote_snapshots(),
        path_states(),
    )
    def test_remotesnapshot_with_existing_state(self, relpath, snapshot,
                                                path_state):
        """
        A ``RemoteSnapshot`` can be added without new path state if existing
        path state is already in the database.
        """
        self.db.store_currentsnapshot_state(relpath, path_state)
        self.db.store_downloaded_snapshot(relpath, snapshot, path_state)
        capability = self.db.get_remotesnapshot(relpath)
        db_path_state = self.db.get_currentsnapshot_pathstate(relpath)
        self.assertThat((capability, db_path_state),
                        Equals((snapshot.capability, path_state)))

    @given(
        relative_paths(),
        remote_snapshots(),
    )
    def test_store_remote_without_state(self, relpath, snapshot):
        """
        Calling :py:`MagicFolderConfig.store_uploaded_snapshot` without path
        state fails when there isn't already corresponding path state in the
        database.
        """
        with ExpectedException(RemoteSnapshotWithoutPathState):
            self.db.store_uploaded_snapshot(relpath, snapshot, 42)

    @given(
        path_segments(), )
    def test_remotesnapshot_not_found(self, path):
        """
        ``MagicFolderConfig.get_remotesnapshot`` raises ``KeyError`` if there is
        no known remote snapshot for the given path.
        """
        with ExpectedException(KeyError, escape(repr(path))):
            self.db.get_remotesnapshot(path)
        with ExpectedException(KeyError, escape(repr(path))):
            self.db.get_currentsnapshot_pathstate(path)

    @given(
        path_segments(),
        path_states(),
    )
    def test_remotesnapshot_not_found_with_state(self, path, path_state):
        """
        ``MagicFolderConfig.get_remotesnapshot`` raises ``KeyError`` if there
        is no known remote snapshot for the given path, but there is path state
        for it.
        """
        self.db.store_currentsnapshot_state(path, path_state)
        with ExpectedException(KeyError, escape(repr(path))):
            self.db.get_remotesnapshot(path)

    @given(
        # Get two RemoteSnapshots with the same path.
        path_segments().flatmap(
            lambda path: tuples(
                just(path),
                lists(
                    remote_snapshots(relpaths=just(path)),
                    min_size=2,
                    max_size=2,
                ),
            ), ),
        path_states(),
    )
    def test_replace_remotesnapshot(self, snapshots, path_state):
        """
        A ``RemoteSnapshot`` for a given path can be replaced by a new
        ``RemoteSnapshot`` for the same path, without providing path state.
        """
        path, snapshots = snapshots
        self.db.store_downloaded_snapshot(path, snapshots[0], path_state)
        self.db.store_uploaded_snapshot(path, snapshots[1], 42)
        capability = self.db.get_remotesnapshot(path)
        db_path_state = self.db.get_currentsnapshot_pathstate(path)
        self.assertThat((capability, db_path_state),
                        Equals((snapshots[1].capability, path_state)))

    @given(
        # Get two RemoteSnapshots with the same path.
        path_segments().flatmap(
            lambda path: tuples(
                just(path),
                lists(
                    remote_snapshots(relpaths=just(path)),
                    min_size=2,
                    max_size=2,
                ),
            ), ),
        lists(path_states(), min_size=2, max_size=2))
    def test_replace_remotesnapshot_with_state(self, snapshots, path_states):
        """
        A ``RemoteSnapshot`` for a given path can be replaced by a new
        ``RemoteSnapshot`` for the same path, when providing path state.
        """
        path, snapshots = snapshots
        self.db.store_downloaded_snapshot(path, snapshots[0], path_states[0])
        self.db.store_downloaded_snapshot(path, snapshots[1], path_states[1])
        capability = self.db.get_remotesnapshot(path)
        db_path_state = self.db.get_currentsnapshot_pathstate(path)
        self.assertThat((capability, db_path_state),
                        Equals((snapshots[1].capability, path_states[1])))

    @given(
        # Get one path and two path states for it.
        path_segments(),
        lists(path_states(), min_size=2, max_size=2))
    def test_replace_path_state(self, path, path_states):
        """
        Path state for a given path can be replaced by new path state for the
        same path.
        """
        self.db.store_currentsnapshot_state(path, path_states[0])
        self.db.store_currentsnapshot_state(path, path_states[1])
        db_path_state = self.db.get_currentsnapshot_pathstate(path)
        self.assertThat(db_path_state, Equals(path_states[1]))

    @given(
        lists(path_segments(), min_size=1, unique=True),
        lists(path_states(), min_size=1),
    )
    def test_all_path_status(self, paths, path_states):
        """
        We can recover all path-statuses
        """
        # zip() truncates to the shorter list, so trim both to a common size.
        # (Hypothesis can also generate same-sized lists directly; see the
        # sketch at the end of this test.)
        size = min(len(paths), len(path_states))
        paths = paths[:size]
        path_states = path_states[:size]

        self.db._get_current_timestamp = lambda: 1234
        for p, ps in zip(paths, path_states):
            self.db.store_currentsnapshot_state(p, ps)

        self.assertThat(
            self.db.get_all_current_snapshot_pathstates(),
            Equals([(p, ps, seconds_to_ns(1234), None)
                    for p, ps in zip(paths, path_states)]),
        )
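        # A hedged aside: Hypothesis can draw the size first and flat-map it
        # into both lists, yielding same-sized lists directly. A minimal
        # sketch with the same strategies (not used by this test):
        #
        #     from hypothesis import strategies as st
        #
        #     paths_and_states = st.integers(min_value=1, max_value=10).flatmap(
        #         lambda n: st.tuples(
        #             st.lists(path_segments(), min_size=n, max_size=n,
        #                      unique=True),
        #             st.lists(path_states(), min_size=n, max_size=n),
        #         ))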

    def test_remotesnapshot_caps_missing(self):
        """
        A ``KeyError`` is raised when accessing missing remotesnapshot capabilities.
        """
        self.setup_example()
        with self.assertRaises(KeyError):
            self.db.get_remotesnapshot_caps("a-missing-snapshot-name")

    def test_tahoe_object_sizes(self):
        """
        No capability sizes are returned for an empty database.
        """
        self.setup_example()
        self.assertThat(self.db.get_tahoe_object_sizes(), Equals([]))

    @given(
        relative_paths(),
        path_states(),
    )
    def test_tahoe_object_sizes_local(self, relpath, state):
        """
        Local-only snapshots get no size returned
        """
        self.db.store_currentsnapshot_state(relpath, state)
        self.assertThat(self.db.get_tahoe_object_sizes(), Equals([]))

    @given(
        relative_paths(),
        remote_snapshots(),
        path_states(),
    )
    def test_tahoe_object_sizes_remote(self, relpath, remote_snap, state):
        """
        Correct capability sizes are returned
        """
        self.db.store_downloaded_snapshot(relpath, remote_snap, state)

        s = remote_snap.capability.size
        c = remote_snap.content_cap.size
        m = remote_snap.metadata_cap.size
        self.assertThat(self.db.get_tahoe_object_sizes(), Equals([s, c, m]))
Example #57
class GlobalConfigDatabaseMagicFolderTests(SyncTestCase):
    """
    Tests for the ``GlobalConfigDatabase`` APIs that deal with individual
    ``MagicFolderConfig`` instances.
    """
    def setUp(self):
        super(GlobalConfigDatabaseMagicFolderTests, self).setUp()
        self.temp = FilePath(self.mktemp())
        self.node_dir = FilePath(self.mktemp())
        self.tahoe_dir = self.useFixture(NodeDirectory(self.node_dir))

    def test_create_folder(self):
        config = create_global_configuration(self.temp, u"tcp:1234",
                                             self.node_dir,
                                             u"tcp:localhost:1234")
        alice = create_local_author(u"alice")
        magic = self.temp.child("magic")
        magic.makedirs()
        magic_folder = config.create_magic_folder(
            u"foo",
            magic,
            self.temp.child("state"),
            alice,
            u"URI:DIR2-RO:ou5wvazwlyzmqw7yof5ifmgmau:xqzt6uoulu4f3m627jtadpofnizjt3yoewzeitx47vw6memofeiq",
            u"URI:DIR2:bgksdpr3lr2gvlvhydxjo2izea:dfdkjc44gg23n3fxcxd6ywsqvuuqzo4nrtqncrjzqmh4pamag2ia",
            60,
        )
        self.assertThat(
            magic_folder.author,
            Equals(alice),
        )

    def test_create_folder_duplicate(self):
        config = create_global_configuration(self.temp, u"tcp:1234",
                                             self.node_dir,
                                             u"tcp:localhost:1234")
        alice = create_local_author(u"alice")
        magic = self.temp.child("magic")
        magic.makedirs()
        config.create_magic_folder(
            u"foo",
            magic,
            self.temp.child("state"),
            alice,
            u"URI:DIR2-RO:ou5wvazwlyzmqw7yof5ifmgmau:xqzt6uoulu4f3m627jtadpofnizjt3yoewzeitx47vw6memofeiq",
            u"URI:DIR2:bgksdpr3lr2gvlvhydxjo2izea:dfdkjc44gg23n3fxcxd6ywsqvuuqzo4nrtqncrjzqmh4pamag2ia",
            60,
        )
        with ExpectedException(ValueError,
                               "Already have a magic-folder named 'foo'"):
            config.create_magic_folder(
                u"foo",
                magic,
                self.temp.child("state2"),
                alice,
                u"URI:DIR2-RO:ou5wvazwlyzmqw7yof5ifmgmau:xqzt6uoulu4f3m627jtadpofnizjt3yoewzeitx47vw6memofeiq",
                u"URI:DIR2:bgksdpr3lr2gvlvhydxjo2izea:dfdkjc44gg23n3fxcxd6ywsqvuuqzo4nrtqncrjzqmh4pamag2ia",
                60,
            )

    def test_folder_nonexistant_magic_path(self):
        config = create_global_configuration(self.temp, u"tcp:1234",
                                             self.node_dir,
                                             u"tcp:localhost:1234")
        alice = create_local_author(u"alice")
        magic = self.temp.child("magic")
        with ExpectedException(ValueError, ".*{}.*".format(magic.path)):
            config.create_magic_folder(
                u"foo",
                magic,
                self.temp.child("state"),
                alice,
                u"URI:DIR2-RO:ou5wvazwlyzmqw7yof5ifmgmau:xqzt6uoulu4f3m627jtadpofnizjt3yoewzeitx47vw6memofeiq",
                u"URI:DIR2:bgksdpr3lr2gvlvhydxjo2izea:dfdkjc44gg23n3fxcxd6ywsqvuuqzo4nrtqncrjzqmh4pamag2ia",
                60,
            )

    def test_folder_state_already_exists(self):
        config = create_global_configuration(self.temp, u"tcp:1234",
                                             self.node_dir,
                                             u"tcp:localhost:1234")
        alice = create_local_author(u"alice")
        magic = self.temp.child("magic")
        state = self.temp.child("state")
        magic.makedirs()
        state.makedirs()  # the state path must not pre-exist; create it to trigger the error
        with ExpectedException(ValueError, ".*{}.*".format(state.path)):
            config.create_magic_folder(
                u"foo",
                magic,
                state,
                alice,
                u"URI:DIR2-RO:ou5wvazwlyzmqw7yof5ifmgmau:xqzt6uoulu4f3m627jtadpofnizjt3yoewzeitx47vw6memofeiq",
                u"URI:DIR2:bgksdpr3lr2gvlvhydxjo2izea:dfdkjc44gg23n3fxcxd6ywsqvuuqzo4nrtqncrjzqmh4pamag2ia",
                60,
            )

    def test_folder_get_path(self):
        """
        we can retrieve the stash-path from a magic-folder config
        """
        config = create_global_configuration(self.temp, u"tcp:1234",
                                             self.node_dir,
                                             u"tcp:localhost:1234")
        alice = create_local_author(u"alice")
        magic = self.temp.child("magic")
        state = self.temp.child("state")
        magic.makedirs()
        config.create_magic_folder(
            u"foo",
            magic,
            state,
            alice,
            u"URI:DIR2-RO:ou5wvazwlyzmqw7yof5ifmgmau:xqzt6uoulu4f3m627jtadpofnizjt3yoewzeitx47vw6memofeiq",
            u"URI:DIR2:bgksdpr3lr2gvlvhydxjo2izea:dfdkjc44gg23n3fxcxd6ywsqvuuqzo4nrtqncrjzqmh4pamag2ia",
            60,
        )
        self.assertThat(config.list_magic_folders(), Contains(u"foo"))
        mf_config = config.get_magic_folder(u"foo")
        self.assertThat(mf_config.stash_path, Equals(state.child("stash")))

    def test_get_folder_nonexistent(self):
        """
        It is an error to retrieve a non-existent folder.
        """
        config = create_global_configuration(self.temp, u"tcp:1234",
                                             self.node_dir,
                                             u"tcp:localhost:1234")
        with ExpectedException(ValueError):
            config.get_magic_folder(u"non-existent")
Example #58
class StoreLocalSnapshotTests(SyncTestCase):
    """
    Tests for the ``MagicFolderConfig`` APIs which store and load
    ``LocalSnapshot`` objects.
    """
    def setUp(self):
        super(StoreLocalSnapshotTests, self).setUp()
        self.author = create_local_author(u"alice")
        self.uncooperator = Cooperator(
            terminationPredicateFactory=lambda: lambda: False,
            scheduler=lambda f: f(),
        )
        self.addCleanup(self.uncooperator.stop)

    def setup_example(self):
        self.temp = FilePath(self.mktemp())
        self.stash = self.temp.child("stash")
        self.stash.makedirs()
        self.magic = self.temp.child("magic")
        self.magic.makedirs()

        self.db = MagicFolderConfig.initialize(
            u"some-folder",
            SQLite3DatabaseLocation.memory(),
            self.author,
            self.stash,
            # collective dircap
            random_dircap(readonly=True),
            # upload dircap
            random_dircap(),
            self.magic,
            60,
            60,
        )

    @given(
        content1=binary(min_size=1),
        content2=binary(min_size=1),
        filename=magic_folder_filenames(),
        stash_subdir=path_segments(),
    )
    def test_serialize_store_deserialize_snapshot(self, content1, content2,
                                                  filename, stash_subdir):
        """
        A snapshot chain can be serialized into the database and deserialized
        back intact.
        """
        data1 = BytesIO(content1)

        snapshots = []

        d = create_snapshot(
            relpath=filename,
            author=self.author,
            data_producer=data1,
            snapshot_stash_dir=self.stash,
            parents=[],
            cooperator=self.uncooperator,
        )
        d.addCallback(snapshots.append)

        self.assertThat(
            d,
            succeeded(Always()),
        )

        self.db.store_local_snapshot(
            snapshots[0],
            PathState(42, seconds_to_ns(42), seconds_to_ns(42)),
        )

        # now modify the same file and create a new local snapshot
        data2 = BytesIO(content2)
        d = create_snapshot(
            relpath=filename,
            author=self.author,
            data_producer=data2,
            snapshot_stash_dir=self.stash,
            parents=[snapshots[0]],
            cooperator=self.uncooperator,
        )
        d.addCallback(snapshots.append)

        # serialize and store the snapshot in db.
        # It should rewrite the previously written row.
        self.db.store_local_snapshot(
            snapshots[1],
            PathState(42, seconds_to_ns(42), seconds_to_ns(42)),
        )

        # now read back the serialized snapshot from db
        reconstructed_local_snapshot = self.db.get_local_snapshot(filename)

        self.assertThat(
            reconstructed_local_snapshot,
            MatchesStructure(relpath=Equals(filename),
                             parents_local=HasLength(1)))

        # the initial snapshot does not have parent snapshots
        self.assertThat(reconstructed_local_snapshot.parents_local[0],
                        MatchesStructure(parents_local=HasLength(0), ))

    @given(
        content1=binary(min_size=1),
        content2=binary(min_size=1),
        filename=magic_folder_filenames(),
        stash_subdir=path_segments(),
    )
    def test_store_snapshot_missing_parents(self, content1, content2, filename,
                                            stash_subdir):
        """
        Storing a snapshot whose parents are not in the database will raise an
        error.
        """
        data1 = BytesIO(content1)

        snapshots = []

        d = create_snapshot(
            relpath=filename,
            author=self.author,
            data_producer=data1,
            snapshot_stash_dir=self.stash,
            parents=[],
            cooperator=self.uncooperator,
        )
        d.addCallback(snapshots.append)

        # now modify the same file and create a new local snapshot
        data2 = BytesIO(content2)
        d = create_snapshot(
            relpath=filename,
            author=self.author,
            data_producer=data2,
            snapshot_stash_dir=self.stash,
            parents=[snapshots[0]],
            cooperator=self.uncooperator,
        )
        d.addCallback(snapshots.append)

        # snapshots[0] was never stored in the database, so storing its child
        # must fail with a missing-parent error.
        with ExpectedException(LocalSnapshotMissingParent):
            self.db.store_local_snapshot(
                snapshots[1],
                PathState(42, seconds_to_ns(42), seconds_to_ns(42)),
            )

    @given(
        local_snapshots(), )
    def test_delete_all_local_snapshots_for(self, snapshot):
        """
        After a local snapshot is deleted from the database,
        ``MagicFolderConfig.get_local_snapshot`` raises ``KeyError`` for that
        snapshot's path.
        """
        self.db.store_local_snapshot(
            snapshot,
            PathState(42, seconds_to_ns(42), seconds_to_ns(42)),
        )
        self.db.delete_all_local_snapshots_for(snapshot.relpath)
        with ExpectedException(KeyError, escape(repr(snapshot.relpath))):
            self.db.get_local_snapshot(snapshot.relpath)
Example #59
class FormatDiscoveryTests(TestCase):
    """
    Tests which discover the parsing method based on the imported module name.
    """
    def setUp(self):
        """
        Create a temporary directory with a package structure in it.
        """
        self.entry = FilePath(mkdtemp())
        self.preTestModules = sys.modules.copy()
        sys.path.append(self.entry.path)
        pkg = self.entry.child(b"twisted_python_versions_package")
        pkg.makedirs()
        pkg.child(b"__init__.py").setContent(
            b"from twisted.python.versions import Version\n"
            b"version = Version('twisted_python_versions_package', 1, 0, 0)\n")
        self.svnEntries = pkg.child(b".svn")
        self.svnEntries.makedirs()


    def tearDown(self):
        """
        Remove the imported modules and sys.path modifications.
        """
        sys.modules.clear()
        sys.modules.update(self.preTestModules)
        sys.path.remove(self.entry.path)


    def checkSVNFormat(self, formatVersion, entriesText, expectedRevision):
        """
        Check for the given revision being detected after setting the SVN
        entries text and format version of the test directory structure.
        """
        self.svnEntries.child(b"format").setContent(formatVersion + b"\n")
        self.svnEntries.child(b"entries").setContent(entriesText)
        self.assertEqual(self.getVersion()._getSVNVersion(), expectedRevision)


    def getVersion(self):
        """
        Import and retrieve the Version object from our dynamically created
        package.
        """
        import twisted_python_versions_package
        return twisted_python_versions_package.version


    def test_detectVersion4(self):
        """
        Verify that version 4 format file will be properly detected and parsed.
        """
        self.checkSVNFormat(b"4", VERSION_4_ENTRIES, b'18211')


    def test_detectVersion8(self):
        """
        Verify that version 8 format files will be properly detected and
        parsed.
        """
        self.checkSVNFormat(b"8", VERSION_8_ENTRIES, b'22715')


    def test_detectVersion9(self):
        """
        Verify that version 9 format files will be properly detected and
        parsed.
        """
        self.checkSVNFormat(b"9", VERSION_9_ENTRIES, b'22715')


    def test_unparseableEntries(self):
        """
        Verify that the result is C{b"Unknown"} for an apparently supported
        version for which parsing of the entries file fails.
        """
        self.checkSVNFormat(b"4", b"some unsupported stuff", b"Unknown")


    def test_detectVersion10(self):
        """
        Verify that version 10 format files will be properly detected and
        parsed.

        Differing from previous formats, the version 10 format lacks a
        I{format} file and B{only} has the version information on the first
        line of the I{entries} file.
        """
        self.svnEntries.child(b"entries").setContent(VERSION_10_ENTRIES)
        self.assertEqual(self.getVersion()._getSVNVersion(), b'22715')


    def test_detectUnknownVersion(self):
        """
        Verify that a new version of SVN will result in the revision 'Unknown'.
        """
        self.checkSVNFormat(b"some-random-new-version", b"ooga booga!", b'Unknown')


    def test_getVersionStringWithRevision(self):
        """
        L{getVersionString} includes the discovered revision number.
        """
        self.svnEntries.child(b"format").setContent(b"9\n")
        self.svnEntries.child(b"entries").setContent(VERSION_10_ENTRIES)
        version = getVersionString(self.getVersion())
        self.assertEqual(
            "twisted_python_versions_package 1.0.0+r22715",
            version)
        self.assertIsInstance(version, type(""))
Example #60
class ConflictTests(SyncTestCase):
    """
    Tests for conflict tracking in ``MagicFolderConfig``.
    """
    def setUp(self):
        super(ConflictTests, self).setUp()
        self.author = create_local_author(u"desktop")
        self.temp = FilePath(self.mktemp())
        self.stash = self.temp.child("stash")
        self.stash.makedirs()
        self.magic = self.temp.child("magic")
        self.magic.makedirs()

        self.db = MagicFolderConfig.initialize(
            u"some-folder",
            SQLite3DatabaseLocation.memory(),
            self.author,
            self.stash,
            random_dircap(readonly=True),
            random_dircap(),
            self.magic,
            60,
            60,
        )

    @given(
        tahoe_lafs_immutable_dir_capabilities(),
        tahoe_lafs_chk_capabilities(),
        tahoe_lafs_chk_capabilities(),
    )
    def test_add_list_conflict(self, remote_cap, meta_cap, content_cap):
        """
        Adding a conflict allows us to list it
        """
        snap = RemoteSnapshot(
            "foo",
            self.author,
            {
                "relpath": "foo",
                "modification_time": 1234
            },
            remote_cap,
            [],
            content_cap,
            meta_cap,
        )

        self.db.add_conflict(snap)
        self.assertThat(
            self.db.list_conflicts(),
            Equals({
                "foo": [Conflict(remote_cap, self.author.name)],
            }),
        )
        self.db.resolve_conflict("foo")

    @given(
        tahoe_lafs_immutable_dir_capabilities(),
        tahoe_lafs_chk_capabilities(),
        tahoe_lafs_chk_capabilities(),
    )
    def test_add_conflict_twice(self, remote_cap, meta_cap, content_cap):
        """
        It's an error to add the same conflict twice
        """
        snap = RemoteSnapshot(
            "foo",
            self.author,
            {
                "relpath": "foo",
                "modification_time": 1234
            },
            remote_cap,
            [],
            content_cap,
            meta_cap,
        )

        self.db.add_conflict(snap)
        with self.assertRaises(sqlite3.IntegrityError):
            self.db.add_conflict(snap)
        self.db.resolve_conflict("foo")

    @given(
        tahoe_lafs_immutable_dir_capabilities(),
        tahoe_lafs_immutable_dir_capabilities(),
        tahoe_lafs_chk_capabilities(),
        tahoe_lafs_chk_capabilities(),
    )
    def test_add_list_multi_conflict(self, remote0_cap, remote1_cap, meta_cap,
                                     content_cap):
        """
        A multiple-conflict is reflected in the list
        """
        assume(remote0_cap != remote1_cap)

        snap0 = RemoteSnapshot(
            "foo",
            create_local_author(u"desktop"),
            {
                "relpath": "foo",
                "modification_time": 1234
            },
            remote0_cap,
            [],
            content_cap,
            meta_cap,
        )
        snap1 = RemoteSnapshot(
            "foo",
            create_local_author(u"laptop"),
            {
                "relpath": "foo",
                "modification_time": 1234
            },
            remote1_cap,
            [],
            content_cap,
            meta_cap,
        )

        self.db.add_conflict(snap0)
        self.db.add_conflict(snap1)
        self.assertThat(
            self.db.list_conflicts(),
            Equals({
                "foo": [
                    Conflict(remote0_cap, "desktop"),
                    Conflict(remote1_cap, "laptop"),
                ],
            }))
        self.db.resolve_conflict("foo")

    @given(
        tahoe_lafs_immutable_dir_capabilities(),
        tahoe_lafs_immutable_dir_capabilities(),
        tahoe_lafs_chk_capabilities(),
    )
    def test_delete_multi_conflict(self, remote0_cap, remote1_cap,
                                   immutable_cap):
        """
        A multiple-conflict is successfully deleted
        """

        snap0 = RemoteSnapshot(
            "foo",
            create_local_author(u"laptop"),
            {
                "relpath": "foo",
                "modification_time": 1234
            },
            remote0_cap,
            [],
            immutable_cap,
            immutable_cap,
        )
        snap1 = RemoteSnapshot(
            "foo",
            create_local_author(u"phone"),
            {
                "relpath": "foo",
                "modification_time": 1234
            },
            remote1_cap,
            [],
            immutable_cap,
            immutable_cap,
        )

        self.db.add_conflict(snap0)
        self.db.add_conflict(snap1)
        self.assertThat(
            self.db.list_conflicts(),
            Equals({
                "foo": [
                    Conflict(remote0_cap, "laptop"),
                    Conflict(remote1_cap, "phone"),
                ]
            }),
        )

        self.db.resolve_conflict("foo")
        self.assertThat(
            self.db.list_conflicts(),
            Equals(dict()),
        )

    @given(
        relative_paths(),
        author_names(),
    )
    def test_conflict_file(self, relpath, author):
        """
        A conflict-marker file is detected as such.
        """
        conflict_path = self.db.magic_path.preauthChild(
            "{}.conflict-{}".format(relpath, author))

        self.assertThat(self.db.is_conflict_marker(conflict_path),
                        Equals(True))
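
    # A minimal sketch (an assumption for illustration, not magic-folder's
    # actual implementation) of how such a marker predicate could work, given
    # the "<relpath>.conflict-<author>" naming used above:
    #
    #     def is_conflict_marker(path):
    #         return ".conflict-" in path.basename()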