Example No. 1
class Writer(unittest.TestCase):
    test_data = """line1
line2
line3
"""

    def setUp(self):
        self.temp_dir = FilePath(tempfile.mkdtemp())
        self.existing_file_name = self.temp_dir.child("foo")
        with self.existing_file_name.open("w") as f:
            f.write(self.test_data)

    def test_write_existing_file(self):
        self.assertRaises(FileExists, FilesystemWriter, self.temp_dir.child("foo"))

    def test_finished_write(self):
        w = FilesystemWriter(self.temp_dir.child("bar"))
        w.write(self.test_data)
        w.finish()
        with self.temp_dir.child("bar").open() as f:
            self.assertEqual(f.read(), self.test_data)

    def test_cancelled_write(self):
        w = FilesystemWriter(self.temp_dir.child("bar"))
        w.write(self.test_data)
        w.cancel()
        self.assertFalse(
            self.temp_dir.child("bar").exists(),
            "If a write is cancelled, the file should not be left behind")

    def tearDown(self):
        self.temp_dir.remove()
Example No. 2
class TestFSStorageAssumptions(unittest.TestCase):

    def setUp(self):
        self.tmp_content = FilePath(self.mktemp())
        self.tmp_content.makedirs()
        self.storage = fs_storage.FSStore(None, name='my media',
                                          content=self.tmp_content.path,
                                          urlbase='http://fsstore-host/xyz',
                                          enable_inotify=False)

    def tearDown(self):
        self.tmp_content.remove()

    def test_ContentLen(self):
        self.assertEqual(len(self.storage.content), 1)
        self.assertEqual(len(self.storage.store), 1)
        self.assertEqual(self.storage.len(), 1)

    def test_Root(self):
        root = self.storage.get_by_id('1000')
        self.assertIs(root.parent, None)
        self.assertRaises(AttributeError, getattr, root, 'path')
        # A single path passed, so content is a "directory" named by
        # its basename
        self.assertEqual(root.mimetype, 'directory')
        self.assertEqual(root.get_name(), 'temp')
Example No. 3
class TestFSStorageAssumptions(unittest.TestCase):
    def setUp(self):
        self.tmp_content = FilePath(self.mktemp())
        self.tmp_content.makedirs()
        self.storage = fs_storage.FSStore(None,
                                          name='my media',
                                          content=self.tmp_content.path,
                                          urlbase='http://fsstore-host/xyz',
                                          enable_inotify=False)

    def tearDown(self):
        self.tmp_content.remove()

    def test_ContentLen(self):
        self.assertEqual(len(self.storage.content), 1)
        self.assertEqual(len(self.storage.store), 1)
        self.assertEqual(self.storage.len(), 1)

    def test_Root(self):
        root = self.storage.get_by_id('1000')
        self.assertIs(root.parent, None)
        self.assertRaises(AttributeError, getattr, root, 'path')
        # A single path passed, so content is a "directory" named by
        # its basename
        self.assertEqual(root.mimetype, 'directory')
        self.assertEqual(root.get_name(), 'temp')
Example No. 4
    def test_unix_already_listening_cant_delete(self):
        """
        A config with type = "unix" will create an endpoint for a UNIX socket
        at the given path, and delete it if required. If it can't delete it, it
        will raise an exception.
        """
        parent_fp = FilePath("/tmp").child(uuid4().hex)
        parent_fp.makedirs()
        fp = parent_fp.child(uuid4().hex)

        # Something is already there
        fp.setContent(b"")
        fp.chmod(0o544)
        parent_fp.chmod(0o544)

        reactor = SelectReactor()
        config = {"type": "unix", "path": fp.path}

        with self.assertRaises(OSError) as e:
            create_listening_endpoint_from_config(config, self.cbdir, reactor,
                                                  self.log)
        self.assertEqual(e.exception.errno, 13)  # Permission Denied

        parent_fp.chmod(0o777)
        parent_fp.remove()
Example No. 5
def publish_artifacts_main(args, base_path, top_level):
    """
    Publish release artifacts.

    :param list args: The arguments passed to the scripts.
    :param FilePath base_path: The executable being run.
    :param FilePath top_level: The top-level of the flocker repository.
    """
    options = UploadOptions()

    try:
        options.parseOptions(args)
    except UsageError as e:
        sys.stderr.write("%s: %s\n" % (base_path.basename(), e))
        raise SystemExit(1)
    except NotARelease:
        sys.stderr.write("%s: Can't publish artifacts for a non-release.\n"
                         % (base_path.basename(),))
        raise SystemExit(1)
    except DocumentationRelease:
        sys.stderr.write("%s: Can't publish artifacts for a documentation "
                         "release.\n" % (base_path.basename(),))
        raise SystemExit(1)

    dispatcher = ComposedDispatcher([boto_dispatcher, yum_dispatcher,
                                     base_dispatcher])

    scratch_directory = FilePath(tempfile.mkdtemp(
        prefix=b'flocker-upload-'))
    scratch_directory.child('packages').createDirectory()
    scratch_directory.child('python').createDirectory()
    scratch_directory.child('pip').createDirectory()

    try:
        sync_perform(
            dispatcher=dispatcher,
            effect=sequence([
                upload_packages(
                    scratch_directory=scratch_directory.child('packages'),
                    target_bucket=options['target'],
                    version=options['flocker-version'],
                    build_server=options['build-server'],
                    top_level=top_level,
                ),
                upload_python_packages(
                    scratch_directory=scratch_directory.child('python'),
                    target_bucket=options['target'],
                    top_level=top_level,
                    output=sys.stdout,
                    error=sys.stderr,
                ),
                upload_pip_index(
                    scratch_directory=scratch_directory.child('pip'),
                    target_bucket=options['target'],
                ),
            ]),
        )

    finally:
        scratch_directory.remove()
Example No. 6
    def create_credentials():
        """
        Create PKI credentials for TLS access to libvirtd.

        Credentials are not signed by the host CA. This only allows
        unverified access but removes the need to transfer files
        between the host and the guest.
        """
        path = FilePath(tempfile.mkdtemp())
        try:
            ca = RootCredential.initialize(path, b"mycluster")
            NodeCredential.initialize(path, ca, uuid='client')
            ca_dir = FilePath('/etc/pki/CA')
            if not ca_dir.exists():
                ca_dir.makedirs()
            path.child(AUTHORITY_CERTIFICATE_FILENAME).copyTo(
                FilePath('/etc/pki/CA/cacert.pem')
            )
            client_key_dir = FilePath('/etc/pki/libvirt/private')
            if not client_key_dir.exists():
                client_key_dir.makedirs()
            client_key_dir.chmod(0o700)
            path.child('client.key').copyTo(
                client_key_dir.child('clientkey.pem')
            )
            path.child('client.crt').copyTo(
                FilePath('/etc/pki/libvirt/clientcert.pem')
            )
        finally:
            path.remove()
Example No. 7
class _UnixFixHome(object):
    """
    Mixin class to fix the HOME environment variable to something usable.

    @ivar home: FilePath pointing at C{homePath}.
    @type home: L{FilePath}

    @ivar homePath: relative path to the directory used as HOME during the
        tests.
    @type homePath: C{str}
    """
    def setUp(self):
        path = self.mktemp()
        self.home = FilePath(path)
        self.homePath = os.path.join(*self.home.segmentsFrom(FilePath(".")))
        if len(self.home.path) >= 70:
            # UNIX_MAX_PATH is 108, and the socket file is generally of length
            # 30, so we can't rely on mktemp...
            self.homePath = "_tmp"
            self.home = FilePath(self.homePath)
        self.home.makedirs()
        self.savedEnviron = os.environ.copy()
        os.environ["HOME"] = self.homePath

    def tearDown(self):
        os.environ.clear()
        os.environ.update(self.savedEnviron)
        self.home.remove()
Example No. 8
    def test_unix_already_listening_cant_delete(self):
        """
        A config with type = "unix" will create an endpoint for a UNIX socket
        at the given path, and delete it if required. If it can't delete it, it
        will raise an exception.
        """
        parent_fp = FilePath("/tmp").child(uuid4().hex)
        parent_fp.makedirs()
        fp = parent_fp.child(uuid4().hex)

        # Something is already there
        fp.setContent(b"")
        fp.chmod(0o544)
        parent_fp.chmod(0o544)

        reactor = SelectReactor()
        config = {
            "type": "unix",
            "path": fp.path
        }

        with self.assertRaises(OSError) as e:
            create_listening_endpoint_from_config(config, self.cbdir,
                                                  reactor, self.log)
        self.assertEqual(e.exception.errno, 13)  # Permission Denied

        parent_fp.chmod(0o777)
        parent_fp.remove()
Example No. 9
class TestFSStorage(unittest.TestCase):

    def setUp(self):
        self.tmp_content = FilePath(self.mktemp())
        f = self.tmp_content.child('my content')
        audio = f.child('audio')
        audio.makedirs()
        video = f.child('video')
        video.makedirs()
        images = f.child('images')
        images.makedirs()
        album = audio.child('album-1')
        album.makedirs()
        album.child('track-1.mp3').touch()
        album.child('track-2.mp3').touch()
        album = audio.child('album-2')
        album.makedirs()
        album.child('track-1.ogg').touch()
        album.child('track-2.ogg').touch()
        self.storage = fs_storage.FSStore(None, name='my media',
                                          content=self.tmp_content.path,
                                          urlbase='http://fsstore-host/xyz',
                                          enable_inotify=False)

    def tearDown(self):
        self.tmp_content.remove()

    def test_ContentLen(self):
        self.assertEqual(len(self.storage.content), 1)
        # 11 items, since we have "<tempdir>/my content/..."
        self.assertEqual(len(self.storage.store), 11)
        self.assertEqual(self.storage.len(), 11)
Example No. 10
class _UnixFixHome(object):
    """
    Mixin class to fix the HOME environment variable to something usable.

    @ivar home: FilePath pointing at C{homePath}.
    @type home: L{FilePath}

    @ivar homePath: relative path to the directory used as HOME during the
        tests.
    @type homePath: C{str}
    """

    def setUp(self):
        path = self.mktemp()
        self.home = FilePath(path)
        self.homePath = os.path.join(*self.home.segmentsFrom(FilePath(".")))
        if len(self.home.path) >= 70:
            # UNIX_MAX_PATH is 108, and the socket file is generally of length
            # 30, so we can't rely on mktemp...
            self.homePath = "_tmp"
            self.home = FilePath(self.homePath)
        self.home.makedirs()
        self.savedEnviron = os.environ.copy()
        os.environ["HOME"] = self.homePath


    def tearDown(self):
        os.environ.clear()
        os.environ.update(self.savedEnviron)
        self.home.remove()
Example No. 11
class Reader(unittest.TestCase):
    test_data = b"""line1
line2
line3
"""

    def setUp(self):
        self.temp_dir = FilePath(tempfile.mkdtemp()).asBytesMode()
        self.existing_file_name = self.temp_dir.child(b'foo')
        with self.existing_file_name.open('w') as f:
            f.write(self.test_data)

    def test_file_not_found(self):
        self.assertRaises(FileNotFound, FilesystemReader,
                          self.temp_dir.child(b'bar'))

    def test_read_existing_file(self):
        r = FilesystemReader(self.temp_dir.child(b'foo'))
        data = r.read(3)
        ostring = data
        while data:
            data = r.read(3)
            ostring += data
        self.assertEqual(r.read(3), b'')
        self.assertEqual(r.read(5), b'')
        self.assertEqual(r.read(7), b'')
        self.assertTrue(
            r.file_obj.closed,
            b"The file has been exhausted and should be in the closed state")
        self.assertEqual(ostring, self.test_data)

    def test_size(self):
        r = FilesystemReader(self.temp_dir.child(b'foo'))
        self.assertEqual(len(self.test_data), r.size)

    def test_size_when_reader_finished(self):
        r = FilesystemReader(self.temp_dir.child(b'foo'))
        r.finish()
        self.assertTrue(r.size is None)

    def test_size_when_file_removed(self):
        # FilesystemReader.size uses fstat() to discover the file's size, so
        # the absence of the file does not matter.
        r = FilesystemReader(self.temp_dir.child(b'foo'))
        self.existing_file_name.remove()
        self.assertEqual(len(self.test_data), r.size)

    def test_cancel(self):
        r = FilesystemReader(self.temp_dir.child(b'foo'))
        r.read(3)
        r.finish()
        self.assertTrue(
            r.file_obj.closed,
            "The session has been finished, so the file object should be in the closed state"
        )
        r.finish()

    def tearDown(self):
        self.temp_dir.remove()
Example No. 12
 def stop_pipeline(self):
     self.pipeline.set_state(Gst.State.READY)
     plfile = FilePath("playlist.m3u8")
     segments = FilePath("./").globChildren("segment*")
     for segment in segments:
         segment.remove()
     if plfile.exists() and plfile.isfile():
         plfile.remove()
Example No. 13
class LocalOriginWriteOptionNegotiation(unittest.TestCase):

    port = 65466

    def setUp(self):
        self.clock = Clock()
        self.temp_dir = FilePath(tempfile.mkdtemp()).asBytesMode()
        self.target = self.temp_dir.child(b'foo')
        self.writer = DelayedWriter(self.target, _clock=self.clock, delay=2)
        self.transport = FakeTransport(hostAddress=('127.0.0.1', self.port))
        self.ws = LocalOriginWriteSession(('127.0.0.1', 65465), self.writer,
                                          options={b'blksize':b'123'}, _clock=self.clock)
        self.wd = MockHandshakeWatchdog(4, self.ws.timedOut, _clock=self.clock)
        self.ws.timeout_watchdog = self.wd
        self.ws.transport = self.transport


    def test_option_normal(self):
        self.ws.startProtocol()
        self.ws.datagramReceived(OACKDatagram({b'blksize':b'12'}).to_wire(), ('127.0.0.1', 65465))
        self.clock.advance(0.1)
        self.assertEqual(self.ws.session.block_size, WriteSession.block_size)
        self.assertEqual(self.transport.value(), ACKDatagram(0).to_wire())

        self.transport.clear()
        self.ws.datagramReceived(OACKDatagram({b'blksize':b'9'}).to_wire(), ('127.0.0.1', 65465))
        self.clock.advance(0.1)
        self.assertEqual(self.ws.session.block_size, WriteSession.block_size)
        self.assertEqual(self.transport.value(), ACKDatagram(0).to_wire())

        self.transport.clear()
        self.ws.datagramReceived(DATADatagram(1, b'foobarbaz').to_wire(), ('127.0.0.1', 65465))
        self.clock.advance(3)
        self.assertTrue(self.ws.session.started)
        self.clock.advance(0.1)
        self.assertEqual(self.ws.session.block_size, 9)
        self.assertEqual(self.transport.value(), ACKDatagram(1).to_wire())

        self.transport.clear()
        self.ws.datagramReceived(DATADatagram(2, b'asdfghjkl').to_wire(), ('127.0.0.1', 65465))
        self.clock.advance(3)
        self.assertEqual(self.transport.value(), ACKDatagram(2).to_wire())
        self.writer.finish()
        self.assertEqual(self.writer.file_path.open('r').read(), b'foobarbazasdfghjkl')

        self.transport.clear()
        self.ws.datagramReceived(OACKDatagram({b'blksize':b'12'}).to_wire(), ('127.0.0.1', 65465))
        self.clock.advance(0.1)
        self.assertEqual(self.ws.session.block_size, 9)
        self.assertEqual(self.transport.value(), ACKDatagram(0).to_wire())

    def test_option_timeout(self):
        self.ws.startProtocol()
        self.clock.advance(5)
        self.assertTrue(self.transport.disconnecting)

    def tearDown(self):
        self.temp_dir.remove()
Example No. 14
class LocalOriginWriteOptionNegotiation(unittest.TestCase):

    port = 65466

    def setUp(self):
        self.clock = Clock()
        self.temp_dir = FilePath(tempfile.mkdtemp()).asBytesMode()
        self.target = self.temp_dir.child(b'foo')
        self.writer = DelayedWriter(self.target, _clock=self.clock, delay=2)
        self.transport = FakeTransport(hostAddress=('127.0.0.1', self.port))
        self.ws = LocalOriginWriteSession(('127.0.0.1', 65465), self.writer,
                                          options={b'blksize':b'123'}, _clock=self.clock)
        self.wd = timedCaller([5], None, self.ws.timedOut, self.clock)
        self.ws.timeout_watchdog = self.wd
        self.ws.transport = self.transport


    def test_option_normal(self):
        self.ws.startProtocol()
        self.ws.datagramReceived(OACKDatagram({b'blksize':b'12'}).to_wire(), ('127.0.0.1', 65465))
        self.clock.advance(0.1)
        self.assertEqual(self.ws.session.block_size, WriteSession.block_size)
        self.assertEqual(self.transport.value(), ACKDatagram(0).to_wire())

        self.transport.clear()
        self.ws.datagramReceived(OACKDatagram({b'blksize':b'9'}).to_wire(), ('127.0.0.1', 65465))
        self.clock.advance(0.1)
        self.assertEqual(self.ws.session.block_size, WriteSession.block_size)
        self.assertEqual(self.transport.value(), ACKDatagram(0).to_wire())

        self.transport.clear()
        self.ws.datagramReceived(DATADatagram(1, b'foobarbaz').to_wire(), ('127.0.0.1', 65465))
        self.clock.advance(3)
        self.assertTrue(self.ws.session.started)
        self.clock.advance(0.1)
        self.assertEqual(self.ws.session.block_size, 9)
        self.assertEqual(self.transport.value(), ACKDatagram(1).to_wire())

        self.transport.clear()
        self.ws.datagramReceived(DATADatagram(2, b'asdfghjkl').to_wire(), ('127.0.0.1', 65465))
        self.clock.advance(3)
        self.assertEqual(self.transport.value(), ACKDatagram(2).to_wire())
        self.writer.finish()
        self.assertEqual(self.writer.file_path.open('r').read(), b'foobarbazasdfghjkl')

        self.transport.clear()
        self.ws.datagramReceived(OACKDatagram({b'blksize':b'12'}).to_wire(), ('127.0.0.1', 65465))
        self.clock.advance(0.1)
        self.assertEqual(self.ws.session.block_size, 9)
        self.assertEqual(self.transport.value(), ACKDatagram(0).to_wire())

    def test_option_timeout(self):
        self.ws.startProtocol()
        self.clock.advance(5)
        self.assertTrue(self.transport.disconnecting)

    def tearDown(self):
        self.temp_dir.remove()
Example No. 15
class LocalOriginReadOptionNegotiation(unittest.TestCase):
    test_data = b"""line1
line2
anotherline"""
    port = 65466

    def setUp(self):
        self.clock = Clock()
        self.temp_dir = FilePath(tempfile.mkdtemp()).asBytesMode()
        self.target = self.temp_dir.child(b'foo')
        with self.target.open('wb') as temp_fd:
            temp_fd.write(self.test_data)
        self.reader = DelayedReader(self.target, _clock=self.clock, delay=2)
        self.transport = FakeTransport(hostAddress=('127.0.0.1', self.port))
        self.rs = LocalOriginReadSession(('127.0.0.1', 65465),
                                         self.reader,
                                         _clock=self.clock)
        self.wd = MockHandshakeWatchdog(4, self.rs.timedOut, _clock=self.clock)
        self.rs.timeout_watchdog = self.wd
        self.rs.transport = self.transport

    def test_option_normal(self):
        self.rs.startProtocol()
        self.rs.datagramReceived(
            OACKDatagram({
                b'blksize': b'9'
            }).to_wire(), ('127.0.0.1', 65465))
        self.clock.advance(0.1)
        self.assertEqual(self.rs.session.block_size, 9)
        self.clock.pump((1, ) * 3)
        self.assertEqual(self.transport.value(),
                         DATADatagram(1, self.test_data[:9]).to_wire())

        self.rs.datagramReceived(
            OACKDatagram({
                b'blksize': b'12'
            }).to_wire(), ('127.0.0.1', 65465))
        self.clock.advance(0.1)
        self.assertEqual(self.rs.session.block_size, 9)

        self.transport.clear()
        self.rs.datagramReceived(
            ACKDatagram(1).to_wire(), ('127.0.0.1', 65465))
        self.clock.pump((1, ) * 3)
        self.assertEqual(self.transport.value(),
                         DATADatagram(2, self.test_data[9:18]).to_wire())

        self.addCleanup(self.rs.cancel)

    def test_local_origin_read_option_timeout(self):
        self.rs.startProtocol()
        self.clock.advance(5)
        self.assertTrue(self.transport.disconnecting)

    def tearDown(self):
        self.temp_dir.remove()
Example No. 16
class Reader(unittest.TestCase):
    test_data = """line1
line2
line3
"""

    def setUp(self):
        self.temp_dir = FilePath(tempfile.mkdtemp())
        self.existing_file_name = self.temp_dir.child('foo')
        with self.existing_file_name.open('w') as f:
            f.write(self.test_data)

    def test_file_not_found(self):
        self.assertRaises(FileNotFound, FilesystemReader, self.temp_dir.child('bar'))

    def test_read_existing_file(self):
        r = FilesystemReader(self.temp_dir.child('foo'))
        data = r.read(3)
        ostring = data
        while data:
            data = r.read(3)
            ostring += data
        self.assertEqual(r.read(3), '')
        self.assertEqual(r.read(5), '')
        self.assertEqual(r.read(7), '')
        self.assertTrue(
            r.file_obj.closed,
            "The file has been exhausted and should be in the closed state")
        self.assertEqual(ostring, self.test_data)

    def test_size(self):
        r = FilesystemReader(self.temp_dir.child('foo'))
        self.assertEqual(len(self.test_data), r.size)

    def test_size_when_reader_finished(self):
        r = FilesystemReader(self.temp_dir.child('foo'))
        r.finish()
        self.assertIsNone(r.size)

    def test_size_when_file_removed(self):
        # FilesystemReader.size uses fstat() to discover the file's size, so
        # the absence of the file does not matter.
        r = FilesystemReader(self.temp_dir.child('foo'))
        self.existing_file_name.remove()
        self.assertEqual(len(self.test_data), r.size)

    def test_cancel(self):
        r = FilesystemReader(self.temp_dir.child('foo'))
        r.read(3)
        r.finish()
        self.assertTrue(
            r.file_obj.closed,
            "The session has been finished, so the file object should be in the closed state"
        )
        r.finish()

    def tearDown(self):
        self.temp_dir.remove()
Example No. 17
def run(argv=None):
    if argv is None:
        argv = sys.argv

    generated = FilePath(GENERATED_TILESETS)
    if generated.exists():
        generated.remove()
    sets = discoverTilesets(FilePath('tiles'))
    for ts in sets:
        ts.writeResources()
Example No. 18
def buildAllTarballs(checkout, destination, templatePath=None):
    """
    Build complete tarballs (including documentation) for Twisted and all
    subprojects.

    This should be called after the version numbers have been updated and
    NEWS files created.

    @type checkout: L{FilePath}
    @param checkout: The SVN working copy from which a pristine source tree
        will be exported.
    @type destination: L{FilePath}
    @param destination: The directory in which tarballs will be placed.
    @type templatePath: L{FilePath}
    @param templatePath: Location of the template file that is used for the
        howto documentation.

    @raise UncleanWorkingDirectory: If there are modifications to the
        working directory of C{checkout}.
    @raise NotWorkingDirectory: If the C{checkout} path is not an SVN checkout.
    """
    if not checkout.child(".svn").exists():
        raise NotWorkingDirectory(
            "%s does not appear to be an SVN working directory." %
            (checkout.path, ))
    if runCommand(["svn", "st", checkout.path]).strip():
        raise UncleanWorkingDirectory(
            "There are local modifications to the SVN checkout in %s." %
            (checkout.path, ))

    workPath = FilePath(mkdtemp())
    export = workPath.child("export")
    runCommand(["svn", "export", checkout.path, export.path])
    twistedPath = export.child("twisted")
    version = Project(twistedPath).getVersion()
    versionString = version.base()

    apiBaseURL = "http://twistedmatrix.com/documents/%s/api/%%s.html" % (
        versionString)
    if not destination.exists():
        destination.createDirectory()
    db = DistributionBuilder(export,
                             destination,
                             templatePath=templatePath,
                             apiBaseURL=apiBaseURL)

    db.buildCore(versionString)
    for subproject in twisted_subprojects:
        if twistedPath.child(subproject).exists():
            db.buildSubProject(subproject, versionString)

    db.buildTwisted(versionString)
    workPath.remove()
Example No. 19
 def test_0900_autoRepairKeyError(self):
     """
     
     """
     yield AsyncExecCmds(['/opt/HybridCluster/init.d/mysqld stop']).getDeferred()
     sampleBadDataPath = FilePath(__file__).sibling('bad-data')
     target = FilePath('/var/db/mysql/autorepair')
     try:
         target.remove()
     except OSError as e:
         if e.errno != ENOENT:
             raise
Example No. 20
    def buildPDF(self, bookPath, inputDirectory, outputPath):
        """
        Build a PDF from the given a LaTeX book document.

        @type bookPath: L{FilePath}
        @param bookPath: The location of a LaTeX document defining a book.

        @type inputDirectory: L{FilePath}
        @param inputDirectory: The directory which the inputs of the book are
            relative to.

        @type outputPath: L{FilePath}
        @param outputPath: The location to which to write the resulting book.
        """
        if not bookPath.basename().endswith(".tex"):
            raise ValueError("Book filename must end with .tex")

        workPath = FilePath(mkdtemp())
        try:
            startDir = os.getcwd()
            try:
                os.chdir(inputDirectory.path)

                texToDVI = (
                    "latex -interaction=nonstopmode "
                    "-output-directory=%s %s") % (
                    workPath.path, bookPath.path)

                # What I tell you three times is true!
                # The first two invocations of latex on the book file allow it
                # to correctly create page numbers for in-text references.  Why
                # this is the case, I could not tell you. -exarkun
                for i in range(3):
                    self.run(texToDVI)

                bookBaseWithoutExtension = bookPath.basename()[:-4]
                dviPath = workPath.child(bookBaseWithoutExtension + ".dvi")
                psPath = workPath.child(bookBaseWithoutExtension + ".ps")
                pdfPath = workPath.child(bookBaseWithoutExtension + ".pdf")
                self.run(
                    "dvips -o %(postscript)s -t letter -Ppdf %(dvi)s" % {
                        'postscript': psPath.path,
                        'dvi': dviPath.path})
                self.run("ps2pdf13 %(postscript)s %(pdf)s" % {
                        'postscript': psPath.path,
                        'pdf': pdfPath.path})
                pdfPath.moveTo(outputPath)
                workPath.remove()
            finally:
                os.chdir(startDir)
        except:
            workPath.moveTo(bookPath.parent().child(workPath.basename()))
            raise
Example No. 21
 def validatePath():
     dbpath = FilePath(TimezoneCache.getDBPath())
     if not dbpath.exists():
         TimezoneCache.copyPackage("Copying")
     else:
         # Check if pkg is more recent and copy over
         pkgversion = TimezoneCache.getTZVersion(TimezoneCache._getPackageDBPath())
         dbversion = TimezoneCache.getTZVersion(dbpath.path)
         if not dbversion or pkgversion > dbversion:
             dbpath.remove()
             TimezoneCache.copyPackage("Updating")
         else:
             log.info("Valid timezones at {p}", p=dbpath.path)
Example No. 22
 def test_0900_autoRepairKeyError(self):
     """
     
     """
     yield AsyncExecCmds(['/opt/HybridCluster/init.d/mysqld stop'
                          ]).getDeferred()
     sampleBadDataPath = FilePath(__file__).sibling('bad-data')
     target = FilePath('/var/db/mysql/autorepair')
     try:
         target.remove()
     except OSError as e:
         if e.errno != ENOENT:
             raise
Example No. 23
 def validatePath():
     dbpath = FilePath(TimezoneCache.getDBPath())
     if not dbpath.exists():
         TimezoneCache.copyPackage("Copying")
     else:
         # Check if pkg is more recent and copy over
         pkgversion = TimezoneCache.getTZVersion(TimezoneCache._getPackageDBPath())
         dbversion = TimezoneCache.getTZVersion(dbpath.path)
         if not dbversion or pkgversion > dbversion:
             dbpath.remove()
             TimezoneCache.copyPackage("Updating")
         else:
             log.info("Valid timezones at %s" % (dbpath.path,))
Example No. 24
def buildAllTarballs(checkout, destination, templatePath=None):
    """
    Build complete tarballs (including documentation) for Twisted and all
    subprojects.

    This should be called after the version numbers have been updated and
    NEWS files created.

    @type checkout: L{FilePath}
    @param checkout: The SVN working copy from which a pristine source tree
        will be exported.
    @type destination: L{FilePath}
    @param destination: The directory in which tarballs will be placed.
    @type templatePath: L{FilePath}
    @param templatePath: Location of the template file that is used for the
        howto documentation.

    @raise UncleanWorkingDirectory: If there are modifications to the
        working directory of C{checkout}.
    @raise NotWorkingDirectory: If the C{checkout} path is not an SVN checkout.
    """
    if not checkout.child(".svn").exists():
        raise NotWorkingDirectory(
            "%s does not appear to be an SVN working directory."
            % (checkout.path,))
    if runCommand(["svn", "st", checkout.path]).strip():
        raise UncleanWorkingDirectory(
            "There are local modifications to the SVN checkout in %s."
            % (checkout.path,))

    workPath = FilePath(mkdtemp())
    export = workPath.child("export")
    runCommand(["svn", "export", checkout.path, export.path])
    twistedPath = export.child("twisted")
    version = Project(twistedPath).getVersion()
    versionString = version.base()

    apiBaseURL = "http://twistedmatrix.com/documents/%s/api/%%s.html" % (
        versionString)
    if not destination.exists():
        destination.createDirectory()
    db = DistributionBuilder(export, destination, templatePath=templatePath,
        apiBaseURL=apiBaseURL)

    db.buildCore(versionString)
    for subproject in twisted_subprojects:
        if twistedPath.child(subproject).exists():
            db.buildSubProject(subproject, versionString)

    db.buildTwisted(versionString)
    workPath.remove()
Example No. 25
class BootstrapRemoteOriginWrite(unittest.TestCase):

    port = 65466

    def setUp(self):
        self.clock = Clock()
        self.temp_dir = FilePath(tempfile.mkdtemp()).asBytesMode()
        self.target = self.temp_dir.child(b'foo')
        self.writer = DelayedWriter(self.target, _clock=self.clock, delay=2)
        self.transport = FakeTransport(hostAddress=('127.0.0.1', self.port))
        self.ws = RemoteOriginWriteSession(('127.0.0.1', 65465),
                                           self.writer,
                                           _clock=self.clock)
        self.ws.transport = self.transport
        self.ws.startProtocol()

    @inlineCallbacks
    def test_invalid_tid(self):
        bad_tid_dgram = ACKDatagram(123)
        yield self.ws.datagramReceived(bad_tid_dgram.to_wire(),
                                       ('127.0.0.1', 1111))
        err_dgram = TFTPDatagramFactory(*split_opcode(self.transport.value()))
        self.assertEqual(err_dgram.errorcode, ERR_TID_UNKNOWN)
        self.addCleanup(self.ws.cancel)

    def test_remote_origin_write_bootstrap(self):
        # Initial ACK
        ack_datagram_0 = ACKDatagram(0)
        self.clock.advance(0.1)
        self.assertEqual(self.transport.value(), ack_datagram_0.to_wire())
        self.assertFalse(self.transport.disconnecting)

        # Normal exchange
        self.transport.clear()
        d = self.ws.datagramReceived(
            DATADatagram(1, b'foobar').to_wire(), ('127.0.0.1', 65465))

        def cb(res):
            self.clock.advance(0.1)
            ack_datagram_1 = ACKDatagram(1)
            self.assertEqual(self.transport.value(), ack_datagram_1.to_wire())
            self.assertEqual(self.target.open('r').read(), b'foobar')
            self.assertFalse(self.transport.disconnecting)
            self.addCleanup(self.ws.cancel)

        d.addCallback(cb)
        self.clock.advance(3)
        return d

    def tearDown(self):
        self.temp_dir.remove()
Example No. 26
def buildAllTarballs(checkout, destination, templatePath=None):
    """
    Build complete tarballs (including documentation) for Twisted and all
    subprojects.

    This should be called after the version numbers have been updated and
    NEWS files created.

    @type checkout: L{FilePath}
    @param checkout: The repository from which a pristine source tree will be
        exported.
    @type destination: L{FilePath}
    @param destination: The directory in which tarballs will be placed.
    @type templatePath: L{FilePath}
    @param templatePath: Location of the template file that is used for the
        howto documentation.

    @raise UncleanWorkingDirectory: If there are modifications to the
        working directory of C{checkout}.
    @raise NotWorkingDirectory: If the C{checkout} path is not a supported VCS
        repository.
    """
    cmd = getRepositoryCommand(checkout)
    cmd.ensureIsWorkingDirectory(checkout)

    if not cmd.isStatusClean(checkout):
        raise UncleanWorkingDirectory(
            "There are local modifications to the repository in %s." %
            (checkout.path, ))

    workPath = FilePath(mkdtemp())
    export = workPath.child("export")
    cmd.exportTo(checkout, export)
    twistedPath = export.child("twisted")
    version = Project(twistedPath).getVersion()
    versionString = version.base()

    if not destination.exists():
        destination.createDirectory()
    db = DistributionBuilder(export, destination, templatePath=templatePath)

    db.buildCore(versionString)
    for subproject in twisted_subprojects:
        if twistedPath.child(subproject).exists():
            db.buildSubProject(subproject, versionString)

    db.buildTwisted(versionString)
    workPath.remove()
Example No. 27
def buildAllTarballs(checkout, destination, templatePath=None):
    """
    Build complete tarballs (including documentation) for Twisted and all
    subprojects.

    This should be called after the version numbers have been updated and
    NEWS files created.

    @type checkout: L{FilePath}
    @param checkout: The repository from which a pristine source tree will be
        exported.
    @type destination: L{FilePath}
    @param destination: The directory in which tarballs will be placed.
    @type templatePath: L{FilePath}
    @param templatePath: Location of the template file that is used for the
        howto documentation.

    @raise UncleanWorkingDirectory: If there are modifications to the
        working directory of C{checkout}.
    @raise NotWorkingDirectory: If the C{checkout} path is not a supported VCS
        repository.
    """
    cmd = getRepositoryCommand(checkout)
    cmd.ensureIsWorkingDirectory(checkout)

    if not cmd.isStatusClean(checkout):
        raise UncleanWorkingDirectory(
            "There are local modifications to the repository in %s."
            % (checkout.path,))

    workPath = FilePath(mkdtemp())
    export = workPath.child("export")
    cmd.exportTo(checkout, export)
    twistedPath = export.child("twisted")
    version = Project(twistedPath).getVersion()
    versionString = version.base()

    if not destination.exists():
        destination.createDirectory()
    db = DistributionBuilder(export, destination, templatePath=templatePath)

    db.buildCore(versionString)
    for subproject in twisted_subprojects:
        if twistedPath.child(subproject).exists():
            db.buildSubProject(subproject, versionString)

    db.buildTwisted(versionString)
    workPath.remove()
Example No. 28
class BootstrapLocalOriginWrite(unittest.TestCase):

    port = 65466

    def setUp(self):
        self.clock = Clock()
        self.temp_dir = FilePath(tempfile.mkdtemp()).asBytesMode()
        self.target = self.temp_dir.child(b'foo')
        self.writer = DelayedWriter(self.target, _clock=self.clock, delay=2)
        self.transport = FakeTransport(hostAddress=('127.0.0.1', self.port))
        self.ws = LocalOriginWriteSession(('127.0.0.1', 65465),
                                          self.writer,
                                          _clock=self.clock)
        self.wd = MockHandshakeWatchdog(4, self.ws.timedOut, _clock=self.clock)
        self.ws.timeout_watchdog = self.wd
        self.ws.transport = self.transport

    def test_invalid_tid(self):
        self.ws.startProtocol()
        bad_tid_dgram = ACKDatagram(123)
        self.ws.datagramReceived(bad_tid_dgram.to_wire(), ('127.0.0.1', 1111))

        err_dgram = TFTPDatagramFactory(*split_opcode(self.transport.value()))
        self.assertEqual(err_dgram.errorcode, ERR_TID_UNKNOWN)
        self.addCleanup(self.ws.cancel)

    #test_invalid_tid.skip = 'Will go to another test case'

    def test_local_origin_write_session_handshake_timeout(self):
        self.ws.startProtocol()
        self.clock.advance(5)
        self.assertFalse(self.transport.value())
        self.assertTrue(self.transport.disconnecting)

    def test_local_origin_write_session_handshake_success(self):
        self.ws.session.block_size = 6
        self.ws.startProtocol()
        self.clock.advance(1)
        data_datagram = DATADatagram(1, b'foobar')
        self.ws.datagramReceived(data_datagram.to_wire(), ('127.0.0.1', 65465))
        self.clock.pump((1, ) * 3)
        self.assertEqual(self.transport.value(), ACKDatagram(1).to_wire())
        self.assertFalse(self.transport.disconnecting)
        self.assertFalse(self.wd.active())
        self.addCleanup(self.ws.cancel)

    def tearDown(self):
        self.temp_dir.remove()
Example No. 29
def publish_rpms_main(args, base_path, top_level):
    """
    The ClusterHQ yum repository contains packages for Flocker, as well as the
    dependencies which aren't available in Fedora 20 or CentOS 7. It is
    currently hosted on Amazon S3. When doing a release, we want to add the
    new Flocker packages, while preserving the existing packages in the
    repository. To do this, we download the current repository, add the new
    package, update the metadata, and then upload the repository.

    :param list args: The arguments passed to the script.
    :param FilePath base_path: The executable being run.
    :param FilePath top_level: The top-level of the flocker repository.
    """
    options = UploadOptions()

    try:
        options.parseOptions(args)
    except UsageError as e:
        sys.stderr.write("%s: %s\n" % (base_path.basename(), e))
        raise SystemExit(1)

    dispatcher = ComposedDispatcher([boto_dispatcher, yum_dispatcher,
                                     base_dispatcher])

    try:
        scratch_directory = FilePath(tempfile.mkdtemp(
            prefix=b'flocker-upload-rpm-'))

        sync_perform(
            dispatcher=dispatcher,
            effect=upload_rpms(
                scratch_directory=scratch_directory,
                target_bucket=options['target'],
                version=options['flocker-version'],
                build_server=options['build-server'],
                ))

    except NotARelease:
        sys.stderr.write("%s: Can't upload RPMs for a non-release."
                         % (base_path.basename(),))
        raise SystemExit(1)
    except DocumentationRelease:
        sys.stderr.write("%s: Can't upload RPMs for a documentation release."
                         % (base_path.basename(),))
        raise SystemExit(1)
    finally:
        scratch_directory.remove()
Example No. 30
class BootstrapLocalOriginRead(unittest.TestCase):
    test_data = b"""line1
line2
anotherline"""
    port = 65466

    def setUp(self):
        self.clock = Clock()
        self.temp_dir = FilePath(tempfile.mkdtemp()).asBytesMode()
        self.target = self.temp_dir.child(b'foo')
        with self.target.open('wb') as temp_fd:
            temp_fd.write(self.test_data)
        self.reader = DelayedReader(self.target, _clock=self.clock, delay=2)
        self.transport = FakeTransport(hostAddress=('127.0.0.1', self.port))
        self.rs = LocalOriginReadSession(('127.0.0.1', 65465),
                                         self.reader,
                                         _clock=self.clock)
        self.wd = MockHandshakeWatchdog(4, self.rs.timedOut, _clock=self.clock)
        self.rs.timeout_watchdog = self.wd
        self.rs.transport = self.transport
        self.rs.startProtocol()

    def test_invalid_tid(self):
        data_datagram = DATADatagram(1, b'foobar')
        self.rs.datagramReceived(data_datagram, ('127.0.0.1', 11111))
        self.clock.advance(0.1)
        err_dgram = TFTPDatagramFactory(*split_opcode(self.transport.value()))
        self.assertEqual(err_dgram.errorcode, ERR_TID_UNKNOWN)
        self.addCleanup(self.rs.cancel)

    def test_local_origin_read_session_handshake_timeout(self):
        self.clock.advance(5)
        self.assertFalse(self.transport.value())
        self.assertTrue(self.transport.disconnecting)

    def test_local_origin_read_session_handshake_success(self):
        self.clock.advance(1)
        ack_datagram = ACKDatagram(0)
        self.rs.datagramReceived(ack_datagram.to_wire(), ('127.0.0.1', 65465))
        self.clock.advance(2)
        self.assertTrue(self.transport.value())
        self.assertFalse(self.transport.disconnecting)
        self.assertFalse(self.wd.active())
        self.addCleanup(self.rs.cancel)

    def tearDown(self):
        self.temp_dir.remove()
Example No. 31
def publish_rpms_main(args, base_path, top_level):
    """
    The ClusterHQ yum repository contains packages for Flocker, as well as the
    dependencies which aren't available in Fedora 20 or CentOS 7. It is
    currently hosted on Amazon S3. When doing a release, we want to add the
    new Flocker packages, while preserving the existing packages in the
    repository. To do this, we download the current repository, add the new
    package, update the metadata, and then upload the repository.

    :param list args: The arguments passed to the script.
    :param FilePath base_path: The executable being run.
    :param FilePath top_level: The top-level of the flocker repository.
    """
    options = UploadOptions()

    try:
        options.parseOptions(args)
    except UsageError as e:
        sys.stderr.write("%s: %s\n" % (base_path.basename(), e))
        raise SystemExit(1)

    dispatcher = ComposedDispatcher(
        [boto_dispatcher, yum_dispatcher, base_dispatcher])

    try:
        scratch_directory = FilePath(
            tempfile.mkdtemp(prefix=b'flocker-upload-rpm-'))

        sync_perform(dispatcher=dispatcher,
                     effect=upload_rpms(
                         scratch_directory=scratch_directory,
                         target_bucket=options['target'],
                         version=options['flocker-version'],
                         build_server=options['build-server'],
                     ))

    except NotARelease:
        sys.stderr.write("%s: Can't upload RPMs for a non-release." %
                         (base_path.basename(), ))
        raise SystemExit(1)
    except DocumentationRelease:
        sys.stderr.write("%s: Can't upload RPMs for a documentation release." %
                         (base_path.basename(), ))
        raise SystemExit(1)
    finally:
        scratch_directory.remove()
Example No. 32
class BootstrapLocalOriginWrite(unittest.TestCase):

    port = 65466

    def setUp(self):
        self.clock = Clock()
        self.temp_dir = FilePath(tempfile.mkdtemp()).asBytesMode()
        self.target = self.temp_dir.child(b'foo')
        self.writer = DelayedWriter(self.target, _clock=self.clock, delay=2)
        self.transport = FakeTransport(hostAddress=('127.0.0.1', self.port))
        self.ws = LocalOriginWriteSession(('127.0.0.1', 65465), self.writer, _clock=self.clock)
        self.wd = MockHandshakeWatchdog(4, self.ws.timedOut, _clock=self.clock)
        self.ws.timeout_watchdog = self.wd
        self.ws.transport = self.transport

    def test_invalid_tid(self):
        self.ws.startProtocol()
        bad_tid_dgram = ACKDatagram(123)
        self.ws.datagramReceived(bad_tid_dgram.to_wire(), ('127.0.0.1', 1111))

        err_dgram = TFTPDatagramFactory(*split_opcode(self.transport.value()))
        self.assertEqual(err_dgram.errorcode, ERR_TID_UNKNOWN)
        self.addCleanup(self.ws.cancel)
    #test_invalid_tid.skip = 'Will go to another test case'

    def test_local_origin_write_session_handshake_timeout(self):
        self.ws.startProtocol()
        self.clock.advance(5)
        self.assertFalse(self.transport.value())
        self.assertTrue(self.transport.disconnecting)

    def test_local_origin_write_session_handshake_success(self):
        self.ws.session.block_size = 6
        self.ws.startProtocol()
        self.clock.advance(1)
        data_datagram = DATADatagram(1, b'foobar')
        self.ws.datagramReceived(data_datagram.to_wire(), ('127.0.0.1', 65465))
        self.clock.pump((1,)*3)
        self.assertEqual(self.transport.value(), ACKDatagram(1).to_wire())
        self.assertFalse(self.transport.disconnecting)
        self.assertFalse(self.wd.active())
        self.addCleanup(self.ws.cancel)

    def tearDown(self):
        self.temp_dir.remove()
Example No. 33
class BootstrapRemoteOriginWrite(unittest.TestCase):

    port = 65466

    def setUp(self):
        self.clock = Clock()
        self.temp_dir = FilePath(tempfile.mkdtemp()).asBytesMode()
        self.target = self.temp_dir.child(b'foo')
        self.writer = DelayedWriter(self.target, _clock=self.clock, delay=2)
        self.transport = FakeTransport(hostAddress=('127.0.0.1', self.port))
        self.ws = RemoteOriginWriteSession(('127.0.0.1', 65465), self.writer, _clock=self.clock)
        self.ws.transport = self.transport
        self.ws.startProtocol()

    @inlineCallbacks
    def test_invalid_tid(self):
        bad_tid_dgram = ACKDatagram(123)
        yield self.ws.datagramReceived(bad_tid_dgram.to_wire(), ('127.0.0.1', 1111))
        err_dgram = TFTPDatagramFactory(*split_opcode(self.transport.value()))
        self.assertEqual(err_dgram.errorcode, ERR_TID_UNKNOWN)
        self.addCleanup(self.ws.cancel)

    def test_remote_origin_write_bootstrap(self):
        # Initial ACK
        ack_datagram_0 = ACKDatagram(0)
        self.clock.advance(0.1)
        self.assertEqual(self.transport.value(), ack_datagram_0.to_wire())
        self.assertFalse(self.transport.disconnecting)

        # Normal exchange
        self.transport.clear()
        d = self.ws.datagramReceived(DATADatagram(1, b'foobar').to_wire(), ('127.0.0.1', 65465))
        def cb(res):
            self.clock.advance(0.1)
            ack_datagram_1 = ACKDatagram(1)
            self.assertEqual(self.transport.value(), ack_datagram_1.to_wire())
            self.assertEqual(self.target.open('r').read(), b'foobar')
            self.assertFalse(self.transport.disconnecting)
            self.addCleanup(self.ws.cancel)
        d.addCallback(cb)
        self.clock.advance(3)
        return d

    def tearDown(self):
        self.temp_dir.remove()
Example No. 34
class BootstrapLocalOriginRead(unittest.TestCase):
    test_data = b"""line1
line2
anotherline"""
    port = 65466

    def setUp(self):
        self.clock = Clock()
        self.temp_dir = FilePath(tempfile.mkdtemp()).asBytesMode()
        self.target = self.temp_dir.child(b'foo')
        with self.target.open('wb') as temp_fd:
            temp_fd.write(self.test_data)
        self.reader = DelayedReader(self.target, _clock=self.clock, delay=2)
        self.transport = FakeTransport(hostAddress=('127.0.0.1', self.port))
        self.rs = LocalOriginReadSession(('127.0.0.1', 65465), self.reader, _clock=self.clock)
        self.wd = MockHandshakeWatchdog(4, self.rs.timedOut, _clock=self.clock)
        self.rs.timeout_watchdog = self.wd
        self.rs.transport = self.transport
        self.rs.startProtocol()

    def test_invalid_tid(self):
        data_datagram = DATADatagram(1, b'foobar')
        self.rs.datagramReceived(data_datagram, ('127.0.0.1', 11111))
        self.clock.advance(0.1)
        err_dgram = TFTPDatagramFactory(*split_opcode(self.transport.value()))
        self.assertEqual(err_dgram.errorcode, ERR_TID_UNKNOWN)
        self.addCleanup(self.rs.cancel)

    def test_local_origin_read_session_handshake_timeout(self):
        self.clock.advance(5)
        self.assertFalse(self.transport.value())
        self.assertTrue(self.transport.disconnecting)

    def test_local_origin_read_session_handshake_success(self):
        self.clock.advance(1)
        ack_datagram = ACKDatagram(0)
        self.rs.datagramReceived(ack_datagram.to_wire(), ('127.0.0.1', 65465))
        self.clock.advance(2)
        self.assertTrue(self.transport.value())
        self.assertFalse(self.transport.disconnecting)
        self.assertFalse(self.wd.active())
        self.addCleanup(self.rs.cancel)

    def tearDown(self):
        self.temp_dir.remove()
Example No. 35
class LocalOriginReadOptionNegotiation(unittest.TestCase):
    test_data = b"""line1
line2
anotherline"""
    port = 65466

    def setUp(self):
        self.clock = Clock()
        self.temp_dir = FilePath(tempfile.mkdtemp()).asBytesMode()
        self.target = self.temp_dir.child(b'foo')
        with self.target.open('wb') as temp_fd:
            temp_fd.write(self.test_data)
        self.reader = DelayedReader(self.target, _clock=self.clock, delay=2)
        self.transport = FakeTransport(hostAddress=('127.0.0.1', self.port))
        self.rs = LocalOriginReadSession(('127.0.0.1', 65465), self.reader, _clock=self.clock)
        self.wd = MockHandshakeWatchdog(4, self.rs.timedOut, _clock=self.clock)
        self.rs.timeout_watchdog = self.wd
        self.rs.transport = self.transport

    def test_option_normal(self):
        self.rs.startProtocol()
        self.rs.datagramReceived(OACKDatagram({b'blksize':b'9'}).to_wire(), ('127.0.0.1', 65465))
        self.clock.advance(0.1)
        self.assertEqual(self.rs.session.block_size, 9)
        self.clock.pump((1,)*3)
        self.assertEqual(self.transport.value(), DATADatagram(1, self.test_data[:9]).to_wire())

        self.rs.datagramReceived(OACKDatagram({b'blksize':b'12'}).to_wire(), ('127.0.0.1', 65465))
        self.clock.advance(0.1)
        self.assertEqual(self.rs.session.block_size, 9)

        self.transport.clear()
        self.rs.datagramReceived(ACKDatagram(1).to_wire(), ('127.0.0.1', 65465))
        self.clock.pump((1,)*3)
        self.assertEqual(self.transport.value(), DATADatagram(2, self.test_data[9:18]).to_wire())

        self.addCleanup(self.rs.cancel)

    def test_local_origin_read_option_timeout(self):
        self.rs.startProtocol()
        self.clock.advance(5)
        self.assertTrue(self.transport.disconnecting)

    def tearDown(self):
        self.temp_dir.remove()
Example No. 36
def download_resources():
    if os.access(config.var_lib_path, os.W_OK):
        dst_directory = FilePath(config.var_lib_path)
    else:
        dst_directory = FilePath(config.ooni_home)

    print("Downloading {} to {}".format(ooni_resources_url,
                                        dst_directory.path))
    tmp_download_directory = FilePath(tempfile.mkdtemp())
    tmp_download_filename = tmp_download_directory.temporarySibling()


    try:
        yield downloadPage(ooni_resources_url, tmp_download_filename.path)
        ooni_resources_tar_gz = tarfile.open(tmp_download_filename.path)
        ooni_resources_tar_gz.extractall(tmp_download_directory.path)

        if not tmp_download_directory.child('GeoIP').exists():
            raise Exception("Could not find GeoIP data files in downloaded "
                            "tar.")

        if not tmp_download_directory.child('resources').exists():
            raise Exception("Could not find resources data files in "
                            "downloaded tar.")

        geoip_dir = dst_directory.child('GeoIP')
        resources_dir = dst_directory.child('resources')

        if geoip_dir.exists():
            geoip_dir.remove()
        tmp_download_directory.child('GeoIP').moveTo(geoip_dir)

        if resources_dir.exists():
            resources_dir.remove()
        tmp_download_directory.child('resources').moveTo(resources_dir)

        print("Written GeoIP files to {}".format(geoip_dir.path))
        print("Written resources files to {}".format(resources_dir.path))

    except Exception as exc:
        print("Failed to download resources!")
        raise exc

    finally:
        tmp_download_directory.remove()
Example No. 37
def download_resources():
    if os.access(config.var_lib_path, os.W_OK):
        dst_directory = FilePath(config.var_lib_path)
    else:
        dst_directory = FilePath(config.ooni_home)

    print("Downloading {} to {}".format(ooni_resources_url,
                                        dst_directory.path))
    tmp_download_directory = FilePath(tempfile.mkdtemp())
    tmp_download_filename = tmp_download_directory.temporarySibling()

    try:
        yield downloadPage(ooni_resources_url, tmp_download_filename.path)
        ooni_resources_tar_gz = tarfile.open(tmp_download_filename.path)
        ooni_resources_tar_gz.extractall(tmp_download_directory.path)

        if not tmp_download_directory.child('GeoIP').exists():
            raise Exception("Could not find GeoIP data files in downloaded "
                            "tar.")

        if not tmp_download_directory.child('resources').exists():
            raise Exception("Could not find resources data files in "
                            "downloaded tar.")

        geoip_dir = dst_directory.child('GeoIP')
        resources_dir = dst_directory.child('resources')

        if geoip_dir.exists():
            geoip_dir.remove()
        tmp_download_directory.child('GeoIP').moveTo(geoip_dir)

        if resources_dir.exists():
            resources_dir.remove()
        tmp_download_directory.child('resources').moveTo(resources_dir)

        print("Written GeoIP files to {}".format(geoip_dir.path))
        print("Written resources files to {}".format(resources_dir.path))

    except Exception as exc:
        print("Failed to download resources!")
        raise exc

    finally:
        tmp_download_directory.remove()
Example No. 38
class DirectoryChangeListenerTestCase(TestCase):
    def test_delete(self):
        """
        Verify directory deletions can be monitored
        """

        self.tmpdir = FilePath(self.mktemp())
        self.tmpdir.makedirs()

        def deleteAction():
            self.tmpdir.remove()

        resource = KQueueReactorTestFixture(self, deleteAction)
        storageService = StubStorageService(resource.reactor)
        delegate = DataStoreMonitor(resource.reactor, storageService)
        dcl = DirectoryChangeListener(resource.reactor, self.tmpdir.path,
                                      delegate)
        dcl.startListening()
        resource.runReactor()
        self.assertTrue(storageService.stopCalled)
        self.assertEqual(delegate.methodCalled, "deleted")

    def test_rename(self):
        """
        Verify directory renames can be monitored
        """

        self.tmpdir = FilePath(self.mktemp())
        self.tmpdir.makedirs()

        def renameAction():
            self.tmpdir.moveTo(FilePath(self.mktemp()))

        resource = KQueueReactorTestFixture(self, renameAction)
        storageService = StubStorageService(resource.reactor)
        delegate = DataStoreMonitor(resource.reactor, storageService)
        dcl = DirectoryChangeListener(resource.reactor, self.tmpdir.path,
                                      delegate)
        dcl.startListening()
        resource.runReactor()
        self.assertTrue(storageService.stopCalled)
        self.assertEqual(delegate.methodCalled, "renamed")
Example No. 39
0
class Writer(unittest.TestCase):
    test_data = b"""line1
line2
line3
"""

    def setUp(self):
        self.temp_dir = FilePath(tempfile.mkdtemp()).asBytesMode()
        self.existing_file_name = self.temp_dir.child(b'foo')
        self.existing_file_name.setContent(self.test_data)

    def test_write_existing_file(self):
        self.assertRaises(FileExists, FilesystemWriter,
                          self.temp_dir.child(b'foo'))

    def test_write_to_non_existent_directory(self):
        new_directory = self.temp_dir.child(b"new")
        new_file = new_directory.child(b"baz")
        self.assertFalse(new_directory.exists())
        FilesystemWriter(new_file).finish()
        self.assertTrue(new_directory.exists())
        self.assertTrue(new_file.exists())

    def test_finished_write(self):
        w = FilesystemWriter(self.temp_dir.child(b'bar'))
        w.write(self.test_data)
        w.finish()
        with self.temp_dir.child(b'bar').open() as f:
            self.assertEqual(f.read(), self.test_data)

    def test_cancelled_write(self):
        w = FilesystemWriter(self.temp_dir.child(b'bar'))
        w.write(self.test_data)
        w.cancel()
        self.assertFalse(
            self.temp_dir.child(b'bar').exists(),
            "If a write is cancelled, the file should not be left behind")

    def tearDown(self):
        self.temp_dir.remove()
Example No. 40
0
class TestFSStorage(unittest.TestCase):

    def setUp(self):
        self.tmp_content = FilePath(self.mktemp())
        f = self.tmp_content.child('my content')
        audio = f.child('audio')
        audio.makedirs()
        video = f.child('video')
        video.makedirs()
        images = f.child('images')
        images.makedirs()
        album = audio.child('album-1')
        album.makedirs()
        album.child('track-1.mp3').touch()
        album.child('track-2.mp3').touch()
        album = audio.child('album-2')
        album.makedirs()
        album.child('track-1.ogg').touch()
        album.child('track-2.ogg').touch()
        self.storage = fs_storage.FSStore(None, name='my media',
                                          content=self.tmp_content.path,
                                          urlbase='http://fsstore-host/xyz',
                                          enable_inotify=False)

    def tearDown(self):
        self.tmp_content.remove()

    def test_ContentLen(self):
        self.assertEqual(len(self.storage.content), 1)
        # 11 items, since we have "<tempdir>/my content/..."
        self.assertEqual(len(self.storage.store), 11)
        self.assertEqual(self.storage.len(), 11)

    def test_Content(self):
        root = self.storage.get_by_id('1000')
        content = self.storage.get_by_id('1001')
        self.assertEqual(content.mimetype, 'directory')
        self.assertEqual(content.get_name(), 'my content')
        self.assertIs(root.get_children(0, 0)[0], content)
        self.assertEqual(self.storage.get_by_id('1002').get_name(),
                         'audio')
        self.assertEqual(self.storage.get_by_id('1005').get_name(),
                         'album-1')
Example No. 41
0
class Reader(unittest.TestCase):
    test_data = """line1
line2
line3
"""

    def setUp(self):
        self.temp_dir = FilePath(tempfile.mkdtemp())
        self.existing_file_name = self.temp_dir.child("foo")
        with self.existing_file_name.open("w") as f:
            f.write(self.test_data)

    def test_file_not_found(self):
        self.assertRaises(FileNotFound, FilesystemReader, self.temp_dir.child("bar"))

    def test_read_existing_file(self):
        r = FilesystemReader(self.temp_dir.child("foo"))
        data = r.read(3)
        ostring = data
        while data:
            data = r.read(3)
            ostring += data
        self.assertEqual(r.read(3), "")
        self.assertEqual(r.read(5), "")
        self.assertEqual(r.read(7), "")
        self.assertTrue(
            r.file_obj.closed,
            "The file has been exhausted and should be in the closed state")
        self.assertEqual(ostring, self.test_data)

    def test_cancel(self):
        r = FilesystemReader(self.temp_dir.child("foo"))
        r.read(3)
        r.finish()
        self.assertTrue(
            r.file_obj.closed, "The session has been finished, so the file object should be in the closed state"
        )
        r.finish()

    def tearDown(self):
        self.temp_dir.remove()
Example No. 42
0
class Writer(unittest.TestCase):
    test_data = """line1
line2
line3
"""

    def setUp(self):
        self.temp_dir = FilePath(tempfile.mkdtemp())
        self.existing_file_name = self.temp_dir.child('foo')
        self.existing_file_name.setContent(self.test_data)

    def test_write_existing_file(self):
        self.assertRaises(FileExists, FilesystemWriter, self.temp_dir.child('foo'))

    def test_write_to_non_existent_directory(self):
        new_directory = self.temp_dir.child("new")
        new_file = new_directory.child("baz")
        self.assertFalse(new_directory.exists())
        FilesystemWriter(new_file).finish()
        self.assertTrue(new_directory.exists())
        self.assertTrue(new_file.exists())

    def test_finished_write(self):
        w = FilesystemWriter(self.temp_dir.child('bar'))
        w.write(self.test_data)
        w.finish()
        with self.temp_dir.child('bar').open() as f:
            self.assertEqual(f.read(), self.test_data)

    def test_cancelled_write(self):
        w = FilesystemWriter(self.temp_dir.child('bar'))
        w.write(self.test_data)
        w.cancel()
        self.assertFalse(
            self.temp_dir.child('bar').exists(),
            "If a write is cancelled, the file should not be left behind")

    def tearDown(self):
        self.temp_dir.remove()
Example No. 43
0
def publish_artifacts_main(args, base_path, top_level):
    """
    Publish release artifacts.

    :param list args: The arguments passed to the scripts.
    :param FilePath base_path: The executable being run.
    :param FilePath top_level: The top-level of the flocker repository.
    """
    options = UploadOptions()

    try:
        options.parseOptions(args)
    except UsageError as e:
        sys.stderr.write("%s: %s\n" % (base_path.basename(), e))
        raise SystemExit(1)
    except NotARelease:
        sys.stderr.write("%s: Can't publish artifacts for a non-release.\n"
                         % (base_path.basename(),))
        raise SystemExit(1)
    except DocumentationRelease:
        sys.stderr.write("%s: Can't publish artifacts for a documentation "
                         "release.\n" % (base_path.basename(),))
        raise SystemExit(1)

    dispatcher = ComposedDispatcher([boto_dispatcher, yum_dispatcher,
                                     base_dispatcher])

    scratch_directory = FilePath(tempfile.mkdtemp(
        prefix=b'flocker-upload-'))
    scratch_directory.child('packages').createDirectory()
    scratch_directory.child('python').createDirectory()
    scratch_directory.child('pip').createDirectory()
    scratch_directory.child('vagrant').createDirectory()
    scratch_directory.child('homebrew').createDirectory()

    box_type = "flocker-tutorial"
    vagrant_prefix = 'vagrant/tutorial/'

    box_name = "{box_type}-{version}.box".format(
        box_type=box_type,
        version=options['flocker-version'],
    )

    box_url = "https://{bucket}.s3.amazonaws.com/{key}".format(
        bucket=options['target'],
        key=vagrant_prefix + box_name,
    )

    try:
        sync_perform(
            dispatcher=dispatcher,
            effect=sequence([
                upload_packages(
                    scratch_directory=scratch_directory.child('packages'),
                    target_bucket=options['target'],
                    version=options['flocker-version'],
                    build_server=options['build-server'],
                    top_level=top_level,
                ),
                upload_python_packages(
                    scratch_directory=scratch_directory.child('python'),
                    target_bucket=options['target'],
                    top_level=top_level,
                    output=sys.stdout,
                    error=sys.stderr,
                ),
                upload_pip_index(
                    scratch_directory=scratch_directory.child('pip'),
                    target_bucket=options['target'],
                ),
                Effect(
                    CopyS3Keys(
                        source_bucket=DEV_ARCHIVE_BUCKET,
                        source_prefix=vagrant_prefix,
                        destination_bucket=options['target'],
                        destination_prefix=vagrant_prefix,
                        keys=[box_name],
                    )
                ),
                publish_vagrant_metadata(
                    version=options['flocker-version'],
                    box_url=box_url,
                    scratch_directory=scratch_directory.child('vagrant'),
                    box_name=box_type,
                    target_bucket=options['target'],
                ),
            ]),
        )

        publish_homebrew_recipe(
            homebrew_repo_url=options['homebrew-tap'],
            version=options['flocker-version'],
            source_bucket=options['target'],
            scratch_directory=scratch_directory.child('homebrew'),
            top_level=top_level,
        )

    finally:
        scratch_directory.remove()
Example No. 44
0
def temporary_directory(suffix):
    temporary_directory = FilePath(mkdtemp(suffix=suffix))
    try:
        yield temporary_directory
    finally:
        temporary_directory.remove()
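
The generator above reads like the body of a context manager; the contextlib.contextmanager decorator is presumably applied in the original module. A hedged sketch of that assumed usage:

from contextlib import contextmanager
from tempfile import mkdtemp

from twisted.python.filepath import FilePath

@contextmanager
def temporary_directory(suffix):
    temporary_directory = FilePath(mkdtemp(suffix=suffix))
    try:
        yield temporary_directory
    finally:
        temporary_directory.remove()

# The yielded FilePath is removed again when the block exits.
with temporary_directory(suffix='-scratch') as scratch:
    scratch.child('data.txt').setContent(b'hello')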
Example No. 45
0
def create_listening_endpoint_from_config(config, cbdir, reactor, log):
    """
    Create a Twisted stream server endpoint from a Crossbar.io transport configuration.

    See: https://twistedmatrix.com/documents/current/api/twisted.internet.interfaces.IStreamServerEndpoint.html

    :param config: The transport configuration.
    :type config: dict
    :param cbdir: Crossbar.io node directory (we need this for TLS key/certificates).
    :type cbdir: str
    :param reactor: The reactor to use for endpoint creation.
    :type reactor: obj

    :returns obj -- An instance implementing IStreamServerEndpoint
    """
    endpoint = None

    # a TCP endpoint
    #
    if config['type'] == 'tcp':

        # the TCP protocol version (v4 or v6)
        #
        version = int(config.get('version', 4))

        # the listening port
        if isinstance(config['port'], str):
            # read port from environment variable ..
            try:
                port = int(environ[config['port'][1:]])
            except Exception as e:
                log.warn("Could not read listening port from env var: {e}",
                         e=e)
                raise
        else:
            port = config['port']

        # the listening interface
        #
        interface = str(config.get('interface', '').strip())

        # the TCP accept queue depth
        #
        backlog = int(config.get('backlog', 50))

        if 'tls' in config:
            # create a TLS server endpoint
            #
            if _HAS_TLS:
                # TLS server context
                context = _create_tls_server_context(config['tls'], cbdir, log)

                if version == 4:
                    endpoint = SSL4ServerEndpoint(reactor,
                                                  port,
                                                  context,
                                                  backlog=backlog,
                                                  interface=interface)
                elif version == 6:
                    raise Exception("TLS on IPv6 not implemented")
                else:
                    raise Exception(
                        "invalid TCP protocol version {}".format(version))
            else:
                raise Exception(
                    "TLS transport requested, but TLS packages not available:\n{}"
                    .format(_LACKS_TLS_MSG))

        else:
            # create a non-TLS server endpoint
            #
            if version == 4:
                endpoint = TCP4ServerEndpoint(reactor,
                                              port,
                                              backlog=backlog,
                                              interface=interface)
            elif version == 6:
                endpoint = TCP6ServerEndpoint(reactor,
                                              port,
                                              backlog=backlog,
                                              interface=interface)
            else:
                raise Exception(
                    "invalid TCP protocol version {}".format(version))

    # a Unix Domain Socket endpoint
    #
    elif config['type'] == 'unix':

        # the accept queue depth
        #
        backlog = int(config.get('backlog', 50))

        # the path
        #
        path = FilePath(join(cbdir, os.path.expandvars(config['path'])))

        # if there is already something there, delete it.
        #
        if path.exists():
            log.info(("{path} exists, attempting to remove before using as a "
                      "UNIX socket"),
                     path=path)
            path.remove()

        # create the endpoint
        #
        endpoint = UNIXServerEndpoint(reactor, path.path, backlog=backlog)

    # twisted endpoint-string
    elif config['type'] == 'twisted':
        endpoint = serverFromString(reactor, config['server_string'])

    # tor endpoint
    elif config['type'] == 'onion':  # or "tor"? or "tor_onion"?
        port = config['port']
        private_key_fname = _ensure_absolute(config['private_key_file'], cbdir)
        tor_control_ep = create_connecting_endpoint_from_config(
            config['tor_control_endpoint'], cbdir, reactor, log)
        version = config.get('version', 3)  # default to modern version 3

        try:
            with open(private_key_fname, 'r') as f:
                private_key = f.read().strip()
            log.info(
                "Onion private key from '{private_key_fname}'",
                private_key_fname=private_key_fname,
            )
        except (IOError, OSError):
            private_key = None

        @implementer(IStreamServerEndpoint)
        class _EphemeralOnion(object):
            @defer.inlineCallbacks
            def listen(self, proto_factory):
                # we don't care which local TCP port we listen on, but
                # we do need to know it
                local_ep = TCP4ServerEndpoint(reactor,
                                              0,
                                              interface="127.0.0.1")
                target_port = yield local_ep.listen(proto_factory)
                tor = yield txtorcon.connect(
                    reactor,
                    tor_control_ep,
                )

                log.info(
                    "Creating onion service (descriptor upload can take 30s or more)"
                )
                hs = yield tor.create_onion_service(
                    ports=[
                        (port, target_port.getHost().port),
                    ],
                    private_key=private_key,
                    version=version,
                )

                # if it's new, store our private key
                # XXX better "if private_key is None"?
                if not exists(private_key_fname):
                    with open(private_key_fname, 'w') as f:
                        f.write(hs.private_key)
                    log.info("Wrote private key to '{fname}'",
                             fname=private_key_fname)

                log.info(
                    "Listening on Tor onion service {hs.hostname} "
                    " with ports: {ports}",
                    hs=hs,
                    ports=" ".join(hs.ports),
                )
                defer.returnValue(target_port)

        endpoint = _EphemeralOnion()

    else:
        raise Exception("invalid endpoint type '{}'".format(config['type']))

    return endpoint
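
Every branch returns an object implementing IStreamServerEndpoint, so the caller can listen with any protocol factory. A hedged sketch of consuming the result; the config dict, cbdir path, and logger below are illustrative stand-ins, not values from this file:

from twisted.internet import reactor
from twisted.internet.protocol import Factory, Protocol
from twisted.logger import Logger

class Echo(Protocol):
    def dataReceived(self, data):
        # Echo every received chunk straight back to the peer.
        self.transport.write(data)

log = Logger()
config = {'type': 'tcp', 'port': 9000, 'backlog': 50}  # illustrative only
endpoint = create_listening_endpoint_from_config(config, '/tmp/cbdir', reactor, log)
d = endpoint.listen(Factory.forProtocol(Echo))  # Deferred firing with an IListeningPort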
Example No. 46
0
class WriteSessions(unittest.TestCase):

    port = 65466

    def setUp(self):
        self.clock = Clock()
        self.temp_dir = FilePath(tempfile.mkdtemp()).asBytesMode()
        self.target = self.temp_dir.child(b'foo')
        self.writer = DelayedWriter(self.target, _clock=self.clock, delay=2)
        self.transport = FakeTransport(hostAddress=('127.0.0.1', self.port))
        self.ws = WriteSession(self.writer, _clock=self.clock)
        self.ws.timeout = (4, 4, 4)
        self.ws.transport = self.transport
        self.ws.startProtocol()

    def test_ERROR(self):
        err_dgram = ERRORDatagram.from_code(ERR_NOT_DEFINED, b'no reason')
        self.ws.datagramReceived(err_dgram)
        self.clock.advance(0.1)
        self.assertFalse(self.transport.value())
        self.assertTrue(self.transport.disconnecting)

    @inlineCallbacks
    def test_DATA_stale_blocknum(self):
        self.ws.block_size = 6
        self.ws.blocknum = 2
        data_datagram = DATADatagram(1, b'foobar')
        yield self.ws.datagramReceived(data_datagram)
        self.writer.finish()
        self.assertFalse(self.target.open('r').read())
        self.assertFalse(self.transport.disconnecting)
        ack_dgram = TFTPDatagramFactory(*split_opcode(self.transport.value()))
        self.assertEqual(ack_dgram.blocknum, 1)
        self.addCleanup(self.ws.cancel)

    @inlineCallbacks
    def test_DATA_invalid_blocknum(self):
        self.ws.block_size = 6
        data_datagram = DATADatagram(3, b'foobar')
        yield self.ws.datagramReceived(data_datagram)
        self.writer.finish()
        self.assertFalse(self.target.open('r').read())
        self.assertFalse(self.transport.disconnecting)
        err_dgram = TFTPDatagramFactory(*split_opcode(self.transport.value()))
        self.assertTrue(isinstance(err_dgram, ERRORDatagram))
        self.addCleanup(self.ws.cancel)

    def test_DATA(self):
        self.ws.block_size = 6
        data_datagram = DATADatagram(1, b'foobar')
        d = self.ws.datagramReceived(data_datagram)

        def cb(ign):
            self.clock.advance(0.1)
            #self.writer.finish()
            #self.assertEqual(self.target.open('r').read(), 'foobar')
            self.assertFalse(self.transport.disconnecting)
            ack_dgram = TFTPDatagramFactory(
                *split_opcode(self.transport.value()))
            self.assertEqual(ack_dgram.blocknum, 1)
            self.assertFalse(
                self.ws.completed,
                "Data length is equal to blocksize, no reason to stop")
            data_datagram = DATADatagram(2, b'barbaz')

            self.transport.clear()
            d = self.ws.datagramReceived(data_datagram)
            d.addCallback(cb_)
            self.clock.advance(3)
            return d

        def cb_(ign):
            self.clock.advance(0.1)
            self.assertFalse(self.transport.disconnecting)
            ack_dgram = TFTPDatagramFactory(
                *split_opcode(self.transport.value()))
            self.assertEqual(ack_dgram.blocknum, 2)
            self.assertFalse(
                self.ws.completed,
                "Data length is equal to blocksize, no reason to stop")

        d.addCallback(cb)
        self.addCleanup(self.ws.cancel)
        self.clock.advance(3)
        return d

    def test_DATA_finished(self):
        self.ws.block_size = 6

        # Send a terminating datagram
        data_datagram = DATADatagram(1, b'foo')
        d = self.ws.datagramReceived(data_datagram)

        def cb(res):
            self.clock.advance(0.1)
            self.assertEqual(self.target.open('r').read(), b'foo')
            ack_dgram = TFTPDatagramFactory(
                *split_opcode(self.transport.value()))
            self.assertTrue(isinstance(ack_dgram, ACKDatagram))
            self.assertTrue(
                self.ws.completed,
                "Data length is less, than blocksize, time to stop")
            self.transport.clear()

            # Send another datagram after the transfer is considered complete
            data_datagram = DATADatagram(2, b'foobar')
            self.ws.datagramReceived(data_datagram)
            self.assertEqual(self.target.open('r').read(), b'foo')
            err_dgram = TFTPDatagramFactory(
                *split_opcode(self.transport.value()))
            self.assertTrue(isinstance(err_dgram, ERRORDatagram))

            # Check for proper disconnection after grace timeout expires
            self.clock.pump((4, ) * 4)
            self.assertTrue(
                self.transport.disconnecting,
                "We are done and the grace timeout is over, should disconnect")

        d.addCallback(cb)
        self.clock.advance(2)
        return d

    def test_DATA_backoff(self):
        self.ws.block_size = 5

        data_datagram = DATADatagram(1, b'foobar')
        d = self.ws.datagramReceived(data_datagram)

        def cb(ign):
            self.clock.advance(0.1)
            ack_datagram = ACKDatagram(1)

            self.clock.pump((1, ) * 5)
            # Sent two times - initial send and a retransmit after first timeout
            self.assertEqual(self.transport.value(),
                             ack_datagram.to_wire() * 2)

            # Sent three times - initial send and two retransmits
            self.clock.pump((1, ) * 4)
            self.assertEqual(self.transport.value(),
                             ack_datagram.to_wire() * 3)

            # Sent still three times - initial send, two retransmits and the last wait
            self.clock.pump((1, ) * 4)
            self.assertEqual(self.transport.value(),
                             ack_datagram.to_wire() * 3)

            self.assertTrue(self.transport.disconnecting)

        d.addCallback(cb)
        self.clock.advance(2.1)
        return d

    @inlineCallbacks
    def test_failed_write(self):
        self.writer.cancel()
        self.ws.writer = FailingWriter()
        data_datagram = DATADatagram(1, b'foobar')
        yield self.ws.datagramReceived(data_datagram)
        self.flushLoggedErrors()
        self.clock.advance(0.1)
        err_datagram = TFTPDatagramFactory(
            *split_opcode(self.transport.value()))
        self.assertTrue(isinstance(err_datagram, ERRORDatagram))
        self.assertTrue(self.transport.disconnecting)

    def test_time_out(self):
        data_datagram = DATADatagram(1, b'foobar')
        d = self.ws.datagramReceived(data_datagram)

        def cb(ign):
            self.clock.pump((1, ) * 13)
            self.assertTrue(self.transport.disconnecting)

        d.addCallback(cb)
        self.clock.advance(4)
        return d

    def tearDown(self):
        self.temp_dir.remove()
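
These tests never sleep: every timeout and retransmit is driven through the injected Clock, whose advance() and pump() fire scheduled calls deterministically. A small, self-contained sketch of that pattern (unrelated to the TFTP classes above):

from twisted.internet.task import Clock

clock = Clock()
fired = []
clock.callLater(4, fired.append, "timed out")

clock.advance(3)     # only 3 simulated seconds have passed, nothing fires
assert fired == []

clock.pump([1, 1])   # advance in two one-second steps, crossing t=4
assert fired == ["timed out"]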
Example No. 47
0
def run(args=None):
    """
    Parses the configuration options in C{args} and runs C{dpkg-buildpackage}
    to create a .deb file.

    @param args: List of strings representing the C{tap2deb} configuration
        options.
    @type args: L{list}
    """
    try:
        config = MyOptions()
        config.parseOptions(args)
    except usage.error as ue:
        sys.exit("%s: %s" % (sys.argv[0], ue))

    tapFile = config["tapfile"]
    baseTapFile = os.path.basename(config["tapfile"])
    protocol = config["protocol"] or os.path.splitext(baseTapFile)[0]
    debFile = config["debfile"] or "twisted-" + protocol
    version = config["set-version"]
    maintainer = config["maintainer"]
    description = config["description"] or ("A Twisted-based server for %(protocol)s" % vars())
    longDescription = config["long_description"] or "Automatically created by tap2deb"
    twistdOption = type_dict[config["type"]]
    date = now()
    directory = debFile + "-" + version
    pythonVersion = "%s.%s" % sys.version_info[:2]
    buildDir = FilePath(".build").child(directory)

    if buildDir.exists():
        buildDir.remove()

    debianDir = buildDir.child("debian")
    debianDir.child("source").makedirs()
    shutil.copy(tapFile, buildDir.path)

    debianDir.child("README.Debian").setContent("""This package was auto-generated by tap2deb\n""")

    debianDir.child("conffiles").setContent(
        """\
/etc/init.d/%(debFile)s
/etc/default/%(debFile)s
/etc/%(baseTapFile)s
"""
        % vars()
    )

    debianDir.child("default").setContent(
        """\
pidfile=/var/run/%(debFile)s.pid
rundir=/var/lib/%(debFile)s/
file=/etc/%(tapFile)s
logfile=/var/log/%(debFile)s.log
 """
        % vars()
    )

    debianDir.child("init.d").setContent(
        """\
#!/bin/sh

PATH=/sbin:/bin:/usr/sbin:/usr/bin

pidfile=/var/run/%(debFile)s.pid \
rundir=/var/lib/%(debFile)s/ \
file=/etc/%(tapFile)s \
logfile=/var/log/%(debFile)s.log

[ -r /etc/default/%(debFile)s ] && . /etc/default/%(debFile)s

test -x /usr/bin/twistd || exit 0
test -r $file || exit 0
test -r /usr/share/%(debFile)s/package-installed || exit 0


case "$1" in
    start)
        echo -n "Starting %(debFile)s: twistd"
        start-stop-daemon --start --quiet --exec /usr/bin/twistd -- \
                          --pidfile=$pidfile \
                          --rundir=$rundir \
                          --%(twistdOption)s=$file \
                          --logfile=$logfile
        echo "."
    ;;

    stop)
        echo -n "Stopping %(debFile)s: twistd"
        start-stop-daemon --stop --quiet  \
            --pidfile $pidfile
        echo "."
    ;;

    restart)
        $0 stop
        $0 start
    ;;

    force-reload)
        $0 restart
    ;;

    *)
        echo "Usage: /etc/init.d/%(debFile)s {start|stop|restart|force-reload}" >&2
        exit 1
    ;;
esac

exit 0
"""
        % vars()
    )

    debianDir.child("init.d").chmod(0755)

    debianDir.child("postinst").setContent(
        """\
#!/bin/sh
update-rc.d %(debFile)s defaults >/dev/null
invoke-rc.d %(debFile)s start
#DEBHELPER#
"""
        % vars()
    )

    debianDir.child("prerm").setContent(
        """\
#!/bin/sh
invoke-rc.d %(debFile)s stop
#DEBHELPER#
"""
        % vars()
    )

    debianDir.child("postrm").setContent(
        """\
#!/bin/sh
if [ "$1" = purge ]; then
        update-rc.d %(debFile)s remove >/dev/null
fi
#DEBHELPER#
"""
        % vars()
    )

    debianDir.child("changelog").setContent(
        """\
%(debFile)s (%(version)s) unstable; urgency=low

  * Created by tap2deb

 -- %(maintainer)s  %(date)s

"""
        % vars()
    )

    debianDir.child("control").setContent(
        """\
Source: %(debFile)s
Section: net
Priority: extra
Maintainer: %(maintainer)s
Build-Depends-Indep: debhelper, python (>= 2.6.5-7)
Standards-Version: 3.8.4
XS-Python-Version: current

Package: %(debFile)s
Architecture: all
Depends: ${python:Depends}, python-twisted-core
XB-Python-Version: ${python:Versions}
Description: %(description)s
 %(longDescription)s
"""
        % vars()
    )

    debianDir.child("copyright").setContent(
        """\
This package was auto-debianized by %(maintainer)s on
%(date)s

It was auto-generated by tap2deb

Upstream Author(s): 
Moshe Zadka <*****@*****.**> -- tap2deb author

Copyright:

Insert copyright here.
"""
        % vars()
    )

    debianDir.child("dirs").setContent(
        """\
etc/init.d
etc/default
var/lib/%(debFile)s
usr/share/doc/%(debFile)s
usr/share/%(debFile)s
"""
        % vars()
    )

    debianDir.child("rules").setContent(
        """\
#!/usr/bin/make -f

export DH_COMPAT=5

build: build-stamp
build-stamp:
	dh_testdir
	touch build-stamp

clean:
	dh_testdir
	dh_testroot
	rm -f build-stamp install-stamp
	dh_clean

install: install-stamp
install-stamp: build-stamp
	dh_testdir
	dh_testroot
	dh_clean -k
	dh_installdirs

	# Add here commands to install the package into debian/tmp.
	cp %(baseTapFile)s debian/tmp/etc/
	cp debian/init.d debian/tmp/etc/init.d/%(debFile)s
	cp debian/default debian/tmp/etc/default/%(debFile)s
	cp debian/copyright debian/tmp/usr/share/doc/%(debFile)s/
	cp debian/README.Debian debian/tmp/usr/share/doc/%(debFile)s/
	touch debian/tmp/usr/share/%(debFile)s/package-installed
	touch install-stamp

binary-arch: build install

binary-indep: build install
	dh_testdir
	dh_testroot
	dh_strip
	dh_compress
	dh_installchangelogs
	dh_python2
	dh_fixperms
	dh_installdeb
	dh_gencontrol
	dh_md5sums
	dh_builddeb

source diff:
	@echo >&2 'source and diff are obsolete - use dpkg-source -b'; false

binary: binary-indep binary-arch
.PHONY: build clean binary-indep binary-arch binary install
"""
        % vars()
    )

    debianDir.child("rules").chmod(0755)

    args = ["dpkg-buildpackage", "-rfakeroot"]
    if config["unsigned"]:
        args = args + ["-uc", "-us"]

    # Build deb
    job = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=buildDir.path)
    stdout, _ = job.communicate()
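
A hedged sketch of calling run() directly; the flag spellings below are inferred from the config keys read above (tapfile, maintainer, set-version, type, unsigned) and may not match the real tap2deb option names exactly:

# Hypothetical invocation; assumes MyOptions exposes these keys as long flags.
run([
    "--tapfile", "myservice.tap",
    "--maintainer", "Jane Doe <jane@example.com>",
    "--set-version", "1.0",
    "--type", "tap",
    "--unsigned",
])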
Example No. 48
0
def create_listening_endpoint_from_config(config, cbdir, reactor, log):
    """
    Create a Twisted stream server endpoint from a Crossbar.io transport configuration.

    See: https://twistedmatrix.com/documents/current/api/twisted.internet.interfaces.IStreamServerEndpoint.html

    :param config: The transport configuration.
    :type config: dict
    :param cbdir: Crossbar.io node directory (we need this for TLS key/certificates).
    :type cbdir: str
    :param reactor: The reactor to use for endpoint creation.
    :type reactor: obj

    :returns obj -- An instance implementing IStreamServerEndpoint
    """
    endpoint = None

    # a TCP endpoint
    #
    if config['type'] == 'tcp':

        # the TCP protocol version (v4 or v6)
        #
        version = int(config.get('version', 4))

        # the listening port
        if type(config['port']) is six.text_type:
            # read port from environment variable ..
            try:
                port = int(environ[config['port'][1:]])
            except Exception as e:
                log.warn(
                    "Could not read listening port from env var: {}".format(e))
                raise e
        else:
            port = config['port']

        # the listening interface
        #
        interface = str(config.get('interface', '').strip())

        # the TCP accept queue depth
        #
        backlog = int(config.get('backlog', 50))

        if 'tls' in config:
            # create a TLS server endpoint
            #
            if _HAS_TLS:
                # TLS server context
                context = _create_tls_server_context(config['tls'], cbdir, log)

                if version == 4:
                    endpoint = SSL4ServerEndpoint(reactor,
                                                  port,
                                                  context,
                                                  backlog=backlog,
                                                  interface=interface)
                elif version == 6:
                    raise Exception("TLS on IPv6 not implemented")
                else:
                    raise Exception(
                        "invalid TCP protocol version {}".format(version))
            else:
                raise Exception(
                    "TLS transport requested, but TLS packages not available:\n{}"
                    .format(_LACKS_TLS_MSG))

        else:
            # create a non-TLS server endpoint
            #
            if version == 4:
                endpoint = TCP4ServerEndpoint(reactor,
                                              port,
                                              backlog=backlog,
                                              interface=interface)
            elif version == 6:
                endpoint = TCP6ServerEndpoint(reactor,
                                              port,
                                              backlog=backlog,
                                              interface=interface)
            else:
                raise Exception(
                    "invalid TCP protocol version {}".format(version))

    # a Unix Domain Socket endpoint
    #
    elif config['type'] == 'unix':

        # the accept queue depth
        #
        backlog = int(config.get('backlog', 50))

        # the path
        #
        path = FilePath(join(cbdir, config['path']))

        # if there is already something there, delete it.
        #
        if path.exists():
            log.info(("{path} exists, attempting to remove before using as a "
                      "UNIX socket"),
                     path=path)
            path.remove()

        # create the endpoint
        #
        endpoint = UNIXServerEndpoint(reactor, path.path, backlog=backlog)

    else:
        raise Exception("invalid endpoint type '{}'".format(config['type']))

    return endpoint
Example No. 49
0
def run(args=None):
    """
    Parses the configuration options in C{args} and runs C{dpkg-buildpackage}
    to create a .deb file.

    @param args: List of strings representing the C{tap2deb} configuration
        options.
    @type args: L{list}
    """
    try:
        config = MyOptions()
        config.parseOptions(args)
    except usage.error as ue:
        sys.exit("%s: %s" % (sys.argv[0], ue))

    tapFile = config['tapfile']
    baseTapFile = os.path.basename(config['tapfile'])
    protocol = (config['protocol'] or os.path.splitext(baseTapFile)[0])
    debFile = config['debfile'] or 'twisted-' + protocol
    version = config['set-version']
    maintainer = config['maintainer']
    description = config['description'] or (
        'A Twisted-based server for %(protocol)s' % vars())
    longDescription = config['long_description'] or\
        'Automatically created by tap2deb'
    twistdOption = type_dict[config['type']]
    date = now()
    directory = debFile + '-' + version
    pythonVersion = '%s.%s' % sys.version_info[:2]
    buildDir = FilePath('.build').child(directory)

    if buildDir.exists():
        buildDir.remove()

    debianDir = buildDir.child('debian')
    debianDir.child('source').makedirs()
    shutil.copy(tapFile, buildDir.path)

    debianDir.child('README.Debian').setContent(
    '''This package was auto-generated by tap2deb\n''')

    debianDir.child('conffiles').setContent(
    '''\
/etc/init.d/%(debFile)s
/etc/default/%(debFile)s
/etc/%(baseTapFile)s
''' % vars())

    debianDir.child('default').setContent(
    '''\
pidfile=/var/run/%(debFile)s.pid
rundir=/var/lib/%(debFile)s/
file=/etc/%(tapFile)s
logfile=/var/log/%(debFile)s.log
 ''' % vars())

    debianDir.child('init.d').setContent(
    '''\
#!/bin/sh

PATH=/sbin:/bin:/usr/sbin:/usr/bin

pidfile=/var/run/%(debFile)s.pid \
rundir=/var/lib/%(debFile)s/ \
file=/etc/%(tapFile)s \
logfile=/var/log/%(debFile)s.log

[ -r /etc/default/%(debFile)s ] && . /etc/default/%(debFile)s

test -x /usr/bin/twistd%(pythonVersion)s || exit 0
test -r $file || exit 0
test -r /usr/share/%(debFile)s/package-installed || exit 0


case "$1" in
    start)
        echo -n "Starting %(debFile)s: twistd"
        start-stop-daemon --start --quiet --exec /usr/bin/twistd%(pythonVersion)s -- \
                          --pidfile=$pidfile \
                          --rundir=$rundir \
                          --%(twistdOption)s=$file \
                          --logfile=$logfile
        echo "."
    ;;

    stop)
        echo -n "Stopping %(debFile)s: twistd"
        start-stop-daemon --stop --quiet  \
            --pidfile $pidfile
        echo "."
    ;;

    restart)
        $0 stop
        $0 start
    ;;

    force-reload)
        $0 restart
    ;;

    *)
        echo "Usage: /etc/init.d/%(debFile)s {start|stop|restart|force-reload}" >&2
        exit 1
    ;;
esac

exit 0
''' % vars())

    debianDir.child('init.d').chmod(0o755)

    debianDir.child('postinst').setContent(
    '''\
#!/bin/sh
update-rc.d %(debFile)s defaults >/dev/null
invoke-rc.d %(debFile)s start
''' % vars())

    debianDir.child('prerm').setContent(
    '''\
#!/bin/sh
invoke-rc.d %(debFile)s stop
''' % vars())

    debianDir.child('postrm').setContent(
    '''\
#!/bin/sh
if [ "$1" = purge ]; then
        update-rc.d %(debFile)s remove >/dev/null
fi
''' % vars())

    debianDir.child('changelog').setContent(
    '''\
%(debFile)s (%(version)s) unstable; urgency=low

  * Created by tap2deb

 -- %(maintainer)s  %(date)s

''' % vars())

    debianDir.child('control').setContent(
    '''\
Source: %(debFile)s
Section: net
Priority: extra
Maintainer: %(maintainer)s
Build-Depends-Indep: debhelper
Standards-Version: 3.5.6

Package: %(debFile)s
Architecture: all
Depends: python%(pythonVersion)s-twisted
Description: %(description)s
 %(longDescription)s
''' % vars())

    debianDir.child('copyright').setContent(
    '''\
This package was auto-debianized by %(maintainer)s on
%(date)s

It was auto-generated by tap2deb

Upstream Author(s):
Moshe Zadka <*****@*****.**> -- tap2deb author

Copyright:

Insert copyright here.
''' % vars())

    debianDir.child('dirs').setContent(
    '''\
etc/init.d
etc/default
var/lib/%(debFile)s
usr/share/doc/%(debFile)s
usr/share/%(debFile)s
''' % vars())

    debianDir.child('rules').setContent(
    '''\
#!/usr/bin/make -f

export DH_COMPAT=1

build: build-stamp
build-stamp:
	dh_testdir
	touch build-stamp

clean:
	dh_testdir
	dh_testroot
	rm -f build-stamp install-stamp
	dh_clean

install: install-stamp
install-stamp: build-stamp
	dh_testdir
	dh_testroot
	dh_clean -k
	dh_installdirs

	# Add here commands to install the package into debian/tmp.
	cp %(baseTapFile)s debian/tmp/etc/
	cp debian/init.d debian/tmp/etc/init.d/%(debFile)s
	cp debian/default debian/tmp/etc/default/%(debFile)s
	cp debian/copyright debian/tmp/usr/share/doc/%(debFile)s/
	cp debian/README.Debian debian/tmp/usr/share/doc/%(debFile)s/
	touch debian/tmp/usr/share/%(debFile)s/package-installed
	touch install-stamp

binary-arch: build install

binary-indep: build install
	dh_testdir
	dh_testroot
	dh_strip
	dh_compress
	dh_installchangelogs
	dh_fixperms
	dh_installdeb
	dh_shlibdeps
	dh_gencontrol
	dh_md5sums
	dh_builddeb

source diff:
	@echo >&2 'source and diff are obsolete - use dpkg-source -b'; false

binary: binary-indep binary-arch
.PHONY: build clean binary-indep binary-arch binary install
''' % vars())

    debianDir.child('rules').chmod(0o755)

    args = ["dpkg-buildpackage", "-rfakeroot"]
    if config['unsigned']:
        args = args + ['-uc', '-us']

    # Build deb
    job = subprocess.Popen(args, stdout=subprocess.PIPE,
                           stderr=subprocess.STDOUT, cwd=buildDir.path)
    stdout, _ = job.communicate()
Example No. 50
0
class TestContentDirectoryServer(unittest.TestCase):

    def setUp(self):
        self.tmp_content = FilePath('tmp_content_coherence-%d'%os.getpid())
        f = self.tmp_content.child('content')
        audio = f.child('audio')
        f.child('images').makedirs()
        f.child('video').makedirs()
        album = audio.child('album-1')
        album.makedirs()
        album.child('track-1.mp3').touch()
        album.child('track-2.mp3').touch()
        album = audio.child('album-2')
        album.makedirs()
        album.child('track-1.ogg').touch()
        album.child('track-2.ogg').touch()
        louie.reset()
        self.coherence = Coherence(
            {'unittest': 'yes', 'logmode': 'debug',
             'subsystem_log': {'controlpoint': 'error',
                               'action': 'error',
                               'soap': 'error'},
             'controlpoint': 'yes'})
        self.uuid = UUID()
        p = self.coherence.add_plugin('FSStore',
                                      name='MediaServer-%d'%os.getpid(),
                                      content=self.tmp_content.path,
                                      uuid=str(self.uuid))

    def tearDown(self):
        self.tmp_content.remove()

        def cleaner(r):
            self.coherence.clear()
            return r

        dl = self.coherence.shutdown()
        dl.addBoth(cleaner)
        return dl

    def test_Browse(self):
        """ tries to find the activated FSStore backend
            and browses its root.
        """
        d = Deferred()

        def the_result(mediaserver):
            try:
                self.assertEqual(str(self.uuid), mediaserver.udn)
            except:
                d.errback()

            def got_second_answer(r,childcount):
                try:
                    self.assertEqual(int(r['TotalMatches']), childcount)
                    d.callback(None)
                except:
                    d.errback()

            def got_first_answer(r):
                try:
                    self.assertEqual(int(r['TotalMatches']), 1)
                except:
                    d.errback()

                didl = DIDLLite.DIDLElement.fromString(r['Result'])
                item = didl.getItems()[0]
                try:
                    self.assertEqual(item.childCount, 3)
                except:
                    d.errback()

                call = mediaserver.client.content_directory.browse(object_id=item.id,
                                                         process_result=False)
                call.addCallback(got_second_answer,item.childCount)
                return call

            call = mediaserver.client.content_directory.browse(process_result=False)
            call.addCallback(got_first_answer)

        self.coherence.ctrl.add_query(DeviceQuery('uuid', str(self.uuid), the_result, timeout=10, oneshot=True))
        return d

    def test_Browse_Metadata(self):
        """ tries to find the activated FSStore backend
            and requests metadata for ObjectID 0.
        """
        d = Deferred()

        def the_result(mediaserver):
            try:
                self.assertEqual(str(self.uuid), mediaserver.udn)
            except:
                d.errback()

            def got_first_answer(r):
                try:
                    self.assertEqual(int(r['TotalMatches']), 1)
                except:
                    d.errback()
                    return
                didl = DIDLLite.DIDLElement.fromString(r['Result'])
                item = didl.getItems()[0]
                try:
                    self.assertEqual(item.title, 'root')
                except:
                    d.errback()
                    return
                d.callback(None)

            call = mediaserver.client.content_directory.browse(object_id='0',browse_flag='BrowseMetadata',process_result=False)
            call.addCallback(got_first_answer)
            call.addErrback(lambda x: d.errback(None))

        self.coherence.ctrl.add_query(DeviceQuery('uuid', str(self.uuid), the_result, timeout=10, oneshot=True))
        return d

    def test_XBOX_Browse(self):
        """ tries to find the activated FSStore backend
            and browses all audio files.
        """
        d = Deferred()

        def the_result(mediaserver):
            try:
                self.assertEqual(str(self.uuid), mediaserver.udn)
            except:
                d.errback()

            def got_first_answer(r):
                """ we expect four audio files here """
                try:
                    self.assertEqual(int(r['TotalMatches']), 4)
                except:
                    d.errback()
                    return
                d.callback(None)

            def my_browse(*args,**kwargs):
                kwargs['ContainerID'] = kwargs['ObjectID']
                del kwargs['ObjectID']
                del kwargs['BrowseFlag']
                kwargs['SearchCriteria'] = ''
                return 'Search',kwargs

            #mediaserver.client.overlay_actions = {'Browse':my_browse}
            mediaserver.client.overlay_headers = {'user-agent':'Xbox/Coherence emulation'}

            call = mediaserver.client.content_directory.browse(object_id='4',process_result=False)
            call.addCallback(got_first_answer)
            call.addErrback(lambda x: d.errback(None))

        self.coherence.ctrl.add_query(DeviceQuery('uuid', str(self.uuid), the_result, timeout=10, oneshot=True))
        return d

    def test_XBOX_Browse_Metadata(self):
        """ tries to find the activated FSStore backend
            and requests metadata for ObjectID 0.
        """
        d = Deferred()

        def the_result(mediaserver):
            try:
                self.assertEqual(str(self.uuid), mediaserver.udn)
            except:
                d.errback()

            def got_first_answer(r):
                """ we expect one item here """
                try:
                    self.assertEqual(int(r['TotalMatches']), 1)
                except:
                    d.errback()
                    return
                didl = DIDLLite.DIDLElement.fromString(r['Result'])
                item = didl.getItems()[0]
                try:
                    self.assertEqual(item.title, 'root')
                except:
                    d.errback()
                    return
                d.callback(None)

            mediaserver.client.overlay_headers = {'user-agent':'Xbox/Coherence emulation'}

            call = mediaserver.client.content_directory.browse(object_id='0',browse_flag='BrowseMetadata',process_result=False)
            call.addCallback(got_first_answer)
            call.addErrback(lambda x: d.errback(None))

        self.coherence.ctrl.add_query(DeviceQuery('uuid', str(self.uuid), the_result, timeout=10, oneshot=True))
        return d

    def test_XBOX_Search(self):
        """ tries to find the activated FSStore backend
            and searches for all its audio files.
        """
        d = Deferred()

        def the_result(mediaserver):
            try:
                self.assertEqual(str(self.uuid), mediaserver.udn)
            except:
                d.errback()

            def got_first_answer(r):
                """ we expect four audio files here """
                try:
                    self.assertEqual(len(r), 4)
                except:
                    d.errback()
                d.callback(None)

            mediaserver.client.overlay_headers = {'user-agent':'Xbox/Coherence emulation'}

            call = mediaserver.client.content_directory.search(container_id='4',
                                                               criteria='')
            call.addCallback(got_first_answer)
            call.addErrback(lambda x: d.errback(None))

        self.coherence.ctrl.add_query(DeviceQuery('uuid', str(self.uuid), the_result, timeout=10, oneshot=True))
        return d
Example No. 51
0
def publish_artifacts_main(args, base_path, top_level):
    """
    Publish release artifacts.

    :param list args: The arguments passed to the scripts.
    :param FilePath base_path: The executable being run.
    :param FilePath top_level: The top-level of the flocker repository.
    """
    options = UploadOptions()

    try:
        options.parseOptions(args)
    except UsageError as e:
        sys.stderr.write("%s: %s\n" % (base_path.basename(), e))
        raise SystemExit(1)
    except NotARelease:
        sys.stderr.write("%s: Can't publish artifacts for a non-release.\n" %
                         (base_path.basename(), ))
        raise SystemExit(1)
    except DocumentationRelease:
        sys.stderr.write("%s: Can't publish artifacts for a documentation "
                         "release.\n" % (base_path.basename(), ))
        raise SystemExit(1)

    dispatcher = ComposedDispatcher(
        [boto_dispatcher, yum_dispatcher, base_dispatcher])

    scratch_directory = FilePath(tempfile.mkdtemp(prefix=b'flocker-upload-'))
    scratch_directory.child('packages').createDirectory()
    scratch_directory.child('python').createDirectory()
    scratch_directory.child('pip').createDirectory()
    scratch_directory.child('vagrant').createDirectory()
    scratch_directory.child('homebrew').createDirectory()

    box_type = "flocker-tutorial"
    vagrant_prefix = 'vagrant/tutorial/'

    box_name = "{box_type}-{version}.box".format(
        box_type=box_type,
        version=options['flocker-version'],
    )

    box_url = "https://{bucket}.s3.amazonaws.com/{key}".format(
        bucket=options['target'],
        key=vagrant_prefix + box_name,
    )

    try:
        sync_perform(
            dispatcher=dispatcher,
            effect=sequence([
                upload_packages(
                    scratch_directory=scratch_directory.child('packages'),
                    target_bucket=options['target'],
                    version=options['flocker-version'],
                    build_server=options['build-server'],
                    top_level=top_level,
                ),
                upload_python_packages(
                    scratch_directory=scratch_directory.child('python'),
                    target_bucket=options['target'],
                    top_level=top_level,
                    output=sys.stdout,
                    error=sys.stderr,
                ),
                upload_pip_index(
                    scratch_directory=scratch_directory.child('pip'),
                    target_bucket=options['target'],
                ),
                Effect(
                    CopyS3Keys(
                        source_bucket=DEV_ARCHIVE_BUCKET,
                        source_prefix=vagrant_prefix,
                        destination_bucket=options['target'],
                        destination_prefix=vagrant_prefix,
                        keys=[box_name],
                    )),
                publish_vagrant_metadata(
                    version=options['flocker-version'],
                    box_url=box_url,
                    scratch_directory=scratch_directory.child('vagrant'),
                    box_name=box_type,
                    target_bucket=options['target'],
                ),
            ]),
        )

        publish_homebrew_recipe(
            homebrew_repo_url=options['homebrew-tap'],
            version=options['flocker-version'],
            source_bucket=options['target'],
            scratch_directory=scratch_directory.child('homebrew'),
            top_level=top_level,
        )

    finally:
        scratch_directory.remove()
Example No. 52
0
class TestContentDirectoryServer(unittest.TestCase):
    def setUp(self):
        self.tmp_content = FilePath('tmp_content_coherence-%d' % os.getpid())
        f = self.tmp_content.child('content')
        audio = f.child('audio')
        f.child('images').makedirs()
        f.child('video').makedirs()
        album = audio.child('album-1')
        album.makedirs()
        album.child('track-1.mp3').touch()
        album.child('track-2.mp3').touch()
        album = audio.child('album-2')
        album.makedirs()
        album.child('track-1.ogg').touch()
        album.child('track-2.ogg').touch()
        louie.reset()
        self.coherence = Coherence({
            'unittest': 'yes',
            'logmode': 'debug',
            'subsystem_log': {
                'controlpoint': 'error',
                'action': 'error',
                'soap': 'error'
            },
            'controlpoint': 'yes'
        })
        self.uuid = UUID()
        p = self.coherence.add_plugin('FSStore',
                                      name='MediaServer-%d' % os.getpid(),
                                      content=self.tmp_content.path,
                                      uuid=str(self.uuid))

    def tearDown(self):
        self.tmp_content.remove()

        def cleaner(r):
            self.coherence.clear()
            return r

        dl = self.coherence.shutdown()
        dl.addBoth(cleaner)
        return dl

    def test_Browse(self):
        """ tries to find the activated FSStore backend
            and browses its root.
        """
        d = Deferred()

        def the_result(mediaserver):
            try:
                self.assertEqual(str(self.uuid), mediaserver.udn)
            except:
                d.errback()

            def got_second_answer(r, childcount):
                try:
                    self.assertEqual(int(r['TotalMatches']), childcount)
                    d.callback(None)
                except:
                    d.errback()

            def got_first_answer(r):
                try:
                    self.assertEqual(int(r['TotalMatches']), 1)
                except:
                    d.errback()

                didl = DIDLLite.DIDLElement.fromString(r['Result'])
                item = didl.getItems()[0]
                try:
                    self.assertEqual(item.childCount, 3)
                except:
                    d.errback()

                call = mediaserver.client.content_directory.browse(
                    object_id=item.id, process_result=False)
                call.addCallback(got_second_answer, item.childCount)
                return call

            call = mediaserver.client.content_directory.browse(
                process_result=False)
            call.addCallback(got_first_answer)

        self.coherence.ctrl.add_query(
            DeviceQuery('uuid',
                        str(self.uuid),
                        the_result,
                        timeout=10,
                        oneshot=True))
        return d

    def test_Browse_Metadata(self):
        """ tries to find the activated FSStore backend
            and requests metadata for ObjectID 0.
        """
        d = Deferred()

        def the_result(mediaserver):
            try:
                self.assertEqual(str(self.uuid), mediaserver.udn)
            except:
                d.errback()

            def got_first_answer(r):
                try:
                    self.assertEqual(int(r['TotalMatches']), 1)
                except:
                    d.errback()
                    return
                didl = DIDLLite.DIDLElement.fromString(r['Result'])
                item = didl.getItems()[0]
                try:
                    self.assertEqual(item.title, 'root')
                except:
                    d.errback()
                    return
                d.callback(None)

            call = mediaserver.client.content_directory.browse(
                object_id='0',
                browse_flag='BrowseMetadata',
                process_result=False)
            call.addCallback(got_first_answer)
            call.addErrback(lambda x: d.errback(None))

        self.coherence.ctrl.add_query(
            DeviceQuery('uuid',
                        str(self.uuid),
                        the_result,
                        timeout=10,
                        oneshot=True))
        return d

    def test_XBOX_Browse(self):
        """ tries to find the activated FSStore backend
            and browses all audio files.
        """
        d = Deferred()

        def the_result(mediaserver):
            try:
                self.assertEqual(str(self.uuid), mediaserver.udn)
            except:
                d.errback()

            def got_first_answer(r):
                """ we expect four audio files here """
                try:
                    self.assertEqual(int(r['TotalMatches']), 4)
                except:
                    d.errback()
                    return
                d.callback(None)

            def my_browse(*args, **kwargs):
                kwargs['ContainerID'] = kwargs['ObjectID']
                del kwargs['ObjectID']
                del kwargs['BrowseFlag']
                kwargs['SearchCriteria'] = ''
                return 'Search', kwargs

            #mediaserver.client.overlay_actions = {'Browse':my_browse}
            mediaserver.client.overlay_headers = {
                'user-agent': 'Xbox/Coherence emulation'
            }

            call = mediaserver.client.content_directory.browse(
                object_id='4', process_result=False)
            call.addCallback(got_first_answer)
            call.addErrback(lambda x: d.errback(None))

        self.coherence.ctrl.add_query(
            DeviceQuery('uuid',
                        str(self.uuid),
                        the_result,
                        timeout=10,
                        oneshot=True))
        return d

    def test_XBOX_Browse_Metadata(self):
        """ tries to find the activated FSStore backend
            and requests metadata for ObjectID 0.
        """
        d = Deferred()

        def the_result(mediaserver):
            try:
                self.assertEqual(str(self.uuid), mediaserver.udn)
            except:
                d.errback()
                return

            def got_first_answer(r):
                """ we expect one item here """
                try:
                    self.assertEqual(int(r['TotalMatches']), 1)
                except:
                    d.errback()
                    return
                didl = DIDLLite.DIDLElement.fromString(r['Result'])
                item = didl.getItems()[0]
                try:
                    self.assertEqual(item.title, 'root')
                except:
                    d.errback()
                    return
                d.callback(None)

            mediaserver.client.overlay_headers = {
                'user-agent': 'Xbox/Coherence emulation'
            }

            call = mediaserver.client.content_directory.browse(
                object_id='0',
                browse_flag='BrowseMetadata',
                process_result=False)
            call.addCallback(got_first_answer)
            call.addErrback(lambda x: d.errback(None))

        self.coherence.ctrl.add_query(
            DeviceQuery('uuid',
                        str(self.uuid),
                        the_result,
                        timeout=10,
                        oneshot=True))
        return d

    def test_XBOX_Search(self):
        """ tries to find the activated FSStore backend
            and searches for all its audio files.
        """
        d = Deferred()

        def the_result(mediaserver):
            try:
                self.assertEqual(str(self.uuid), mediaserver.udn)
            except:
                d.errback()
                return

            def got_first_answer(r):
                """ we expect four audio files here """
                try:
                    self.assertEqual(len(r), 4)
                except:
                    d.errback()
                    return
                d.callback(None)

            mediaserver.client.overlay_headers = {
                'user-agent': 'Xbox/Coherence emulation'
            }

            call = mediaserver.client.content_directory.search(
                container_id='4', criteria='')
            call.addCallback(got_first_answer)
            call.addErrback(lambda x: d.errback(None))

        self.coherence.ctrl.add_query(
            DeviceQuery('uuid',
                        str(self.uuid),
                        the_result,
                        timeout=10,
                        oneshot=True))
        return d
Ejemplo n.º 53
0
@contextmanager  # assumed: the yield/finally pattern only makes sense as a context manager
def temporary_directory(suffix):
    """Yield a temporary FilePath and remove it once the with-block exits."""
    temporary_directory = FilePath(mkdtemp(suffix=suffix))
    try:
        yield temporary_directory
    finally:
        temporary_directory.remove()
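A minimal usage sketch for the helper above, assuming the contextlib.contextmanager decorator shown and the usual FilePath/mkdtemp imports from twisted.python.filepath and tempfile:

with temporary_directory(suffix='-scratch') as tmp:        # hypothetical suffix value
    tmp.child('notes.txt').setContent(b'scratch data')      # work inside the FilePath
# the finally clause has already removed the directory (and its contents) at this point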
Ejemplo n.º 54
0
def create_listening_endpoint_from_config(config, cbdir, reactor, log):
    """
    Create a Twisted stream server endpoint from a Crossbar.io transport configuration.

    See: https://twistedmatrix.com/documents/current/api/twisted.internet.interfaces.IStreamServerEndpoint.html

    :param config: The transport configuration.
    :type config: dict
    :param cbdir: Crossbar.io node directory (we need this for TLS key/certificates).
    :type cbdir: str
    :param reactor: The reactor to use for endpoint creation.
    :type reactor: obj

    :returns: obj -- An instance implementing IStreamServerEndpoint
    """
    endpoint = None

    # a TCP endpoint
    #
    if config['type'] == 'tcp':

        # the TCP protocol version (v4 or v6)
        #
        version = int(config.get('version', 4))

        # the listening port
        if type(config['port']) is six.text_type:
            # read port from environment variable ..
            try:
                port = int(environ[config['port'][1:]])
            except Exception as e:
                log.warn("Could not read listening port from env var: {e}", e=e)
                raise
        else:
            port = config['port']

        # the listening interface
        #
        interface = str(config.get('interface', '').strip())

        # the TCP accept queue depth
        #
        backlog = int(config.get('backlog', 50))

        if 'tls' in config:
            # create a TLS server endpoint
            #
            if _HAS_TLS:
                # TLS server context
                context = _create_tls_server_context(config['tls'], cbdir, log)

                if version == 4:
                    endpoint = SSL4ServerEndpoint(reactor,
                                                  port,
                                                  context,
                                                  backlog=backlog,
                                                  interface=interface)
                elif version == 6:
                    raise Exception("TLS on IPv6 not implemented")
                else:
                    raise Exception("invalid TCP protocol version {}".format(version))
            else:
                raise Exception("TLS transport requested, but TLS packages not available:\n{}".format(_LACKS_TLS_MSG))

        else:
            # create a non-TLS server endpoint
            #
            if version == 4:
                endpoint = TCP4ServerEndpoint(reactor,
                                              port,
                                              backlog=backlog,
                                              interface=interface)
            elif version == 6:
                endpoint = TCP6ServerEndpoint(reactor,
                                              port,
                                              backlog=backlog,
                                              interface=interface)
            else:
                raise Exception("invalid TCP protocol version {}".format(version))

    # a Unix Domain Socket endpoint
    #
    elif config['type'] == 'unix':

        # the accept queue depth
        #
        backlog = int(config.get('backlog', 50))

        # the path
        #
        path = FilePath(join(cbdir, config['path']))

        # if there is already something there, delete it.
        #
        if path.exists():
            log.info(("{path} exists, attempting to remove before using as a "
                     "UNIX socket"), path=path)
            path.remove()

        # create the endpoint
        #
        endpoint = UNIXServerEndpoint(reactor, path.path, backlog=backlog)

    # twisted endpoint-string
    elif config['type'] == 'twisted':
        endpoint = serverFromString(reactor, config['server_string'])

    # tor endpoint
    elif config['type'] == 'onion':  # or "tor"? or "tor_onion"?
        port = config['port']
        private_key_fname = _ensure_absolute(config[u'private_key_file'], cbdir)
        tor_control_ep = create_connecting_endpoint_from_config(
            config[u'tor_control_endpoint'], cbdir, reactor, log
        )

        try:
            with open(private_key_fname, 'r') as f:
                private_key = f.read().strip()
        except (IOError, OSError):
            private_key = None

        @implementer(IStreamServerEndpoint)
        class _EphemeralOnion(object):

            @defer.inlineCallbacks
            def listen(self, proto_factory):
                # we don't care which local TCP port we listen on, but
                # we do need to know it
                local_ep = TCP4ServerEndpoint(reactor, 0, interface=u"127.0.0.1")
                target_port = yield local_ep.listen(proto_factory)
                tor = yield txtorcon.connect(
                    reactor,
                    tor_control_ep,
                )

                # create and add the service
                hs = txtorcon.EphemeralHiddenService(
                    ports=["{} 127.0.0.1:{}".format(port, target_port.getHost().port)],
                    key_blob_or_type=private_key if private_key else "NEW:BEST",
                )
                log.info("Uploading descriptors can take more than 30s")
                yield hs.add_to_tor(tor.protocol)

                # if it's new, store our private key
                # XXX better "if private_key is None"?
                if not exists(private_key_fname):
                    with open(private_key_fname, 'w') as f:
                        f.write(hs.private_key)
                    log.info("Wrote private key to '{fname}'", fname=private_key_fname)

                addr = txtorcon.TorOnionAddress(hs.hostname, port)
                log.info(
                    "Listening on Tor onion service {addr.onion_uri}:{addr.onion_port}"
                    " with local port {local_port}",
                    addr=addr,
                    local_port=target_port.getHost().port,
                )
                defer.returnValue(addr)
        endpoint = _EphemeralOnion()

    else:
        raise Exception("invalid endpoint type '{}'".format(config['type']))

    return endpoint
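For illustration, a hedged sketch of calling this factory for the two most common transport types; the config keys mirror the branches above, while the node directory, port, and logger wiring are assumptions of this example:

from twisted.internet import reactor
from twisted.logger import Logger

log = Logger()                 # any logger exposing .info()/.warn() with format-style kwargs

tcp_config = {
    'type': 'tcp',             # plain TCP listener; add a 'tls' section for TLS
    'version': 4,
    'port': 8080,
    'interface': '127.0.0.1',
    'backlog': 50,
}

unix_config = {
    'type': 'unix',            # UNIX domain socket, path is relative to the node directory
    'path': 'node.sock',
    'backlog': 50,
}

cbdir = '/tmp/mynode'          # hypothetical Crossbar.io node directory
tcp_endpoint = create_listening_endpoint_from_config(tcp_config, cbdir, reactor, log)
unix_endpoint = create_listening_endpoint_from_config(unix_config, cbdir, reactor, log)
# Both results implement IStreamServerEndpoint; start accepting connections with
# endpoint.listen(some_protocol_factory).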
Ejemplo n.º 55
0
def create_listening_endpoint_from_config(config, cbdir, reactor, log):
    """
    Create a Twisted stream server endpoint from a Crossbar.io transport configuration.

    See: https://twistedmatrix.com/documents/current/api/twisted.internet.interfaces.IStreamServerEndpoint.html

    :param config: The transport configuration.
    :type config: dict
    :param cbdir: Crossbar.io node directory (we need this for TLS key/certificates).
    :type cbdir: str
    :param reactor: The reactor to use for endpoint creation.
    :type reactor: obj

    :returns: obj -- An instance implementing IStreamServerEndpoint
    """
    endpoint = None

    # a TCP endpoint
    #
    if config['type'] == 'tcp':

        # the TCP protocol version (v4 or v6)
        #
        version = int(config.get('version', 4))

        # the listening port
        if type(config['port']) is six.text_type:
            # read port from environment variable ..
            try:
                port = int(environ[config['port'][1:]])
            except Exception as e:
                log.warn("Could not read listening port from env var: {}".format(e))
                raise
        else:
            port = config['port']

        # the listening interface
        #
        interface = str(config.get('interface', '').strip())

        # the TCP accept queue depth
        #
        backlog = int(config.get('backlog', 50))

        if 'tls' in config:
            # create a TLS server endpoint
            #
            if _HAS_TLS:
                # TLS server context
                context = _create_tls_server_context(config['tls'], cbdir, log)

                if version == 4:
                    endpoint = SSL4ServerEndpoint(reactor,
                                                  port,
                                                  context,
                                                  backlog=backlog,
                                                  interface=interface)
                elif version == 6:
                    raise Exception("TLS on IPv6 not implemented")
                else:
                    raise Exception("invalid TCP protocol version {}".format(version))
            else:
                raise Exception("TLS transport requested, but TLS packages not available:\n{}".format(_LACKS_TLS_MSG))

        else:
            # create a non-TLS server endpoint
            #
            if version == 4:
                endpoint = TCP4ServerEndpoint(reactor,
                                              port,
                                              backlog=backlog,
                                              interface=interface)
            elif version == 6:
                endpoint = TCP6ServerEndpoint(reactor,
                                              port,
                                              backlog=backlog,
                                              interface=interface)
            else:
                raise Exception("invalid TCP protocol version {}".format(version))

    # a Unix Domain Socket endpoint
    #
    elif config['type'] == 'unix':

        # the accept queue depth
        #
        backlog = int(config.get('backlog', 50))

        # the path
        #
        path = FilePath(join(cbdir, config['path']))

        # if there is already something there, delete it.
        #
        if path.exists():
            log.info(("{path} exists, attempting to remove before using as a "
                     "UNIX socket"), path=path)
            path.remove()

        # create the endpoint
        #
        endpoint = UNIXServerEndpoint(reactor, path.path, backlog=backlog)

    else:
        raise Exception("invalid endpoint type '{}'".format(config['type']))

    return endpoint
Ejemplo n.º 56
0
def run(args=None):
    """
    Parses the configuration options in C{args} and runs C{dpkg-buildpackage}
    to create a .deb file.

    @param args: List of strings representing the C{tap2deb} configuration
        options.
    @type args: L{list}
    """
    try:
        config = MyOptions()
        config.parseOptions(args)
    except usage.error as ue:
        sys.exit("%s: %s" % (sys.argv[0], ue))

    tapFile = config['tapfile']
    baseTapFile = os.path.basename(config['tapfile'])
    protocol = (config['protocol'] or os.path.splitext(baseTapFile)[0])
    debFile = config['debfile'] or 'twisted-' + protocol
    version = config['set-version']
    maintainer = config['maintainer']
    description = config['description'] or (
        'A Twisted-based server for %(protocol)s' % vars())
    longDescription = config['long_description'] or\
        'Automatically created by tap2deb'
    twistdOption = type_dict[config['type']]
    date = now()
    directory = debFile + '-' + version
    pythonVersion = '%s.%s' % sys.version_info[:2]
    buildDir = FilePath('.build').child(directory)

    if buildDir.exists():
        buildDir.remove()

    debianDir = buildDir.child('debian')
    debianDir.child('source').makedirs()
    shutil.copy(tapFile, buildDir.path)

    debianDir.child('README.Debian').setContent(
        '''This package was auto-generated by tap2deb\n''')

    debianDir.child('conffiles').setContent('''\
/etc/init.d/%(debFile)s
/etc/default/%(debFile)s
/etc/%(baseTapFile)s
''' % vars())

    debianDir.child('default').setContent('''\
pidfile=/var/run/%(debFile)s.pid
rundir=/var/lib/%(debFile)s/
file=/etc/%(tapFile)s
logfile=/var/log/%(debFile)s.log
 ''' % vars())

    debianDir.child('init.d').setContent('''\
#!/bin/sh

PATH=/sbin:/bin:/usr/sbin:/usr/bin

pidfile=/var/run/%(debFile)s.pid \
rundir=/var/lib/%(debFile)s/ \
file=/etc/%(tapFile)s \
logfile=/var/log/%(debFile)s.log

[ -r /etc/default/%(debFile)s ] && . /etc/default/%(debFile)s

test -x /usr/bin/twistd%(pythonVersion)s || exit 0
test -r $file || exit 0
test -r /usr/share/%(debFile)s/package-installed || exit 0


case "$1" in
    start)
        echo -n "Starting %(debFile)s: twistd"
        start-stop-daemon --start --quiet --exec /usr/bin/twistd%(pythonVersion)s -- \
                          --pidfile=$pidfile \
                          --rundir=$rundir \
                          --%(twistdOption)s=$file \
                          --logfile=$logfile
        echo "."
    ;;

    stop)
        echo -n "Stopping %(debFile)s: twistd"
        start-stop-daemon --stop --quiet  \
            --pidfile $pidfile
        echo "."
    ;;

    restart)
        $0 stop
        $0 start
    ;;

    force-reload)
        $0 restart
    ;;

    *)
        echo "Usage: /etc/init.d/%(debFile)s {start|stop|restart|force-reload}" >&2
        exit 1
    ;;
esac

exit 0
''' % vars())

    debianDir.child('init.d').chmod(0o755)

    debianDir.child('postinst').setContent('''\
#!/bin/sh
update-rc.d %(debFile)s defaults >/dev/null
invoke-rc.d %(debFile)s start
''' % vars())

    debianDir.child('prerm').setContent('''\
#!/bin/sh
invoke-rc.d %(debFile)s stop
''' % vars())

    debianDir.child('postrm').setContent('''\
#!/bin/sh
if [ "$1" = purge ]; then
        update-rc.d %(debFile)s remove >/dev/null
fi
''' % vars())

    debianDir.child('changelog').setContent('''\
%(debFile)s (%(version)s) unstable; urgency=low

  * Created by tap2deb

 -- %(maintainer)s  %(date)s

''' % vars())

    debianDir.child('control').setContent('''\
Source: %(debFile)s
Section: net
Priority: extra
Maintainer: %(maintainer)s
Build-Depends-Indep: debhelper
Standards-Version: 3.5.6

Package: %(debFile)s
Architecture: all
Depends: python%(pythonVersion)s-twisted
Description: %(description)s
 %(longDescription)s
''' % vars())

    debianDir.child('copyright').setContent('''\
This package was auto-debianized by %(maintainer)s on
%(date)s

It was auto-generated by tap2deb

Upstream Author(s): 
Moshe Zadka <*****@*****.**> -- tap2deb author

Copyright:

Insert copyright here.
''' % vars())

    debianDir.child('dirs').setContent('''\
etc/init.d
etc/default
var/lib/%(debFile)s
usr/share/doc/%(debFile)s
usr/share/%(debFile)s
''' % vars())

    debianDir.child('rules').setContent('''\
#!/usr/bin/make -f

export DH_COMPAT=1

build: build-stamp
build-stamp:
	dh_testdir
	touch build-stamp

clean:
	dh_testdir
	dh_testroot
	rm -f build-stamp install-stamp
	dh_clean

install: install-stamp
install-stamp: build-stamp
	dh_testdir
	dh_testroot
	dh_clean -k
	dh_installdirs

	# Add here commands to install the package into debian/tmp.
	cp %(baseTapFile)s debian/tmp/etc/
	cp debian/init.d debian/tmp/etc/init.d/%(debFile)s
	cp debian/default debian/tmp/etc/default/%(debFile)s
	cp debian/copyright debian/tmp/usr/share/doc/%(debFile)s/
	cp debian/README.Debian debian/tmp/usr/share/doc/%(debFile)s/
	touch debian/tmp/usr/share/%(debFile)s/package-installed
	touch install-stamp

binary-arch: build install

binary-indep: build install
	dh_testdir
	dh_testroot
	dh_strip
	dh_compress
	dh_installchangelogs
	dh_fixperms
	dh_installdeb
	dh_shlibdeps
	dh_gencontrol
	dh_md5sums
	dh_builddeb

source diff:
	@echo >&2 'source and diff are obsolete - use dpkg-source -b'; false

binary: binary-indep binary-arch
.PHONY: build clean binary-indep binary-arch binary install
''' % vars())

    debianDir.child('rules').chmod(0o755)

    args = ["dpkg-buildpackage", "-rfakeroot"]
    if config['unsigned']:
        args = args + ['-uc', '-us']

    # Build deb
    job = subprocess.Popen(args,
                           stdout=subprocess.PIPE,
                           stderr=subprocess.STDOUT,
                           cwd=buildDir.path)
    stdout, _ = job.communicate()
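A hedged invocation sketch; the long option names below are inferred from the config keys the function reads (tapfile, maintainer, type, set-version, unsigned) and should be checked against MyOptions before use:

run([
    '--tapfile', 'myservice.tap',                    # hypothetical .tap file
    '--maintainer', 'Jane Doe <jane@example.com>',   # goes into debian/control and changelog
    '--type', 'tap',                                 # selects the twistd option via type_dict
    '--set-version', '1.0',
    '--unsigned',                                    # pass -uc -us to dpkg-buildpackage
])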
Ejemplo n.º 57
0
class ReadSessions(unittest.TestCase):
    test_data = b"""line1
line2
anotherline"""
    port = 65466

    def setUp(self):
        self.clock = Clock()
        self.temp_dir = FilePath(tempfile.mkdtemp()).asBytesMode()
        self.target = self.temp_dir.child(b'foo')
        with self.target.open('wb') as temp_fd:
            temp_fd.write(self.test_data)
        self.reader = DelayedReader(self.target, _clock=self.clock, delay=2)
        self.transport = FakeTransport(hostAddress=('127.0.0.1', self.port))
        self.rs = ReadSession(self.reader, _clock=self.clock)
        self.rs.transport = self.transport
        self.rs.startProtocol()

    @inlineCallbacks
    def test_ERROR(self):
        err_dgram = ERRORDatagram.from_code(ERR_NOT_DEFINED, b'no reason')
        yield self.rs.datagramReceived(err_dgram)
        self.assertFalse(self.transport.value())
        self.assertTrue(self.transport.disconnecting)

    @inlineCallbacks
    def test_ACK_invalid_blocknum(self):
        ack_datagram = ACKDatagram(3)
        yield self.rs.datagramReceived(ack_datagram)
        self.assertFalse(self.transport.disconnecting)
        err_dgram = TFTPDatagramFactory(*split_opcode(self.transport.value()))
        self.assertTrue(isinstance(err_dgram, ERRORDatagram))
        self.addCleanup(self.rs.cancel)

    @inlineCallbacks
    def test_ACK_stale_blocknum(self):
        self.rs.blocknum = 2
        ack_datagram = ACKDatagram(1)
        yield self.rs.datagramReceived(ack_datagram)
        self.assertFalse(self.transport.disconnecting)
        self.assertFalse(
            self.transport.value(),
            "Stale ACK datagram, we should not write anything back")
        self.addCleanup(self.rs.cancel)

    def test_ACK(self):
        self.rs.block_size = 5
        self.rs.blocknum = 1
        ack_datagram = ACKDatagram(1)
        d = self.rs.datagramReceived(ack_datagram)

        def cb(ign):
            self.clock.advance(0.1)
            self.assertFalse(self.transport.disconnecting)
            data_datagram = TFTPDatagramFactory(
                *split_opcode(self.transport.value()))
            self.assertEqual(data_datagram.data, b'line1')
            self.assertFalse(
                self.rs.completed,
                "Got enough bytes from the reader, there is no reason to stop")

        d.addCallback(cb)
        self.clock.advance(2.5)
        self.addCleanup(self.rs.cancel)
        return d

    def test_ACK_finished(self):
        self.rs.block_size = 512
        self.rs.blocknum = 1

        # Send a terminating datagram
        ack_datagram = ACKDatagram(1)
        d = self.rs.datagramReceived(ack_datagram)

        def cb(ign):
            self.clock.advance(0.1)
            ack_datagram = ACKDatagram(2)
            # This datagram doesn't trigger any sends
            self.rs.datagramReceived(ack_datagram)

            self.assertEqual(self.transport.value(),
                             DATADatagram(2, self.test_data).to_wire())
            self.assertTrue(
                self.rs.completed,
                "Data length is less, than blocksize, time to stop")

        self.addCleanup(self.rs.cancel)
        d.addCallback(cb)
        self.clock.advance(3)
        return d

    def test_ACK_backoff(self):
        self.rs.block_size = 5
        self.rs.blocknum = 1

        ack_datagram = ACKDatagram(1)
        d = self.rs.datagramReceived(ack_datagram)

        def cb(ign):

            self.clock.pump((1, ) * 4)
            # Sent two times - initial send and a retransmit after first timeout
            self.assertEqual(self.transport.value(),
                             DATADatagram(2, self.test_data[:5]).to_wire() * 2)

            # Sent three times - initial send and two retransmits
            self.clock.pump((1, ) * 5)
            self.assertEqual(self.transport.value(),
                             DATADatagram(2, self.test_data[:5]).to_wire() * 3)

            # Sent still three times - initial send, two retransmits and the last wait
            self.clock.pump((1, ) * 10)
            self.assertEqual(self.transport.value(),
                             DATADatagram(2, self.test_data[:5]).to_wire() * 3)

            self.assertTrue(self.transport.disconnecting)

        d.addCallback(cb)
        self.clock.advance(2.5)
        return d

    @inlineCallbacks
    def test_failed_read(self):
        self.reader.finish()
        self.rs.reader = FailingReader()
        self.rs.blocknum = 1
        ack_datagram = ACKDatagram(1)
        yield self.rs.datagramReceived(ack_datagram)
        self.flushLoggedErrors()
        self.clock.advance(0.1)
        err_datagram = TFTPDatagramFactory(
            *split_opcode(self.transport.value()))
        self.assertTrue(isinstance(err_datagram, ERRORDatagram))
        self.assertTrue(self.transport.disconnecting)

    def test_rollover(self):
        self.rs.block_size = len(self.test_data)
        self.rs.blocknum = 65536
        self.rs.dataFromReader(self.test_data)
        self.assertEqual(self.rs.blocknum, 0)
        self.addCleanup(self.rs.cancel)

    def tearDown(self):
        self.temp_dir.remove()
Ejemplo n.º 58
0
class TestContentDirectoryServer(unittest.TestCase):
    def setUp(self):
        self.tmp_content = FilePath(self.mktemp())
        f = self.tmp_content.child('content')
        audio = f.child('audio')
        f.child('images').makedirs()
        f.child('video').makedirs()
        album = audio.child('album-1')
        album.makedirs()
        album.child('track-1.mp3').touch()
        album.child('track-2.mp3').touch()
        album = audio.child('album-2')
        album.makedirs()
        album.child('track-1.ogg').touch()
        album.child('track-2.ogg').touch()
        self.coherence = Coherence({
            'unittest': 'yes',
            'logmode': 'critical',
            'no-subsystem_log': {
                'controlpoint': 'error',
                'action': 'info',
                'soap': 'error'
            },
            'controlpoint': 'yes'
        })
        self.uuid = str(UUID())
        self.coherence.add_plugin('FSStore',
                                  name='MediaServer-%d' % os.getpid(),
                                  content=self.tmp_content.path,
                                  uuid=self.uuid,
                                  enable_inotify=False)

    def tearDown(self):
        self.tmp_content.remove()

        def cleaner(r):
            self.coherence.clear()
            return r

        dl = self.coherence.shutdown()
        dl.addBoth(cleaner)
        return dl

    def test_Browse(self):
        """ tries to find the activated FSStore backend
            and browses its root.
        """
        d = Deferred()

        @wrapped(d)
        def the_result(mediaserver):
            cdc = mediaserver.client.content_directory
            self.assertEqual(self.uuid, mediaserver.udn)
            call = cdc.browse(process_result=False)
            call.addCallback(got_first_answer, cdc)

        @wrapped(d)
        def got_first_answer(r, cdc):
            self.assertEqual(int(r['TotalMatches']), 1)
            didl = DIDLLite.DIDLElement.fromString(r['Result'])
            item = didl.getItems()[0]
            self.assertEqual(item.childCount, 3)
            call = cdc.browse(object_id=item.id, process_result=False)
            call.addCallback(got_second_answer, item.childCount)

        @wrapped(d)
        def got_second_answer(r, childcount):
            self.assertEqual(int(r['TotalMatches']), childcount)
            d.callback(None)

        self.coherence.ctrl.add_query(
            DeviceQuery('uuid',
                        self.uuid,
                        the_result,
                        timeout=10,
                        oneshot=True))
        return d

    def test_Browse_Non_Existing_Object(self):
        d = Deferred()

        @wrapped(d)
        def the_result(mediaserver):
            cdc = mediaserver.client.content_directory
            self.assertEqual(self.uuid, mediaserver.udn)
            call = cdc.browse(object_id='9999.nothing', process_result=False)
            call.addCallback(got_first_answer)

        @wrapped(d)
        def got_first_answer(r):
            self.assertIs(r, None)
            d.callback(None)

        self.coherence.ctrl.add_query(
            DeviceQuery('uuid',
                        self.uuid,
                        the_result,
                        timeout=10,
                        oneshot=True))
        return d

    def test_Browse_Metadata(self):
        """ tries to find the activated FSStore backend
            and requests metadata for ObjectID 0.
        """
        d = Deferred()

        @wrapped(d)
        def the_result(mediaserver):
            self.assertEqual(self.uuid, mediaserver.udn)
            cdc = mediaserver.client.content_directory
            call = cdc.browse(object_id='0',
                              browse_flag='BrowseMetadata',
                              process_result=False)
            call.addCallback(got_first_answer)

        @wrapped(d)
        def got_first_answer(r):
            self.assertEqual(int(r['TotalMatches']), 1)
            didl = DIDLLite.DIDLElement.fromString(r['Result'])
            item = didl.getItems()[0]
            self.assertEqual(item.title, 'root')
            d.callback(None)

        self.coherence.ctrl.add_query(
            DeviceQuery('uuid',
                        self.uuid,
                        the_result,
                        timeout=10,
                        oneshot=True))
        return d

    def test_XBOX_Browse(self):
        """ tries to find the activated FSStore backend
            and browses all audio files.
        """
        d = Deferred()

        @wrapped(d)
        def the_result(mediaserver):
            mediaserver.client.overlay_headers = {
                'user-agent': 'Xbox/Coherence emulation'
            }
            cdc = mediaserver.client.content_directory
            self.assertEqual(self.uuid, mediaserver.udn)
            call = cdc.browse(object_id='4', process_result=False)
            call.addCallback(got_first_answer)

        @wrapped(d)
        def got_first_answer(r):
            """ we expect four audio files here """
            self.assertEqual(int(r['TotalMatches']), 4)
            d.callback(None)

        self.coherence.ctrl.add_query(
            DeviceQuery('uuid',
                        self.uuid,
                        the_result,
                        timeout=10,
                        oneshot=True))
        return d

    def test_XBOX_Browse_Metadata(self):
        """ tries to find the activated FSStore backend
            and requests metadata for ObjectID 0.
        """
        d = Deferred()

        @wrapped(d)
        def the_result(mediaserver):
            mediaserver.client.overlay_headers = {
                'user-agent': 'Xbox/Coherence emulation'
            }
            cdc = mediaserver.client.content_directory
            self.assertEqual(self.uuid, mediaserver.udn)
            call = cdc.browse(object_id='0',
                              browse_flag='BrowseMetadata',
                              process_result=False)
            call.addCallback(got_first_answer)

        @wrapped(d)
        def got_first_answer(r):
            """ we expect one item here """
            self.assertEqual(int(r['TotalMatches']), 1)
            didl = DIDLLite.DIDLElement.fromString(r['Result'])
            item = didl.getItems()[0]
            self.assertEqual(item.title, 'root')
            d.callback(None)

        self.coherence.ctrl.add_query(
            DeviceQuery('uuid',
                        self.uuid,
                        the_result,
                        timeout=10,
                        oneshot=True))
        return d

    def test_XBOX_Search(self):
        """ tries to find the activated FSStore backend
            and searches for all its audio files.
        """

        d = Deferred()

        @wrapped(d)
        def the_result(mediaserver):
            mediaserver.client.overlay_headers = {
                'user-agent': 'Xbox/Coherence emulation'
            }
            cdc = mediaserver.client.content_directory
            self.assertEqual(self.uuid, mediaserver.udn)
            call = cdc.search(container_id='4', criteria='')
            call.addCallback(got_first_answer)

        @wrapped(d)
        def got_first_answer(r):
            """ we expect four audio files here """
            self.assertEqual(len(r), 4)
            d.callback(None)

        self.coherence.ctrl.add_query(
            DeviceQuery('uuid',
                        self.uuid,
                        the_result,
                        timeout=10,
                        oneshot=True))
        return d
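The tests in this last class rely on a wrapped(d) decorator that the snippet does not include. A minimal sketch of such a helper, assuming its only job is to turn an exception raised inside a callback into an errback on the test's Deferred, could look like this:

from functools import wraps


def wrapped(d):
    """Hypothetical helper: route exceptions raised in the callback into the Deferred d."""
    def decorator(fn):
        @wraps(fn)
        def safe(*args, **kwargs):
            try:
                return fn(*args, **kwargs)
            except Exception:
                if not d.called:
                    d.errback()   # builds a Failure from the active exception
        return safe
    return decorator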