Example #1
 def test_files(self):
     manager = project.ProjectManager(self.mktemp())
     prj = manager.get_project(NAME)
     prj.create()
     files = [FilePath(prj.path).child(".project")]
     self.assertEqual(list(prj.files()), files)
     afile = FilePath(prj.path).child("file")
     afile.open("w").close()
     files.append(afile)
     self.assertEqual(sorted(prj.files()), sorted(files))
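FilePath also has a touch() method that creates an empty file (and updates its timestamps) without juggling an open handle, so the afile.open("w").close() idiom above can be shortened; a minimal sketch, assuming the project fixture from the test above:

afile = FilePath(prj.path).child("file")
# touch() creates the file if it does not exist yet, so there is no
# file object to remember to close.
afile.touch()
assert afile in list(prj.files())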
Example #2
    def test_file_logging_rotation_5_files(self):
        """
        Only 5 logfiles are kept.
        """
        logfile = FilePath(self.mktemp()).child('foo.log')
        logfile.parent().makedirs()
        # This file will become foo.log.1
        with logfile.open('w') as f:
            f.write(b'0')
            f.truncate(int(MiB(100).to_Byte().value))
        # These file extensions will be incremented
        for i in range(1, 5):
            sibling = logfile.sibling(logfile.basename() + u'.' + unicode(i))
            with sibling.open('w') as f:
                f.write(bytes(i))

        d = self.run_script(EliotScript, options=['--logfile', logfile.path])

        def verify_logfiles(stdout_messages, logfile):
            logfile_dir = logfile.parent()
            self.assertEqual(
                # The contents of the files will now be an integer one less
                # than the integer in the file name.
                map(bytes, range(0, 4)),
                list(
                    logfile_dir.child('foo.log.{}'.format(i)).open().read(1)
                    for i
                    in range(1, 5)
                )
            )
        d.addCallback(verify_logfiles, logfile=logfile)

        return d
Example #3
def get_current_version():
    manifest = FilePath(config.resources_directory).child("manifest.json")
    if not manifest.exists():
        return 0
    with manifest.open("r") as f:
        manifest = json.load(f)
    return int(manifest["version"])
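Example #3 only reads manifest.json; the natural counterpart is a writer built on FilePath.setContent(), which writes the bytes to a temporary sibling and renames it into place, so readers never see a half-written manifest. A minimal sketch, where set_current_version() is a hypothetical helper mirroring the reader above:

def set_current_version(version):
    # Hypothetical counterpart of get_current_version() above.
    manifest = FilePath(config.resources_directory).child("manifest.json")
    # setContent() takes bytes and replaces the file atomically.
    manifest.setContent(json.dumps({"version": int(version)}).encode("utf-8"))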
Example #4
def create_introducer_clients(config, main_tub):
    """
    Read, validate and parse any 'introducers.yaml' configuration.

    :returns: a list of IntroducerClient instances
    """
    # we return this list
    introducer_clients = []

    introducers_yaml_filename = config.get_private_path("introducers.yaml")
    introducers_filepath = FilePath(introducers_yaml_filename)

    try:
        with introducers_filepath.open() as f:
            introducers_yaml = yamlutil.safe_load(f)
            if introducers_yaml is None:
                raise EnvironmentError(
                    EPERM,
                    "Can't read '{}'".format(introducers_yaml_filename),
                    introducers_yaml_filename,
                )
            introducers = introducers_yaml.get("introducers", {})
            log.msg("found {} introducers in private/introducers.yaml".format(
                len(introducers), ))
    except EnvironmentError as e:
        if e.errno != ENOENT:
            raise
        introducers = {}

    if "default" in introducers.keys():
        raise ValueError(
            "'default' introducer furl cannot be specified in introducers.yaml;"
            " please fix impossible configuration.")

    # read furl from tahoe.cfg
    tahoe_cfg_introducer_furl = config.get_config("client", "introducer.furl",
                                                  None)
    if tahoe_cfg_introducer_furl == "None":
        raise ValueError("tahoe.cfg has invalid 'introducer.furl = None':"
                         " to disable it, use 'introducer.furl ='"
                         " or omit the key entirely")
    if tahoe_cfg_introducer_furl:
        introducers[u'default'] = {'furl': tahoe_cfg_introducer_furl}

    for petname, introducer in introducers.items():
        introducer_cache_filepath = FilePath(
            config.get_private_path(
                "introducer_{}_cache.yaml".format(petname)))
        ic = IntroducerClient(
            main_tub,
            introducer['furl'].encode("ascii"),
            config.nickname,
            str(allmydata.__full_version__),
            str(_Client.OLDEST_SUPPORTED_VERSION),
            node.get_app_versions(),
            partial(_sequencer, config),
            introducer_cache_filepath,
        )
        introducer_clients.append(ic)
    return introducer_clients
Example #5
 def activate(self):
     super(StarboundConfigManager, self).activate()
     try:
         configuration_file = FilePath(
             self.config.starbound_path).child('starbound.config')
         if not configuration_file.exists():
             raise FatalPluginError(
                 'Could not open starbound configuration file. '
                 'Tried path: {}'.format(configuration_file))
     except AttributeError:
         raise FatalPluginError('The starbound path (starbound_path)'
                                ' is not set in the configuration.')
     try:
         with configuration_file.open() as f:
             starbound_config = json.load(f)
     except Exception as e:
         raise FatalPluginError(
             'Could not parse the starbound configuration file as JSON.'
             'Error given from JSON decoder: {}'.format(e))
     if self.config.upstream_port != starbound_config['gameServerPort']:
         raise FatalPluginError(
             'The starbound gameServerPort option ({}) does not match the '
             'config.json upstream_port ({}).'.format(
                 starbound_config['gameServerPort'],
                 self.config.upstream_port))
Example #6
 def setUp(self):
     """
     Create a temporary file with a fixed payload of 64 bytes.  Create a
     resource for that file and create a request which will be for that
     resource.  Each test can set a different range header to test different
     aspects of the implementation.
     """
     path = FilePath(self.mktemp())
     # This is just a jumble of random stuff.  It's supposed to be a good
     # set of data for this test, particularly in order to avoid
     # accidentally seeing the right result by having a byte sequence
     # repeated at different locations or by having byte values which are
     # somehow correlated with their position in the string.
     self.payload = ('\xf8u\xf3E\x8c7\xce\x00\x9e\xb6a0y0S\xf0\xef\xac\xb7'
                     '\xbe\xb5\x17M\x1e\x136k{\x1e\xbe\x0c\x07\x07\t\xd0'
                     '\xbckY\xf5I\x0b\xb8\x88oZ\x1d\x85b\x1a\xcdk\xf2\x1d'
                     '&\xfd%\xdd\x82q/A\x10Y\x8b')
     path.setContent(self.payload)
     self.file = path.open()
     self.resource = static.File(self.file.name)
     self.resource.isLeaf = 1
     self.request = DummyRequest([''])
     self.request.uri = self.file.name
     self.catcher = []
     log.addObserver(self.catcher.append)
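Keeping the handle from path.open() alive for the whole test is what static.File needs here, but for a simple round-trip check the setContent()/getContent() pair avoids managing a file object at all; a minimal sketch, assuming the same path/self.payload fixture as in setUp above:

path.setContent(self.payload)
# getContent() returns the whole file as bytes, so the round trip can be
# asserted without an explicit open()/close().
assert path.getContent() == self.payload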
Example #7
def create_zfs_pool(test_case):
    """Create a new ZFS pool, then delete it after the test is over.

    :param test_case: A ``unittest.TestCase``.

    :return: The pool's name as ``bytes``.
    """
    if os.getuid() != 0:
        raise SkipTest("Functional tests must run as root.")

    pool_name = b"testpool_%s" % (uuid.uuid4(), )
    pool_path = FilePath(test_case.mktemp())
    mount_path = FilePath(test_case.mktemp())
    with pool_path.open("wb") as f:
        f.truncate(100 * 1024 * 1024)
    test_case.addCleanup(pool_path.remove)
    try:
        run_process([
            b"zpool", b"create", b"-m", mount_path.path, pool_name,
            pool_path.path
        ])
    except OSError as e:
        if e.errno == errno.ENOENT:
            raise SkipTest(
                "Install zpool to run these tests: "
                "http://doc-dev.clusterhq.com/using/installing/index.html"
                "#optional-zfs-backend-configuration")

        raise
    test_case.addCleanup(run_process, [b"zpool", b"destroy", pool_name])
    return pool_name
Example #8
    def test_file_logging_rotation_5_files(self):
        """
        Only 5 logfiles are kept.
        """
        logfile = FilePath(self.mktemp()).child('foo.log')
        logfile.parent().makedirs()
        # This file will become foo.log.1
        with logfile.open('w') as f:
            f.write(b'0')
            f.truncate(int(MiB(100).to_Byte().value))
        # These file extensions will be incremented
        for i in range(1, 5):
            sibling = logfile.sibling(logfile.basename() + u'.' + unicode(i))
            with sibling.open('w') as f:
                f.write(bytes(i))

        d = self.run_script(EliotScript, options=['--logfile', logfile.path])

        def verify_logfiles(stdout_messages, logfile):
            logfile_dir = logfile.parent()
            self.assertEqual(
                # The contents of the files will now be an integer one less
                # than the integer in the file name.
                map(bytes, range(0, 4)),
                list(
                    logfile_dir.child('foo.log.{}'.format(i)).open().read(1)
                    for i in range(1, 5)))

        d.addCallback(verify_logfiles, logfile=logfile)

        return d
Example #9
 def activate(self):
     super(StarboundConfigManager, self).activate()
     try:
         configuration_file = FilePath(
             self.config.starbound_path).child('starbound.config')
         if not configuration_file.exists():
             raise FatalPluginError(
                 "Could not open starbound configuration file. Tried path: %s"
                 % configuration_file)
     except AttributeError:
         raise FatalPluginError(
             "The starbound path (starbound_path) is not set in the configuration."
         )
     try:
         with configuration_file.open() as f:
             starbound_config = json.load(f)
     except Exception as e:
         raise FatalPluginError(
             "Could not parse the starbound configuration file as JSON. Error given from JSON decoder: %s"
             % str(e))
     if self.config.upstream_port != starbound_config['gamePort']:
         raise FatalPluginError(
             "The starbound gamePort option (%d) does not match the config.json upstream_port (%d)."
             % (starbound_config['gamePort'], self.config.upstream_port))
     self._spawn = starbound_config['defaultWorldCoordinate'].split(":")
Example #10
def create_zfs_pool(test_case):
    """Create a new ZFS pool, then delete it after the test is over.

    :param test_case: A ``unittest.TestCase``.

    :return: The pool's name as ``bytes``.
    """
    if os.getuid() != 0:
        raise SkipTest("Functional tests must run as root.")

    pool_name = b"testpool_%s" % (uuid.uuid4(),)
    pool_path = FilePath(test_case.mktemp())
    mount_path = FilePath(test_case.mktemp())
    with pool_path.open("wb") as f:
        f.truncate(100 * 1024 * 1024)
    test_case.addCleanup(pool_path.remove)
    try:
        run_process([b"zpool", b"create", b"-m", mount_path.path,
                     pool_name, pool_path.path])
    except OSError as e:
        if e.errno == errno.ENOENT:
            raise SkipTest(
                "Install zpool to run these tests: "
                "http://doc-dev.clusterhq.com/using/installing/index.html"
                "#optional-zfs-backend-configuration")

        raise
    test_case.addCleanup(run_process, [b"zpool", b"destroy", pool_name])
    return pool_name
Example #11
def get_client(options):
    cluster = FilePath(options["cluster-yml"])
    if cluster.exists():
        config = yaml.load(cluster.open())
        certificates_path = cluster.parent()
        user = config["users"][0]
        control_service = None  # figure it out based on cluster.yml
    else:
        certificates_path = FilePath(options["certs-path"])
        if options["user"] is None:
            raise UsageError("must specify --user")
        user = options["user"]
        if options["control-service"] is None:
            raise UsageError("must specify --control-service")
        control_service = options["control-service"]

    user_certificate_filename = "%s.crt" % (user, )
    user_key_filename = "%s.key" % (user, )

    return txflocker_get_client(
        certificates_path=certificates_path,
        user_certificate_filename=user_certificate_filename,
        user_key_filename=user_key_filename,
        target_hostname=control_service,
    )
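One caveat with the cluster.yml branch above: yaml.load() without an explicit Loader will happily construct arbitrary Python objects from YAML tags. For plain configuration data, yaml.safe_load() is the safer drop-in; a minimal sketch of that branch (an illustrative variant, not the project's code):

if cluster.exists():
    # safe_load() limits the document to plain mappings, lists and scalars,
    # and the with-block makes sure the handle is closed.
    with cluster.open() as f:
        config = yaml.safe_load(f)
    certificates_path = cluster.parent()
    user = config["users"][0]
    control_service = None  # figure it out based on cluster.yml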
Example #12
 def render_POST(self, request):
     context = self._context(request)
     form, status, appstruct = self._validate_form(request)

     if status == 'ok':
         filename = context['renderer'](appstruct)
         fileobj = FilePath(filename)

         request.setHeader('content-disposition', 'attachment; filename="output.pdf"')
         request.setHeader('content-length', str(fileobj.getsize()))
         request.setHeader('content-type', 'application/pdf')

         producer = NoRangeStaticProducer(request, fileobj.open('r'))
         producer.start()
         return NOT_DONE_YET

     # Validation failed: re-render the form into the template context.
     context['renderer_form'] = form.render(appstruct)
     self.render_template(context, request)
     return NOT_DONE_YET
Example #13
 def setUp(self):
     """
     Create a temporary file with a fixed payload of 64 bytes.  Create a
     resource for that file and create a request which will be for that
     resource.  Each test can set a different range header to test different
     aspects of the implementation.
     """
     path = FilePath(self.mktemp())
     # This is just a jumble of random stuff.  It's supposed to be a good
     # set of data for this test, particularly in order to avoid
     # accidentally seeing the right result by having a byte sequence
     # repeated at different locations or by having byte values which are
     # somehow correlated with their position in the string.
     self.payload = ('\xf8u\xf3E\x8c7\xce\x00\x9e\xb6a0y0S\xf0\xef\xac\xb7'
                     '\xbe\xb5\x17M\x1e\x136k{\x1e\xbe\x0c\x07\x07\t\xd0'
                     '\xbckY\xf5I\x0b\xb8\x88oZ\x1d\x85b\x1a\xcdk\xf2\x1d'
                     '&\xfd%\xdd\x82q/A\x10Y\x8b')
     path.setContent(self.payload)
     self.file = path.open()
     self.resource = static.File(self.file.name)
     self.resource.isLeaf = 1
     self.request = DummyRequest([''])
     self.request.uri = self.file.name
     self.catcher = []
     log.addObserver(self.catcher.append)
Example #14
def handle_config():
    if request.method == 'POST':    
        log.msg('Received JSON post with config')     
        jsonConfig = request.get_json(True)
        mumudvbConfig = ConfigParser.SafeConfigParser()
        for cardConfig in jsonConfig:
            card = cardConfig['_']['card']
            # Check the type, dvb-c = freq/1,000, dvb-s(2) = freq/1,000,000
            type = cardConfig['_']['type'] 
            if (type == 'DVB-C'):
                cardConfig['_']['freq'] =  int(cardConfig['_']['freq'])/1000
            else:
                cardConfig['_']['freq'] =  int(cardConfig['_']['freq'])/1000000
            # The DVB-S2 type needs an additional delivery system option
            if (type == 'DVB-S2'):
                cardConfig['_']['delivery_system'] = type
            cardConfig['_']['srate'] = int(cardConfig['_']['srate'])/1000
            cardConfig['_']['log_file'] = '/var/log/mumudvb' + card
            cardConfig['_']['log_type'] = 'syslog' 
            for section in sorted(cardConfig, reverse=True):                
                mumudvbConfig.add_section(section)
                for key in cardConfig[section]:
                    if cardConfig[section][key] is not None and key != 'type':
                        mumudvbConfig.set(section, str(key), str(cardConfig[section][key]))
            cardConf = FilePath(tmpdir.path + '/dvbrc_adapter' + card + '.conf')
            with cardConf.open('wb') as configfile:
                mumudvbConfig.write(configfile)
            if cardConf.isfile():
                mumu = startMumudvb(card)
                cmd = ["mumudvb","-d","-c", cardConf.path]
                log.msg('Starting MuMuDVB with the following flags: ' + str(cmd) + ' on card ' + card)
                process = reactor.spawnProcess(mumu, cmd[0], cmd, usePTY=True, env=None)
                log.msg(process)
    return ''
Example #15
def main(reactor):
    print "Message size: %d bytes   Num messages: %d" % (LENGTH, MESSAGES)
    message = b"a" * LENGTH
    fp = FilePath(tempfile.mktemp())
    writer = ThreadedFileWriter(fp.open("ab"), reactor)
    writer.startService()

    start = time.time()
    for i in range(MESSAGES):
        writer(message)
    d = writer.stopService()

    def done(_):
        elapsed = time.time() - start
        kbSec = (LENGTH * MESSAGES) / (elapsed * 1024)
        messagesSec = MESSAGES / elapsed
        print "messages/sec: %s   KB/sec: %s" % (messagesSec, kbSec)

    d.addCallback(done)

    def cleanup(result):
        fp.restat()
        print
        print "File size: ", fp.getsize()
        fp.remove()

    d.addBoth(cleanup)
    return d
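main() takes the reactor and returns the Deferred chain, which is exactly the shape twisted.internet.task.react() expects; a minimal usage sketch, assuming the script is launched directly:

if __name__ == '__main__':
    from twisted.internet.task import react
    # react() starts the reactor, calls main(reactor), and shuts the
    # reactor down once the returned Deferred fires.
    react(main)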
Example #16
def main(reactor):
    print "Message size: %d bytes   Num messages: %d" % (LENGTH, MESSAGES)
    message = b"a" * LENGTH
    fp = FilePath(tempfile.mktemp())
    writer = ThreadedFileWriter(fp.open("ab"), reactor)
    writer.startService()

    start = time.time()
    for i in range(MESSAGES):
        writer(message)
    d = writer.stopService()

    def done(_):
        elapsed = time.time() - start
        kbSec = (LENGTH * MESSAGES) / (elapsed * 1024)
        messagesSec = MESSAGES / elapsed
        print "messages/sec: %s   KB/sec: %s" % (messagesSec, kbSec)
    d.addCallback(done)

    def cleanup(result):
        fp.restat()
        print
        print "File size: ", fp.getsize()
        fp.remove()
    d.addBoth(cleanup)
    return d
Example #17
def get_client(options):
    cluster = FilePath(options["cluster-yml"])
    if cluster.exists():
        config = yaml.load(cluster.open())
        certificates_path = cluster.parent()
        user = config["users"][0]
        control_service = None # figure it out based on cluster.yml
    else:
        certificates_path = FilePath(options["certs-path"])
        if options["user"] is None:
            raise UsageError("must specify --user")
        user = options["user"]
        if options["control-service"] is None:
            raise UsageError("must specify --control-service")
        control_service = options["control-service"]

    user_certificate_filename = "%s.crt" % (user,)
    user_key_filename = "%s.key" % (user,)

    return txflocker_get_client(
        certificates_path=certificates_path,
        user_certificate_filename=user_certificate_filename,
        user_key_filename=user_key_filename,
        target_hostname=control_service,
    )
Example #18
def get_current_version():
    manifest = FilePath(config.resources_directory).child("manifest.json")
    if not manifest.exists():
        return 0
    with manifest.open("r") as f:
        manifest = json.load(f)
    return int(manifest["version"])
Example #19
    def init_introducer_clients(self):
        self.introducer_clients = []
        self.introducer_furls = []

        introducers_yaml_filename = os.path.join(self.basedir, "private", "introducers.yaml")
        introducers_filepath = FilePath(introducers_yaml_filename)

        try:
            with introducers_filepath.open() as f:
                introducers_yaml = yamlutil.safe_load(f)
                introducers = introducers_yaml.get("introducers", {})
                log.msg("found %d introducers in private/introducers.yaml" %
                        len(introducers))
        except EnvironmentError:
            introducers = {}

        if "default" in introducers.keys():
            raise ValueError("'default' introducer furl cannot be specified in introducers.yaml; please fix impossible configuration.")

        # read furl from tahoe.cfg
        tahoe_cfg_introducer_furl = self.get_config("client", "introducer.furl", None)
        if tahoe_cfg_introducer_furl:
            introducers[u'default'] = {'furl':tahoe_cfg_introducer_furl}

        for petname, introducer in introducers.items():
            introducer_cache_filepath = FilePath(os.path.join(self.basedir, "private", "introducer_{}_cache.yaml".format(petname)))
            ic = IntroducerClient(self.tub, introducer['furl'],
                                  self.nickname,
                                  str(allmydata.__full_version__),
                                  str(self.OLDEST_SUPPORTED_VERSION),
                                  self.get_app_versions(), self._sequencer, introducer_cache_filepath)
            self.introducer_clients.append(ic)
            self.introducer_furls.append(introducer['furl'])
            ic.setServiceParent(self)
Example #20
	def test_rewriteCss(self):
		"""
		Test that CSS processing works, and verify the header.
		"""
		clock = Clock()
		fc = FileCache(lambda: clock.seconds(), 1)
		temp = FilePath(self.mktemp() + '.css')
		with temp.open('wb') as f:
			f.write("p { color: red; }\n")

		# BetterFile(temp.path) would not work because the processing happens
		# in getChild.  So, create a BetterFile for the .css file's parent dir.
		bf = BetterFile(temp.parent().path, fileCache=fc, rewriteCss=True)
		d = self._requestPostpathAndRender(bf, [temp.basename()])

		headerRe = re.compile(r"/\* CSSResource processed ([0-9a-f]{32}?) \*/")
		def assertProcessedContent((request, child)):
			out = "".join(request.written)
			lines = out.split("\n")
			self.assertTrue(re.match(headerRe, lines[0]), lines[0])
			self.assertEqual("p { color: red; }", lines[1])
			self.assertEqual("", lines[2])
			self.assertEqual(3, len(lines))
		d.addCallback(assertProcessedContent)
		return d
Example #21
 def activate(self):
     super(StarboundConfigManager, self).activate()
     try:
         configuration_file = FilePath(
             self.config.starbound_path
         ).child('starbound.config')
         if not configuration_file.exists():
             raise FatalPluginError(
                 'Could not open starbound configuration file. '
                 'Tried path: {}'.format(configuration_file)
             )
     except AttributeError:
         raise FatalPluginError(
             'The starbound path (starbound_path)'
             ' is not set in the configuration.'
         )
     try:
         with configuration_file.open() as f:
             starbound_config = json.load(f)
     except Exception as e:
         raise FatalPluginError(
             'Could not parse the starbound configuration file as JSON.'
             'Error given from JSON decoder: {}'.format(e)
         )
     if self.config.upstream_port != starbound_config['gameServerPort']:
         raise FatalPluginError(
             'The starbound gameServerPort option ({}) does not match the '
             'config.json upstream_port ({}).'.format(
                 starbound_config['gameServerPort'],
                 self.config.upstream_port
             )
         )
Example #22
def main(wpImportFile, nikolaRoot):
    wpImportFile = FilePath(wpImportFile)
    nikolaRoot = FilePath(nikolaRoot)

    wpImport = parse(wpImportFile.open())
    for post in wpImport.iterfind('/channel/item'):
        if post.findtext('{http://wordpress.org/export/1.2/}status') != u'publish':
            continue
        updateDisqusId(disqusId(post), metaFile(nikolaRoot, post))
Example #23
def gunzip(file_path):
    """
    gunzip a file in place.
    """
    tmp_location = FilePath(file_path).temporarySibling()
    in_file = gzip.open(file_path)
    with tmp_location.open('w') as out_file:
        shutil.copyfileobj(in_file, out_file)
    in_file.close()
    rename(tmp_location.path, file_path)
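The same temporarySibling()-plus-rename pattern works in the other direction too; a sketch of a gzip-in-place helper written to mirror gunzip() above (an illustration, not code from the project):

def gzip_in_place(file_path):
    """
    gzip a file in place, mirroring gunzip() above.
    """
    tmp_location = FilePath(file_path).temporarySibling()
    with open(file_path, 'rb') as in_file:
        # FilePath.open() opens in binary mode, so the scratch file can be
        # wrapped directly by GzipFile.
        with tmp_location.open('w') as raw_out:
            with gzip.GzipFile(fileobj=raw_out, mode='wb') as out_file:
                shutil.copyfileobj(in_file, out_file)
    rename(tmp_location.path, file_path)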
Example #24
 def test_json_file_userdata(self):
     """
     JSON file adds to result.
     """
     json_file = FilePath(self.mktemp())
     with json_file.open('w') as f:
         f.write('{"branch": "master"}\n')
     options = BenchmarkOptions()
     options.parseOptions(['--userdata', '@{}'.format(json_file.path)])
     self.assertEqual(parse_userdata(options), {"branch": "master"})
Example #25
 def test_json_file_userdata(self):
     """
     JSON file adds to result.
     """
     json_file = FilePath(self.mktemp())
     with json_file.open('w') as f:
         f.write('{"branch": "master"}\n')
     options = BenchmarkOptions()
     options.parseOptions(['--userdata', '@{}'.format(json_file.path)])
     self.assertEqual(parse_userdata(options), {"branch": "master"})
Example #26
def gunzip(file_path):
    """
    gunzip a file in place.
    """
    tmp_location = FilePath(file_path).temporarySibling()
    in_file = gzip.open(file_path)
    with tmp_location.open('w') as out_file:
        shutil.copyfileobj(in_file, out_file)
    in_file.close()
    rename(tmp_location.path, file_path)
Example #27
class RemoteOriginReadOptionNegotiation(unittest.TestCase):
    test_data = """line1
line2
anotherline"""
    port = 65466

    def setUp(self):
        self.clock = Clock()
        self.tmp_dir_path = tempfile.mkdtemp()
        self.target = FilePath(self.tmp_dir_path).child('foo')
        with self.target.open('wb') as temp_fd:
            temp_fd.write(self.test_data)
        self.reader = DelayedReader(self.target, _clock=self.clock, delay=2)
        self.transport = FakeTransport(hostAddress=('127.0.0.1', self.port))
        self.rs = RemoteOriginReadSession(('127.0.0.1', 65465), self.reader,
                                          options={'blksize':'9'}, _clock=self.clock)
        self.rs.transport = self.transport

    def test_option_normal(self):
        self.rs.startProtocol()
        self.clock.advance(0.1)
        oack_datagram = OACKDatagram({'blksize':'9'}).to_wire()
        self.assertEqual(self.transport.value(), oack_datagram)
        self.clock.advance(3)
        self.assertEqual(self.transport.value(), oack_datagram * 2)

        self.transport.clear()
        self.rs.datagramReceived(ACKDatagram(0).to_wire(), ('127.0.0.1', 65465))
        self.clock.pump((1,)*3)
        self.assertEqual(self.transport.value(), DATADatagram(1, self.test_data[:9]).to_wire())

        self.addCleanup(self.rs.cancel)

    def test_option_timeout(self):
        self.rs.startProtocol()
        self.clock.advance(0.1)
        oack_datagram = OACKDatagram({'blksize':'9'}).to_wire()
        self.assertEqual(self.transport.value(), oack_datagram)
        self.failIf(self.transport.disconnecting)

        self.clock.advance(3)
        self.assertEqual(self.transport.value(), oack_datagram * 2)
        self.failIf(self.transport.disconnecting)

        self.clock.advance(2)
        self.assertEqual(self.transport.value(), oack_datagram * 3)
        self.failIf(self.transport.disconnecting)

        self.clock.advance(2)
        self.assertEqual(self.transport.value(), oack_datagram * 3)
        self.failUnless(self.transport.disconnecting)

    def tearDown(self):
        shutil.rmtree(self.tmp_dir_path)
Example #28
 def deviceList(self, request, tag):
     if not CommandServer.isDisabled:
         for device in sorted(self.deviceServerFactory.devices):
             try:
                 device_path = FilePath('hamjab/resources/devices/{device}/device.json'.format(device=device))
                 with device_path.open() as device_file:
                     device_data = json.load(device_file)
                     deviceName = device_data['name'] 
             except Exception:
                 deviceName = device
             yield tag.clone().fillSlots(deviceId = device, deviceName = deviceName)
Example #29
 def initialize_testlocal_state(self, test_name):
     timestamp = '1970-01-01T00:00:00Z'
     fpcleantimestamp = timestamp.replace(':', '')
     logdirname = "%s-%s" % (fpcleantimestamp, self.MENCODED_IDS)
     testconfigdir = self.mockconfigdir.child(test_name).child('secrets').child(self.MPLAN_ID).child(logdirname)
     testconfigdir.makedirs()
     MLOGFILE_fp = FilePath(testconfigdir.path + '/signup_logs')
     MSSEC2_secretsfile = FilePath(testconfigdir.path + '/SSEC2').open('a+')
     signup_logfile = MLOGFILE_fp.open('a+')
     signup_stdout = LoggingStream(signup_logfile, '>')
     signup_stderr = LoggingStream(signup_logfile, '')
     return signup_stdout, signup_stderr, MLOGFILE_fp.path, MSSEC2_secretsfile
Example #30
    def test_config_directory_is_file(self):
        """
        Using --config with a file is an error
        """
        confdir = FilePath(self.mktemp())
        with confdir.open("w") as f:
            f.write("dummy\n")

        outcome = yield cli(confdir, ["list"])
        self.assertThat(outcome.code, Equals(1))
        self.assertThat(outcome.stderr,
                        Contains("Unable to load configuration"))
Example #31
 def _writeDocsFor(self, ob):
     if not ob.isVisible:
         return
     if ob.documentation_location is model.DocLocation.OWN_PAGE:
         if self.dry_run:
             self.total_pages += 1
         else:
             path = FilePath(self.base).child(f'{ob.fullName()}.html')
             with path.open('wb') as out:
                 self._writeDocsForOne(ob, out)
     for o in ob.contents.values():
         self._writeDocsFor(o)
Example #32
 def test_invalid_file_data(self):
     """
     Invalid file data handled.
     """
     invalid_file = FilePath(self.mktemp())
     with invalid_file.open('w') as f:
         f.write('hello\n')
     options = BenchmarkOptions()
     options.parseOptions(['--userdata', '@{}'.format(invalid_file.path)])
     with capture_stderr() as captured_stderr:
         exception = self.assertRaises(SystemExit, parse_userdata, options)
         self.assertIn('Invalid user data', exception.args[0])
         self.assertIn(options.getUsage(), captured_stderr())
Example #33
    def test_readXML(self):

        fp = FilePath(self.mktemp())
        fp.open("w").write("""<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE servers SYSTEM "servertoserver.dtd">
<servers>
  <server>
    <uri>https://localhost:8543/inbox</uri>
    <allow-requests-from/>
    <allow-requests-to/>
    <domains>
        <domain>example.org</domain>
    </domains>
    <hosts>
        <host>127.0.0.1</host>
    </hosts>
  </server>
</servers>
""")

        parser = IScheduleServersParser(fp)
        self.assertEqual(len(parser.servers), 1)
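fp.open("w").write(...) leaves closing the handle to garbage collection; FilePath.setContent() writes and closes (atomically replacing the file) in one call. A sketch of the same fixture setup, where SERVER_TO_SERVER_XML stands for the XML document string used in the test above:

fp = FilePath(self.mktemp())
# setContent() writes the bytes and swaps the file into place atomically,
# so no open file object is left dangling.
fp.setContent(SERVER_TO_SERVER_XML)
parser = IScheduleServersParser(fp)
self.assertEqual(len(parser.servers), 1)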
Example #34
class TestRegion(TestCase):

    def setUp(self):
        self.fp = FilePath(self.mktemp())
        self.region = Region(self.fp)

    def test_trivial(self):
        pass

    def test_create(self):
        self.region.create()
        with self.fp.open("r") as handle:
            self.assertEqual(handle.read(), "\x00" * 8192)
Example #35
    def test_readXML(self):

        fp = FilePath(self.mktemp())
        fp.open("w").write("""<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE servers SYSTEM "servertoserver.dtd">
<servers>
  <server>
    <uri>https://localhost:8543/inbox</uri>
    <allow-requests-from/>
    <allow-requests-to/>
    <domains>
        <domain>example.org</domain>
    </domains>
    <hosts>
        <host>127.0.0.1</host>
    </hosts>
  </server>
</servers>
""")

        parser = IScheduleServersParser(fp)
        self.assertEqual(len(parser.servers), 1)
Example #36
 def test_closesResolvConf(self):
     """
     As part of its constructor, C{StubResolver} opens C{/etc/resolv.conf};
     then, explicitly closes it and does not count on the GC to do so for
     it.
     """
     handle = FilePath(self.mktemp())
     resolvConf = handle.open(mode='w+')
     class StubResolver(client.Resolver):
         def _openFile(self, name):
             return resolvConf
     StubResolver(servers=["example.com", 53], resolv='/etc/resolv.conf',
                  reactor=Clock())
     self.assertTrue(resolvConf.closed)
Example #37
 def deviceList(self, request, tag):
     if not CommandServer.isDisabled:
         for device in sorted(self.deviceServerFactory.devices):
             try:
                 device_path = FilePath(
                     'hamjab/resources/devices/{device}/device.json'.format(
                         device=device))
                 with device_path.open() as device_file:
                     device_data = json.load(device_file)
                     deviceName = device_data['name']
             except Exception:
                 deviceName = device
             yield tag.clone().fillSlots(deviceId=device,
                                         deviceName=deviceName)
Example #38
 def load_connections(self):
     """
     Load the connections.yaml file if it exists, otherwise
     create a default configuration.
     """
     fn = os.path.join(self.basedir, "private", "connections.yaml")
     connections_filepath = FilePath(fn)
     try:
         with connections_filepath.open() as f:
             self.connections_config = yamlutil.safe_load(f)
     except EnvironmentError:
         self.connections_config = { 'servers' : {} }
         content = yamlutil.safe_dump(self.connections_config)
         connections_filepath.setContent(content)
Example #39
 def test_closesResolvConf(self):
     """
     As part of its constructor, C{StubResolver} opens C{/etc/resolv.conf};
     then, explicitly closes it and does not count on the GC to do so for
     it.
     """
     handle = FilePath(self.mktemp())
     resolvConf = handle.open(mode='w+')
     class StubResolver(client.Resolver):
         def _openFile(self, name):
             return resolvConf
     StubResolver(servers=["example.com", 53], resolv='/etc/resolv.conf',
                  reactor=Clock())
     self.assertTrue(resolvConf.closed)
Example #40
 def load_connections(self):
     """
     Load the connections.yaml file if it exists, otherwise
     create a default configuration.
     """
     fn = os.path.join(self.basedir, "private", "connections.yaml")
     connections_filepath = FilePath(fn)
     try:
         with connections_filepath.open() as f:
             self.connections_config = yamlutil.safe_load(f)
     except EnvironmentError:
         self.connections_config = { 'servers' : {} }
         content = yamlutil.safe_dump(self.connections_config)
         connections_filepath.setContent(content)
Example #41
        def _wrap(path, **template):
            scratchfile = path.dirname() + "/." + path.basename() + ".tmp"
            fh = path.open('r')

            sfp = FilePath(scratchfile)
            sfh = sfp.open('w')
            # Substitute the template values line by line into a hidden
            # scratch file, then atomically replace the original.
            for line in fh.readlines():
                sfh.write(line.format(**template))
            sfh.flush()
            sfh.close()
            fh.close()

            sfp.moveTo(path.realpath())
Example #42
class BootstrapRemoteOriginRead(unittest.TestCase):
    test_data = """line1
line2
anotherline"""
    port = 65466

    def setUp(self):
        self.clock = Clock()
        self.tmp_dir_path = tempfile.mkdtemp()
        self.target = FilePath(self.tmp_dir_path).child('foo')
        with self.target.open('wb') as temp_fd:
            temp_fd.write(self.test_data)
        self.reader = DelayedReader(self.target, _clock=self.clock, delay=2)
        self.transport = FakeTransport(hostAddress=('127.0.0.1', self.port))
        self.rs = RemoteOriginReadSession(('127.0.0.1', 65465), self.reader, _clock=self.clock)
        self.rs.transport = self.transport

    @inlineCallbacks
    def test_invalid_tid(self):
        self.rs.startProtocol()
        data_datagram = DATADatagram(1, 'foobar')
        yield self.rs.datagramReceived(data_datagram, ('127.0.0.1', 11111))
        err_dgram = TFTPDatagramFactory(*split_opcode(self.transport.value()))
        self.assertEqual(err_dgram.errorcode, ERR_TID_UNKNOWN)
        self.addCleanup(self.rs.cancel)

    def test_remote_origin_read_bootstrap(self):
        # First datagram
        self.rs.session.block_size = 5
        self.rs.startProtocol()
        self.clock.pump((1,)*3)

        data_datagram_1 = DATADatagram(1, self.test_data[:5])

        self.assertEqual(self.transport.value(), data_datagram_1.to_wire())
        self.failIf(self.transport.disconnecting)

        # Normal exchange continues
        self.transport.clear()
        self.rs.datagramReceived(ACKDatagram(1).to_wire(), ('127.0.0.1', 65465))
        self.clock.pump((1,)*3)
        data_datagram_2 = DATADatagram(2, self.test_data[5:10])
        self.assertEqual(self.transport.value(), data_datagram_2.to_wire())
        self.failIf(self.transport.disconnecting)
        self.addCleanup(self.rs.cancel)

    def tearDown(self):
        shutil.rmtree(self.tmp_dir_path)
Example #43
class BootstrapRemoteOriginRead(unittest.TestCase):
    test_data = """line1
line2
anotherline"""
    port = 65466

    def setUp(self):
        self.clock = Clock()
        self.tmp_dir_path = tempfile.mkdtemp()
        self.target = FilePath(self.tmp_dir_path).child('foo')
        with self.target.open('wb') as temp_fd:
            temp_fd.write(self.test_data)
        self.reader = DelayedReader(self.target, _clock=self.clock, delay=2)
        self.transport = FakeTransport(hostAddress=('127.0.0.1', self.port))
        self.rs = RemoteOriginReadSession(('127.0.0.1', 65465), self.reader, _clock=self.clock)
        self.rs.transport = self.transport

    @inlineCallbacks
    def test_invalid_tid(self):
        self.rs.startProtocol()
        data_datagram = DATADatagram(1, 'foobar')
        yield self.rs.datagramReceived(data_datagram, ('127.0.0.1', 11111))
        err_dgram = TFTPDatagramFactory(*split_opcode(self.transport.value()))
        self.assertEqual(err_dgram.errorcode, ERR_TID_UNKNOWN)
        self.addCleanup(self.rs.cancel)

    def test_remote_origin_read_bootstrap(self):
        # First datagram
        self.rs.session.block_size = 5
        self.rs.startProtocol()
        self.clock.pump((1,)*3)

        data_datagram_1 = DATADatagram(1, self.test_data[:5])

        self.assertEqual(self.transport.value(), data_datagram_1.to_wire())
        self.failIf(self.transport.disconnecting)

        # Normal exchange continues
        self.transport.clear()
        self.rs.datagramReceived(ACKDatagram(1).to_wire(), ('127.0.0.1', 65465))
        self.clock.pump((1,)*3)
        data_datagram_2 = DATADatagram(2, self.test_data[5:10])
        self.assertEqual(self.transport.value(), data_datagram_2.to_wire())
        self.failIf(self.transport.disconnecting)
        self.addCleanup(self.rs.cancel)

    def tearDown(self):
        shutil.rmtree(self.tmp_dir_path)
Example #44
 def load_static_servers(self):
     """
     Load the servers.yaml file if it exists, and provide the static
     server data to the StorageFarmBroker.
     """
     fn = self.config.get_private_path("servers.yaml")
     servers_filepath = FilePath(fn)
     try:
         with servers_filepath.open() as f:
             servers_yaml = yamlutil.safe_load(f)
         static_servers = servers_yaml.get("storage", {})
         log.msg("found %d static servers in private/servers.yaml" %
                 len(static_servers))
         self.storage_broker.set_static_servers(static_servers)
     except EnvironmentError:
         pass
Example #45
 def load_static_servers(self):
     """
     Load the servers.yaml file if it exists, and provide the static
     server data to the StorageFarmBroker.
     """
     fn = os.path.join(self.basedir, "private", "servers.yaml")
     servers_filepath = FilePath(fn)
     try:
         with servers_filepath.open() as f:
             servers_yaml = yamlutil.safe_load(f)
         static_servers = servers_yaml.get("storage", {})
         log.msg("found %d static servers in private/servers.yaml" %
                 len(static_servers))
         self.storage_broker.set_static_servers(static_servers)
     except EnvironmentError:
         pass
Example #46
def getChannelStatus():
    statusFiles = mumudvblogdir.globChildren('channels*')
    for path in statusFiles:
        card = path.path.split('adapter')[1].split('_')[0]
        for line in path.open():
            fields = line.split(':')
            channel = fields[0] + ':' + fields[1]
            channelStatus[channel] = {}
            try:
                channelStatus[channel]['streamstatus'] = fields[3][:-1]
            except IndexError:
                channelStatus[channel]['streamstatus'] = 'NotTransmitted'
            channelStatus[channel]['card'] = card
            channelStatus[channel]['ip'] = channel
            channelStatus[channel]['name'] = fields[2]
            # Set cardstatus to 0 if it does not yet exist
            channelStatus[channel]['cardstatus'] = channelStatus[channel].get('cardstatus', 0)
Example #47
class BootstrapRemoteOriginWrite(unittest.TestCase):

    port = 65466

    def setUp(self):
        self.clock = Clock()
        self.tmp_dir_path = tempfile.mkdtemp()
        self.target = FilePath(self.tmp_dir_path).child("foo")
        self.writer = DelayedWriter(self.target, _clock=self.clock, delay=2)
        self.transport = FakeTransport(hostAddress=("127.0.0.1", self.port))
        self.ws = RemoteOriginWriteSession(("127.0.0.1", 65465), self.writer, _clock=self.clock)
        self.ws.transport = self.transport
        self.ws.startProtocol()

    @inlineCallbacks
    def test_invalid_tid(self):
        bad_tid_dgram = ACKDatagram(123)
        yield self.ws.datagramReceived(bad_tid_dgram.to_wire(), ("127.0.0.1", 1111))
        err_dgram = TFTPDatagramFactory(*split_opcode(self.transport.value()))
        self.assertEqual(err_dgram.errorcode, ERR_TID_UNKNOWN)
        self.addCleanup(self.ws.cancel)

    def test_remote_origin_write_bootstrap(self):
        # Initial ACK
        ack_datagram_0 = ACKDatagram(0)
        self.clock.advance(0.1)
        self.assertEqual(self.transport.value(), ack_datagram_0.to_wire())
        self.failIf(self.transport.disconnecting)

        # Normal exchange
        self.transport.clear()
        d = self.ws.datagramReceived(DATADatagram(1, "foobar").to_wire(), ("127.0.0.1", 65465))

        def cb(res):
            self.clock.advance(0.1)
            ack_datagram_1 = ACKDatagram(1)
            self.assertEqual(self.transport.value(), ack_datagram_1.to_wire())
            self.assertEqual(self.target.open("r").read(), "foobar")
            self.failIf(self.transport.disconnecting)
            self.addCleanup(self.ws.cancel)

        d.addCallback(cb)
        self.clock.advance(3)
        return d

    def tearDown(self):
        shutil.rmtree(self.tmp_dir_path)
Example #48
def fakeGetPageURL(url):
    """
    Used for mocking coherence.upnp.core.utils.getPage. Returns the
    content of the file with the name taken from the final component
    of a url-path.

    Example:
      http://1.2.3.4/a/b/c/some.xml -> <module-dir>/some.xml
    """
    path = urlparse.urlparse(url).path
    path = posixpath.normpath(path)
    words = path.split('/')
    words = filter(None, words)[-1:]
    file = FilePath(os.path.join(FILE_BASE, *words))
    d = Deferred()
    d.callback((file.open().read(), {}))
    return d
Example #49
 def test_invalid_file_data(self):
     """
     Invalid file data handled.
     """
     invalid_file = FilePath(self.mktemp())
     with invalid_file.open('w') as f:
         f.write('hello\n')
     options = BenchmarkOptions()
     options.parseOptions(['--userdata', '@{}'.format(invalid_file.path)])
     with capture_stderr() as captured_stderr:
         exception = self.assertRaises(
             SystemExit, parse_userdata, options
         )
         self.assertIn(
             'Invalid user data', exception.args[0]
         )
         self.assertIn(options.getUsage(), captured_stderr())
Example #50
def fakeGetPageURL(url):
    """
    Used for mocking coherence.upnp.core.utils.getPage. Returns the
    content of the file with the name taken from the final component
    of a url-path.

    Example:
      http://1.2.3.4/a/b/c/some.xml -> <module-dir>/some.xml
    """
    path = urlparse.urlparse(url).path
    path = posixpath.normpath(path)
    words = path.split('/')
    words = filter(None, words)[-1:]
    file = FilePath(os.path.join(FILE_BASE, *words))
    d = Deferred()
    d.callback((file.open().read(), {}))
    return d
Example #51
class BootstrapLocalOriginRead(unittest.TestCase):
    test_data = """line1
line2
anotherline"""
    port = 65466

    def setUp(self):
        self.clock = Clock()
        self.tmp_dir_path = tempfile.mkdtemp()
        self.target = FilePath(self.tmp_dir_path).child('foo')
        with self.target.open('wb') as temp_fd:
            temp_fd.write(self.test_data)
        self.reader = DelayedReader(self.target, _clock=self.clock, delay=2)
        self.transport = FakeTransport(hostAddress=('127.0.0.1', self.port))
        self.rs = LocalOriginReadSession(('127.0.0.1', 65465), self.reader, _clock=self.clock)
        self.wd = MockHandshakeWatchdog(4, self.rs.timedOut, _clock=self.clock)
        self.rs.timeout_watchdog = self.wd
        self.rs.transport = self.transport
        self.rs.startProtocol()

    def test_invalid_tid(self):
        data_datagram = DATADatagram(1, 'foobar')
        self.rs.datagramReceived(data_datagram, ('127.0.0.1', 11111))
        self.clock.advance(0.1)
        err_dgram = TFTPDatagramFactory(*split_opcode(self.transport.value()))
        self.assertEqual(err_dgram.errorcode, ERR_TID_UNKNOWN)
        self.addCleanup(self.rs.cancel)

    def test_local_origin_read_session_handshake_timeout(self):
        self.clock.advance(5)
        self.failIf(self.transport.value())
        self.failUnless(self.transport.disconnecting)

    def test_local_origin_read_session_handshake_success(self):
        self.clock.advance(1)
        ack_datagram = ACKDatagram(0)
        self.rs.datagramReceived(ack_datagram.to_wire(), ('127.0.0.1', 65465))
        self.clock.advance(2)
        self.failUnless(self.transport.value())
        self.failIf(self.transport.disconnecting)
        self.failIf(self.wd.active())
        self.addCleanup(self.rs.cancel)

    def tearDown(self):
        shutil.rmtree(self.tmp_dir_path)
Example #52
class BootstrapLocalOriginRead(unittest.TestCase):
    test_data = """line1
line2
anotherline"""
    port = 65466

    def setUp(self):
        self.clock = Clock()
        self.tmp_dir_path = tempfile.mkdtemp()
        self.target = FilePath(self.tmp_dir_path).child('foo')
        with self.target.open('wb') as temp_fd:
            temp_fd.write(self.test_data)
        self.reader = DelayedReader(self.target, _clock=self.clock, delay=2)
        self.transport = FakeTransport(hostAddress=('127.0.0.1', self.port))
        self.rs = LocalOriginReadSession(('127.0.0.1', 65465), self.reader, _clock=self.clock)
        self.wd = MockHandshakeWatchdog(4, self.rs.timedOut, _clock=self.clock)
        self.rs.timeout_watchdog = self.wd
        self.rs.transport = self.transport
        self.rs.startProtocol()

    def test_invalid_tid(self):
        data_datagram = DATADatagram(1, 'foobar')
        self.rs.datagramReceived(data_datagram, ('127.0.0.1', 11111))
        self.clock.advance(0.1)
        err_dgram = TFTPDatagramFactory(*split_opcode(self.transport.value()))
        self.assertEqual(err_dgram.errorcode, ERR_TID_UNKNOWN)
        self.addCleanup(self.rs.cancel)

    def test_local_origin_read_session_handshake_timeout(self):
        self.clock.advance(5)
        self.failIf(self.transport.value())
        self.failUnless(self.transport.disconnecting)

    def test_local_origin_read_session_handshake_success(self):
        self.clock.advance(1)
        ack_datagram = ACKDatagram(0)
        self.rs.datagramReceived(ack_datagram.to_wire(), ('127.0.0.1', 65465))
        self.clock.advance(2)
        self.failUnless(self.transport.value())
        self.failIf(self.transport.disconnecting)
        self.failIf(self.wd.active())
        self.addCleanup(self.rs.cancel)

    def tearDown(self):
        shutil.rmtree(self.tmp_dir_path)
Example #53
class BootstrapRemoteOriginWrite(unittest.TestCase):

    port = 65466

    def setUp(self):
        self.clock = Clock()
        self.tmp_dir_path = tempfile.mkdtemp()
        self.target = FilePath(self.tmp_dir_path).child('foo')
        self.writer = DelayedWriter(self.target, _clock=self.clock, delay=2)
        self.transport = FakeTransport(hostAddress=('127.0.0.1', self.port))
        self.ws = RemoteOriginWriteSession(('127.0.0.1', 65465), self.writer, _clock=self.clock)
        self.ws.transport = self.transport
        self.ws.startProtocol()

    @inlineCallbacks
    def test_invalid_tid(self):
        bad_tid_dgram = ACKDatagram(123)
        yield self.ws.datagramReceived(bad_tid_dgram.to_wire(), ('127.0.0.1', 1111))
        err_dgram = TFTPDatagramFactory(*split_opcode(self.transport.value()))
        self.assertEqual(err_dgram.errorcode, ERR_TID_UNKNOWN)
        self.addCleanup(self.ws.cancel)

    def test_remote_origin_write_bootstrap(self):
        # Initial ACK
        ack_datagram_0 = ACKDatagram(0)
        self.clock.advance(0.1)
        self.assertEqual(self.transport.value(), ack_datagram_0.to_wire())
        self.failIf(self.transport.disconnecting)

        # Normal exchange
        self.transport.clear()
        d = self.ws.datagramReceived(DATADatagram(1, 'foobar').to_wire(), ('127.0.0.1', 65465))
        def cb(res):
            self.clock.advance(0.1)
            ack_datagram_1 = ACKDatagram(1)
            self.assertEqual(self.transport.value(), ack_datagram_1.to_wire())
            self.assertEqual(self.target.open('r').read(), 'foobar')
            self.failIf(self.transport.disconnecting)
            self.addCleanup(self.ws.cancel)
        d.addCallback(cb)
        self.clock.advance(3)
        return d

    def tearDown(self):
        shutil.rmtree(self.tmp_dir_path)
Example #54
class LocalOriginReadOptionNegotiation(unittest.TestCase):
    test_data = """line1
line2
anotherline"""
    port = 65466

    def setUp(self):
        self.clock = Clock()
        self.tmp_dir_path = tempfile.mkdtemp()
        self.target = FilePath(self.tmp_dir_path).child('foo')
        with self.target.open('wb') as temp_fd:
            temp_fd.write(self.test_data)
        self.reader = DelayedReader(self.target, _clock=self.clock, delay=2)
        self.transport = FakeTransport(hostAddress=('127.0.0.1', self.port))
        self.rs = LocalOriginReadSession(('127.0.0.1', 65465), self.reader, _clock=self.clock)
        self.wd = MockHandshakeWatchdog(4, self.rs.timedOut, _clock=self.clock)
        self.rs.timeout_watchdog = self.wd
        self.rs.transport = self.transport

    def test_option_normal(self):
        self.rs.startProtocol()
        self.rs.datagramReceived(OACKDatagram({'blksize':'9'}).to_wire(), ('127.0.0.1', 65465))
        self.clock.advance(0.1)
        self.assertEqual(self.rs.session.block_size, 9)
        self.clock.pump((1,)*3)
        self.assertEqual(self.transport.value(), DATADatagram(1, self.test_data[:9]).to_wire())

        self.rs.datagramReceived(OACKDatagram({'blksize':'12'}).to_wire(), ('127.0.0.1', 65465))
        self.clock.advance(0.1)
        self.assertEqual(self.rs.session.block_size, 9)

        self.transport.clear()
        self.rs.datagramReceived(ACKDatagram(1).to_wire(), ('127.0.0.1', 65465))
        self.clock.pump((1,)*3)
        self.assertEqual(self.transport.value(), DATADatagram(2, self.test_data[9:18]).to_wire())

        self.addCleanup(self.rs.cancel)

    def test_local_origin_read_option_timeout(self):
        self.rs.startProtocol()
        self.clock.advance(5)
        self.failUnless(self.transport.disconnecting)

    def tearDown(self):
        shutil.rmtree(self.tmp_dir_path)
Example #55
class LocalOriginReadOptionNegotiation(unittest.TestCase):
    test_data = """line1
line2
anotherline"""
    port = 65466

    def setUp(self):
        self.clock = Clock()
        self.tmp_dir_path = tempfile.mkdtemp()
        self.target = FilePath(self.tmp_dir_path).child('foo')
        with self.target.open('wb') as temp_fd:
            temp_fd.write(self.test_data)
        self.reader = DelayedReader(self.target, _clock=self.clock, delay=2)
        self.transport = FakeTransport(hostAddress=('127.0.0.1', self.port))
        self.rs = LocalOriginReadSession(('127.0.0.1', 65465), self.reader, _clock=self.clock)
        self.wd = MockHandshakeWatchdog(4, self.rs.timedOut, _clock=self.clock)
        self.rs.timeout_watchdog = self.wd
        self.rs.transport = self.transport

    def test_option_normal(self):
        self.rs.startProtocol()
        self.rs.datagramReceived(OACKDatagram({'blksize':'9'}).to_wire(), ('127.0.0.1', 65465))
        self.clock.advance(0.1)
        self.assertEqual(self.rs.session.block_size, 9)
        self.clock.pump((1,)*3)
        self.assertEqual(self.transport.value(), DATADatagram(1, self.test_data[:9]).to_wire())

        self.rs.datagramReceived(OACKDatagram({'blksize':'12'}).to_wire(), ('127.0.0.1', 65465))
        self.clock.advance(0.1)
        self.assertEqual(self.rs.session.block_size, 9)

        self.transport.clear()
        self.rs.datagramReceived(ACKDatagram(1).to_wire(), ('127.0.0.1', 65465))
        self.clock.pump((1,)*3)
        self.assertEqual(self.transport.value(), DATADatagram(2, self.test_data[9:18]).to_wire())

        self.addCleanup(self.rs.cancel)

    def test_local_origin_read_option_timeout(self):
        self.rs.startProtocol()
        self.clock.advance(5)
        self.failUnless(self.transport.disconnecting)

    def tearDown(self):
        shutil.rmtree(self.tmp_dir_path)
Example #56
    def init_introducer_clients(self):
        self.introducer_clients = []
        self.introducer_furls = []

        introducers_yaml_filename = os.path.join(self.basedir, "private",
                                                 "introducers.yaml")
        introducers_filepath = FilePath(introducers_yaml_filename)

        try:
            with introducers_filepath.open() as f:
                introducers_yaml = yamlutil.safe_load(f)
                introducers = introducers_yaml.get("introducers", {})
                log.msg("found %d introducers in private/introducers.yaml" %
                        len(introducers))
        except EnvironmentError:
            introducers = {}

        if "default" in introducers.keys():
            raise ValueError(
                "'default' introducer furl cannot be specified in introducers.yaml; please fix impossible configuration."
            )

        # read furl from tahoe.cfg
        tahoe_cfg_introducer_furl = self.get_config("client",
                                                    "introducer.furl", None)
        if tahoe_cfg_introducer_furl == "None":
            raise ValueError("tahoe.cfg has invalid 'introducer.furl = None':"
                             " to disable it, use 'introducer.furl ='"
                             " or omit the key entirely")
        if tahoe_cfg_introducer_furl:
            introducers[u'default'] = {'furl': tahoe_cfg_introducer_furl}

        for petname, introducer in introducers.items():
            introducer_cache_filepath = FilePath(
                os.path.join(self.basedir, "private",
                             "introducer_{}_cache.yaml".format(petname)))
            ic = IntroducerClient(self.tub, introducer['furl'].encode("ascii"),
                                  self.nickname,
                                  str(allmydata.__full_version__),
                                  str(self.OLDEST_SUPPORTED_VERSION),
                                  self.get_app_versions(), self._sequencer,
                                  introducer_cache_filepath)
            self.introducer_clients.append(ic)
            self.introducer_furls.append(introducer['furl'])
            ic.setServiceParent(self)
Example #57
class PublicKeyChecker(SSHPublicKeyDatabase):

    def __init__(self, filename):
        self.filepath = FilePath(filename)

    def getAuthorizedKeysFiles(self, credentials):
        return [self.filepath]

    def checkKey(self, credentials):
        for line in self.filepath.open():
            parts = line.split()
            if len(parts) < 2:
                continue
            try:
                if base64.decodestring(parts[1]) == credentials.blob:
                    return True
            except binascii.Error:
                continue
        return False
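A slightly more defensive variant of checkKey() closes the authorized_keys handle explicitly and uses base64.b64decode(), which, unlike the deprecated decodestring(), also exists on Python 3; a sketch for illustration, not the checker above:

    def checkKey(self, credentials):
        with self.filepath.open() as keys_file:
            for line in keys_file:
                parts = line.split()
                if len(parts) < 2:
                    continue
                try:
                    if base64.b64decode(parts[1]) == credentials.blob:
                        return True
                except binascii.Error:
                    continue
        return False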
Example #58
    def test_file_logging_rotation_at_100MiB(self):
        """
        Logfiles are rotated when they reach 100MiB.
        """
        logfile = FilePath(self.mktemp()).child('foo.log')
        logfile.parent().makedirs()
        with logfile.open('w') as f:
            f.truncate(int(MiB(100).to_Byte().value - 1))

        d = self.run_script(EliotScript, options=['--logfile', logfile.path])

        def verify_logfiles(stdout_messages, logfile):
            self.assertEqual(
                set([logfile, logfile.sibling(logfile.basename() + u'.1')]),
                set(logfile.parent().children())
            )
        d.addCallback(verify_logfiles, logfile=logfile)

        return d
Example #59
def create_zfs_pool(test_case):
    """Create a new ZFS pool, then delete it after the test is over.

    :param test_case: A ``unittest.TestCase``.

    :return: The pool's name as ``bytes``.
    """
    if os.getuid() != 0:
        raise SkipTest("Functional tests must run as root.")

    pool_name = b"testpool_%s" % (uuid.uuid4(), )
    pool_path = FilePath(test_case.mktemp())
    mount_path = FilePath(test_case.mktemp())
    with pool_path.open("wb") as f:
        f.truncate(100 * 1024 * 1024)
    test_case.addCleanup(pool_path.remove)
    subprocess.check_call([
        b"zpool", b"create", b"-m", mount_path.path, pool_name, pool_path.path
    ])
    test_case.addCleanup(subprocess.check_call,
                         [b"zpool", b"destroy", pool_name])
    return pool_name