def _get_capinfos(self, username, ip_addr):
    """
    Get capinfos of user's pcap.

    This should be run after the account has expired and after
    executing pcapsummarizer on the user's directory.

    :param username (str): account username
    :param ip_addr (IPv4Address): IP address allocated for the account.
    :return: String with capinfos content, or None if no ``.capinfos``
        files were found in the user's directory.
    """
    user_dir = "{}_{}".format(username, str(ip_addr))
    user_dir = os.path.join(self.path['pcaps'], user_dir)
    user_dir_fp = FilePath(user_dir)
    capinfos = None
    for f in user_dir_fp.listdir():
        filename = os.path.join(user_dir, f)
        fp = FilePath(filename)
        # Fix: use a raw string so "\." is a literal dot; the previous
        # non-raw "\." is an invalid escape sequence in modern Python.
        if re.match(r".*\.capinfos$", fp.basename()):
            if capinfos is None:
                capinfos = fp.getContent()
            else:
                # Concatenate multiple capinfos files separated by a
                # blank line.
                capinfos = "{}\n\n{}".format(capinfos, fp.getContent())
    return capinfos
def parseArgs(self, deployment_config, application_config):
    """
    Validate and parse the deployment and application configuration
    files given on the command line.

    :param deployment_config: Path to the deployment configuration file.
    :param application_config: Path to the application configuration
        file (Fig or Flocker format).
    :raise UsageError: If either file is missing, is not valid YAML, or
        does not describe a valid configuration.
    """
    deployment_config = FilePath(deployment_config)
    application_config = FilePath(application_config)
    if not deployment_config.exists():
        raise UsageError('No file exists at {path}'
                         .format(path=deployment_config.path))
    if not application_config.exists():
        raise UsageError('No file exists at {path}'
                         .format(path=application_config.path))
    # Store the raw file contents; the application config may be
    # replaced below with a Flocker-format translation.
    self["deployment_config"] = deployment_config.getContent()
    self["application_config"] = application_config.getContent()
    try:
        deploy_config_obj = safe_load(self["deployment_config"])
    except YAMLError as e:
        raise UsageError(
            ("Deployment configuration at {path} could not be parsed as "
             "YAML:\n\n{error}").format(
                path=deployment_config.path,
                error=str(e)
            )
        )
    try:
        app_config_obj = safe_load(self["application_config"])
    except YAMLError as e:
        raise UsageError(
            ("Application configuration at {path} could not be parsed as "
             "YAML:\n\n{error}").format(
                path=application_config.path,
                error=str(e)
            )
        )
    try:
        # Try the Fig format first; when it applies, store the
        # equivalent Flocker YAML so downstream consumers see a single
        # format.
        fig_configuration = FigConfiguration(app_config_obj)
        if fig_configuration.is_valid_format():
            applications = fig_configuration.applications()
            self['application_config'] = (
                applications_to_flocker_yaml(applications)
            )
        else:
            configuration = FlockerConfiguration(app_config_obj)
            if configuration.is_valid_format():
                applications = configuration.applications()
            else:
                raise ConfigurationError(
                    "Configuration is not a valid Fig or Flocker format."
                )
        self['deployment'] = model_from_configuration(
            applications=applications,
            deployment_configuration=deploy_config_obj)
    except ConfigurationError as e:
        raise UsageError(str(e))
def from_paths(
    cls, endpoint, private_key_path: FilePath, cert_path: FilePath
) -> "_TLSEndpointWrapper":
    """
    Create an endpoint with the given private key and certificate paths
    on the filesystem.

    :param endpoint: The underlying endpoint to wrap with TLS.
    :param private_key_path: Path to the PEM private key file.
    :param cert_path: Path to the PEM certificate file.
    :return: A new ``_TLSEndpointWrapper`` using the loaded credentials.
    """
    certificate = Certificate.loadPEM(cert_path.getContent()).original
    # PrivateCertificate.loadPEM needs both certificate and key in one
    # PEM blob, so the two files are concatenated before loading.
    private_key = PrivateCertificate.loadPEM(
        cert_path.getContent() + b"\n" + private_key_path.getContent()
    ).privateKey.original
    certificate_options = CertificateOptions(
        privateKey=private_key, certificate=certificate
    )
    return cls(endpoint=endpoint, context_factory=certificate_options)
def postOptions(self):
    """
    Validate the parsed options and build the ``PackageSource``
    describing which Flocker client packages to install.

    :raise UsageError: if no distribution was given, or the
        distribution is not supported for the chosen install method.
    """
    if self['distribution'] is None:
        raise UsageError("Distribution required.")
    if self['config-file'] is not None:
        config_file = FilePath(self['config-file'])
        self['config'] = yaml.safe_load(config_file.getContent())
    else:
        self['config'] = {}
    if self['flocker-version']:
        rpm_version = make_rpm_version(self['flocker-version'])
        os_version = "%s-%s" % (rpm_version.version, rpm_version.release)
        # Strip the '.dirty' suffix produced by builds from a modified
        # working tree.
        if os_version.endswith('.dirty'):
            os_version = os_version[:-len('.dirty')]
    else:
        os_version = None
    self['package_source'] = PackageSource(
        version=self['flocker-version'],
        os_version=os_version,
        branch=self['branch'],
        build_server=self['build-server'],
    )
    # pip installs support a different set of distributions than
    # native OS packages do.
    if self['pip']:
        supported = PIP_DISTRIBUTIONS
    else:
        supported = PACKAGED_CLIENT_DISTRIBUTIONS
    if self['distribution'] not in supported:
        raise UsageError(
            "Distribution %r not supported. Available distributions: %s"
            % (self['distribution'], ', '.join(supported)))
def test_no_config_written(self):
    """If no config file exists, a new one is written with the UUID."""
    config_path = FilePath(self.mktemp())
    service = VolumeService(config_path, None, reactor=Clock())
    service.startService()
    written = json.loads(config_path.getContent())
    self.assertEqual({u"version": 1, u"uuid": service.uuid}, written)
def get_backend_api(test_case, cluster_id):
    """
    Get an appropriate BackendAPI for the specified dataset backend.

    Note this is a backdoor that is useful to be able to interact with
    cloud APIs in tests. For many dataset backends this does not make
    sense, but it provides a convenient means to interact with cloud
    backends such as EBS or cinder.

    :param test_case: The test case that is being run.
    :param cluster_id: The unique cluster_id, used for backend APIs
        that require this in order to be constructed.
    :raise SkipTest: if the backend is not aws, the config environment
        variable is unset, or the config lacks a stanza for the backend.
    """
    backend_type = get_dataset_backend(test_case)
    if backend_type != DatasetBackend.aws:
        raise SkipTest(
            'This test is asking for backend type {} but only constructing '
            'aws backends is currently supported'.format(backend_type.name))
    backend_name = backend_type.name
    backend_config_filename = environ.get(
        "FLOCKER_ACCEPTANCE_TEST_VOLUME_BACKEND_CONFIG")
    if backend_config_filename is None:
        raise SkipTest(
            'This test requires the ability to construct an IBlockDeviceAPI '
            'in order to verify construction. Please set '
            'FLOCKER_ACCEPTANCE_TEST_VOLUME_BACKEND_CONFIG to a yaml filepath '
            'with the dataset configuration.')
    backend_config_filepath = FilePath(backend_config_filename)
    full_backend_config = yaml.safe_load(backend_config_filepath.getContent())
    backend_config = full_backend_config.get(backend_name)
    # Robustness fix: a missing stanza previously crashed with
    # TypeError ("'backend' in None"); skip with a clear message.
    if backend_config is None:
        raise SkipTest(
            'The backend configuration file has no {!r} stanza.'.format(
                backend_name))
    # The 'backend' key names the driver and is not a constructor
    # argument; pop with a default instead of test-then-pop.
    backend_config.pop('backend', None)
    return aws_from_configuration(cluster_id=cluster_id, **backend_config)
def onConnect(self, request):
    """
    WebSocket connection handler: authorise the client via its 'wsid'
    cookie and prepare a per-camera index.html for HLS playback.

    Closes the connection with code 1000 when authorisation fails.
    """
    myAES = AESCipher(key)
    if 'cookie' in request.headers:
        # Fix: bind `cookie` before the try block. Previously it was
        # only assigned inside the try, so an unparsable cookie header
        # (Cookie.CookieError) left it unbound and the 'wsid' lookup
        # below raised NameError instead of rejecting the client.
        cookie = Cookie.SimpleCookie()
        try:
            cookie.load(str(request.headers['cookie']))
        except Cookie.CookieError:
            pass
        if ('wsid' in cookie) and ('PSClient' in request.headers['user-agent']):
            wsid = cookie['wsid'].value
            cambot = json.loads(myAES.decrypt(wsid))
            if cambot['id'] in request.path:
                self.temp_location = self.factory.temp_path.child(cambot['id'])
                if not self.temp_location.exists():
                    self.temp_location.makedirs()
                f = self.temp_location.child(u'index.html')
                g = FilePath("/home/chetan/pscore/templates/live_hls.html").asTextMode()
                content = g.getContent()
                # Substitute the camera id into the HLS player template.
                new = content.replace("++camid++", cambot['id'])
                f.setContent(new)
                return None
            else:
                self.sendClose(1000, "Not authorised")
        else:
            self.sendClose(1000, "Not authorised")
    else:
        self.sendClose(1000, "Not authorised")
def test_createSSLPort(self):
    """
    Given a valid SSL strport description and the storeID of an
    existing factory, I{axiomatic port create} creates an L{SSLPort}
    with that configuration, copies the certificate into the store's
    files directory, and powers the port up for L{IService}.
    """
    pemData = CERTIFICATE_DATA + PRIVATEKEY_DATA
    pemPath = FilePath(self.mktemp())
    pemPath.setContent(pemData)
    store = Store(filesdir=self.mktemp())
    factory = DummyFactory(store=store)
    description = (
        "ssl:8443:certKey=" + pemPath.path +
        ":privateKey=" + pemPath.path)
    self.assertSuccessStatus(
        self._makeConfig(store),
        ["create", "--strport", description,
         "--factory-identifier", str(factory.storeID)])
    self.assertEqual("Created.\n", sys.stdout.getvalue())
    [ssl] = list(store.query(SSLPort))
    self.assertEqual(8443, ssl.portNumber)
    self.assertEqual(pemData, ssl.certificatePath.getContent())
    self.assertIdentical(factory, ssl.factory)
    # The original PEM file must be left untouched.
    self.assertEqual(pemData, pemPath.getContent())
    self.assertEqual([IService], list(store.interfacesFor(ssl)))
def load_or_create_client_key(key_file):
    """Load the ACME account key from a file, creating it if it does not
    exist.

    Args:
        key_file (str): name of the file to use as the account key
    """
    # this is based on txacme.endpoint.load_or_create_client_key, but
    # doesn't hardcode the 'client.key' filename
    acme_key_file = FilePath(key_file)
    if acme_key_file.exists():
        logger.info("Loading ACME account key from '%s'", acme_key_file)
        loaded = serialization.load_pem_private_key(
            acme_key_file.getContent(),
            password=None,
            backend=default_backend(),
        )
        return JWKRSA(key=loaded)
    logger.info("Saving new ACME account key to '%s'", acme_key_file)
    key = generate_private_key("rsa")
    pem_bytes = key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.TraditionalOpenSSL,
        encryption_algorithm=serialization.NoEncryption(),
    )
    acme_key_file.setContent(pem_bytes)
    return JWKRSA(key=key)
def test_full(self):
    """
    Running C{calendarserver_export} on the command line exports an ics
    file. (Almost-full integration test, starting from the main point,
    using as few test fakes as possible.)

    Note: currently the only test for directory interaction.
    """
    calendars = {
        "user02": {
            # TODO: more direct test for skipping inbox
            "inbox": {
                "inbox-item.ics": (valentines, {})
            },
            "calendar1": {
                "peruser.ics": (dataForTwoUsers, {}),  # EST
            }
        }
    }
    yield populateCalendarsFrom(calendars, self.store)
    output = FilePath(self.mktemp())
    argv = ['calendarserver_export', '--output', output.path,
            '--user', 'user02']
    main(argv, reactor=self)
    yield self.waitToStop
    self.assertEquals(
        Component.fromString(resultForUser2),
        Component.fromString(output.getContent())
    )
def test_full(self):
    """
    Running C{calendarserver_export} on the command line exports an ics
    file. (Almost-full integration test, starting from the main point,
    using as few test fakes as possible.)

    Note: currently the only test for directory interaction.
    """
    yield populateCalendarsFrom(
        {
            "user02": {
                # TODO: more direct test for skipping inbox
                "inbox": {
                    "inbox-item.ics": (valentines, {})
                },
                "calendar1": {
                    "peruser.ics": (dataForTwoUsers, {}),  # EST
                }
            }
        }, self.store)
    exported = FilePath(self.mktemp())
    main(
        ['calendarserver_export',
         '--output', exported.path,
         '--user', 'user02'],
        reactor=self)
    yield self.waitToStop
    expected = Component.fromString(resultForUser2)
    actual = Component.fromString(exported.getContent())
    self.assertEquals(expected, actual)
def onConnect(self, request):
    """
    WebSocket connection handler: authorise the client via its 'wsid'
    cookie and prepare a per-camera index.html for HLS playback.

    Closes the connection with code 1000 when authorisation fails.
    """
    myAES = AESCipher(key)
    if 'cookie' in request.headers:
        # Fix: bind `cookie` before the try block; previously a
        # Cookie.CookieError left it unbound, and the 'wsid' lookup
        # below raised NameError instead of rejecting the client.
        cookie = Cookie.SimpleCookie()
        try:
            cookie.load(str(request.headers['cookie']))
        except Cookie.CookieError:
            pass
        if ('wsid' in cookie) and ('PSClient' in request.headers['user-agent']):
            wsid = cookie['wsid'].value
            cambot = json.loads(myAES.decrypt(wsid))
            if cambot['id'] in request.path:
                self.temp_location = self.factory.temp_path.child(
                    cambot['id'])
                if not self.temp_location.exists():
                    self.temp_location.makedirs()
                f = self.temp_location.child(u'index.html')
                g = FilePath("/home/chetan/pscore/templates/live_hls.html"
                             ).asTextMode()
                content = g.getContent()
                # Substitute the camera id into the HLS player template.
                new = content.replace("++camid++", cambot['id'])
                f.setContent(new)
                return None
            else:
                self.sendClose(1000, "Not authorised")
        else:
            self.sendClose(1000, "Not authorised")
    else:
        self.sendClose(1000, "Not authorised")
def postOptions(self):
    """
    Validate the options and construct the runner for the selected
    provider.

    :raise UsageError: if no distribution was given or the provider is
        not supported.
    """
    if self['distribution'] is None:
        raise UsageError("Distribution required.")
    if self['config-file'] is not None:
        config_file = FilePath(self['config-file'])
        self['config'] = yaml.safe_load(config_file.getContent())
    else:
        self['config'] = {}
    provider = self['provider'].lower()
    provider_config = self['config'].get(provider, {})
    package_source = PackageSource(
        version=self['flocker-version'],
        branch=self['branch'],
        build_server=self['build-server'],
    )
    try:
        # Providers are dispatched to methods named _runner_<PROVIDER>.
        get_runner = getattr(self, "_runner_" + provider.upper())
    except AttributeError:
        raise UsageError(
            "Provider {!r} not supported. Available providers: {}".format(
                provider,
                ', '.join(
                    name.lower() for name in self._get_provider_names()
                )
            )
        )
    else:
        self.runner = get_runner(
            package_source=package_source,
            dataset_backend=self.dataset_backend(),
            provider_config=provider_config,
        )
def isDocker(self, _initCGroupLocation="/proc/1/cgroup"):
    """
    Check if the current platform is Linux in a Docker container.

    @return: C{True} if the current platform has been detected as Linux
        inside a Docker container.
    @rtype: C{bool}
    """
    if not self.isLinux():
        return False

    from twisted.python.filepath import FilePath

    # Ask for the cgroups of init (pid 1)
    initCGroups = FilePath(_initCGroupLocation)
    if not initCGroups.exists():
        return False
    # Each line looks like "2:cpu:/". Inside a Docker container the
    # third field begins with /docker/.
    for line in initCGroups.getContent().split(b"\n"):
        fields = line.split(b":")
        if len(fields) == 3 and fields[2].startswith(b"/docker/"):
            return True
    return False
def get_backend_api(test_case, cluster_id):
    """
    Get an appropriate BackendAPI for the specified dataset backend.

    Note this is a backdoor that is useful to be able to interact with
    cloud APIs in tests. For many dataset backends this does not make
    sense, but it provides a convenient means to interact with cloud
    backends such as EBS or cinder.

    :param test_case: The test case that is being run.
    :param cluster_id: The unique cluster_id, used for backend APIs
        that require this in order to be constructed.
    :raise SkipTest: if the backend is not aws, the config environment
        variable is unset, or the config lacks a stanza for the backend.
    """
    backend_type = get_dataset_backend(test_case)
    if backend_type != DatasetBackend.aws:
        raise SkipTest(
            'This test is asking for backend type {} but only constructing '
            'aws backends is currently supported'.format(backend_type.name))
    backend_name = backend_type.name
    backend_config_filename = environ.get(
        "FLOCKER_ACCEPTANCE_TEST_VOLUME_BACKEND_CONFIG")
    if backend_config_filename is None:
        raise SkipTest(
            'This test requires the ability to construct an IBlockDeviceAPI '
            'in order to verify construction. Please set '
            'FLOCKER_ACCEPTANCE_TEST_VOLUME_BACKEND_CONFIG to a yaml filepath '
            'with the dataset configuration.')
    backend_config_filepath = FilePath(backend_config_filename)
    full_backend_config = yaml.safe_load(
        backend_config_filepath.getContent())
    backend_config = full_backend_config.get(backend_name)
    # Robustness fix: a missing stanza previously crashed with
    # TypeError ("'backend' in None"); skip with a clear message.
    if backend_config is None:
        raise SkipTest(
            'The backend configuration file has no {!r} stanza.'.format(
                backend_name))
    # The 'backend' key names the driver and is not a constructor
    # argument; pop with a default instead of test-then-pop.
    backend_config.pop('backend', None)
    return aws_from_configuration(cluster_id=cluster_id, **backend_config)
def get_backend_api(test_case, cluster_id):
    """
    Get an appropriate BackendAPI for the specified dataset backend.

    Note this is a backdoor that is useful to be able to interact with
    cloud APIs in tests. For many dataset backends this does not make
    sense, but it provides a convenient means to interact with cloud
    backends such as EBS or cinder.

    :param test_case: The test case that is being run.
    :param cluster_id: The unique cluster_id, used for backend APIs
        that require this in order to be constructed.
    :raise SkipTest: if the required environment variables are unset or
        the configuration file lacks a stanza for the backend.
    """
    backend_config_filename = environ.get(
        "FLOCKER_ACCEPTANCE_TEST_VOLUME_BACKEND_CONFIG")
    if backend_config_filename is None:
        raise SkipTest(
            'This test requires the ability to construct an IBlockDeviceAPI '
            'in order to verify construction. Please set '
            'FLOCKER_ACCEPTANCE_TEST_VOLUME_BACKEND_CONFIG to a yaml filepath '
            'with the dataset configuration.')
    backend_name = environ.get("FLOCKER_ACCEPTANCE_VOLUME_BACKEND")
    if backend_name is None:
        raise SkipTest(
            "Set acceptance testing volume backend using the " +
            "FLOCKER_ACCEPTANCE_VOLUME_BACKEND environment variable.")
    backend_config_filepath = FilePath(backend_config_filename)
    full_backend_config = yaml.safe_load(
        backend_config_filepath.getContent())
    backend_config = full_backend_config.get(backend_name)
    # Robustness fix: a missing stanza previously crashed with
    # TypeError ("'backend' in None"); skip with a clear message.
    if backend_config is None:
        raise SkipTest(
            'The backend configuration file has no {!r} stanza.'.format(
                backend_name))
    # The 'backend' key names the driver and is not an API argument;
    # pop with a default instead of test-then-pop.
    backend_config.pop('backend', None)
    backend = get_backend(backend_name)
    return get_api(backend, pmap(backend_config), reactor, cluster_id)
def monitoring_check(checker, lasterrors_path, from_email, what, stdout, stderr):
    """
    Run ``checker`` and e-mail a report when its error output differs
    from the errors recorded in the previous run.

    Python 2 code (uses ``print >>`` statements).

    :param checker: callable taking ``(stdout, error_stream)`` and
        returning a Deferred.
    :param lasterrors_path: path of the file holding the error output
        from the previous run.
    :param from_email: sender address for the failure report.
    :param what: description of what is being monitored.
    :param stdout: stream for normal output.
    :param stderr: stream for error output.
    :return: the Deferred returned by ``checker``, extended with the
        reporting callback.
    """
    error_stream = StringIO()
    lasterrors = None
    lasterrors_fp = FilePath(lasterrors_path)
    if lasterrors_fp.exists():
        lasterrors = lasterrors_fp.getContent()

    d = checker(stdout, error_stream)

    def cb(x):
        # Added with addBoth, so x may be a result or a Failure.
        if isinstance(x, Failure):
            print >>stderr, str(x)
            if hasattr(x.value, 'response'):
                print >>stderr, x.value.response

        errors = error_stream.getvalue()
        print >>stderr, errors
        # Only send a report when the errors changed since last run,
        # to avoid repeating identical reports.
        if errors != lasterrors:
            d2 = send_monitoring_report(errors, from_email, what)

            def _sent(ign):
                # Record what was reported, then fail loudly so the
                # caller notices that a report was sent.
                lasterrors_fp.setContent(errors)
                raise Exception("Sent failure report.")

            def _err(f):
                print >>stderr, str(f)
                return f
            d2.addCallbacks(_sent, _err)
            return d2

    d.addBoth(cb)
    return d
def postOptions(self):
    """
    Validate options: load the application template, check the purpose
    string, and prepare the certificate directory.

    :raise UsageError: if apps-per-node is positive without a template,
        or the purpose contains characters other than alphanumerics
        and dashes.
    """
    if self['app-template'] is not None:
        template_file = FilePath(self['app-template'])
        self['template'] = yaml.safe_load(template_file.getContent())
    elif self['apps-per-node'] > 0:
        raise UsageError(
            "app-template parameter must be provided if apps-per-node > 0"
        )

    self['purpose'] = unicode(self['purpose'])
    if any(x not in string.ascii_letters + string.digits + '-'
           for x in self['purpose']):
        raise UsageError(
            "Purpose may have only alphanumeric symbols and dash. " +
            # Fix: report the offending value; previously the literal
            # string 'purpose' was formatted instead of self['purpose'].
            "Found {!r}".format(self['purpose'])
        )

    if self['cert-directory']:
        cert_path = FilePath(self['cert-directory'])
        _ensure_empty_directory(cert_path)
        self['cert-directory'] = cert_path

    # This is run last as it creates the actual "runner" object
    # based on the provided parameters.
    super(RunOptions, self).postOptions()
def test_createSSLPort(self):
    """
    Given a valid SSL strport description and the storeID of an
    existing factory, I{axiomatic port create} creates an L{SSLPort}
    with that configuration, copies the certificate into the store's
    files directory, and powers the port up for L{IService}.
    """
    certAndKey = CERTIFICATE_DATA + PRIVATEKEY_DATA
    pemPath = FilePath(self.mktemp())
    pemPath.setContent(certAndKey)
    store = Store(filesdir=self.mktemp())
    factory = DummyFactory(store=store)
    args = [
        "create",
        "--strport",
        "ssl:8443:certKey=" + pemPath.path + ":privateKey=" + pemPath.path,
        "--factory-identifier",
        str(factory.storeID),
    ]
    self.assertSuccessStatus(self._makeConfig(store), args)
    self.assertEqual("Created.\n", sys.stdout.getvalue())
    [ssl] = list(store.query(SSLPort))
    self.assertEqual(ssl.portNumber, 8443)
    self.assertEqual(ssl.certificatePath.getContent(), certAndKey)
    self.assertIdentical(ssl.factory, factory)
    # The original PEM file must be left untouched.
    self.assertEqual(pemPath.getContent(), certAndKey)
    self.assertEqual(list(store.interfacesFor(ssl)), [IService])
def test_full(self):
    """
    Running C{calendarserver_export} on the command line exports an ics
    file. (Almost-full integration test, starting from the main point,
    using as few test fakes as possible.)

    Note: currently the only test for directory interaction.
    """
    yield populateCalendarsFrom(
        {
            "user02": {
                # TODO: more direct test for skipping inbox
                "inbox": {
                    "inbox-item.ics": (valentines, {})
                },
                "calendar1": {
                    "peruser.ics": (dataForTwoUsers, {}),  # EST
                }
            }
        }, self.store
    )
    # Directory fixtures: augments and accounts XML written to temp
    # files and pointed at via instance attributes read by main().
    augmentsData = """
        <augments>
          <record>
            <uid>Default</uid>
            <enable>true</enable>
            <enable-calendar>true</enable-calendar>
            <enable-addressbook>true</enable-addressbook>
          </record>
        </augments>
    """
    augments = FilePath(self.mktemp())
    augments.setContent(augmentsData)
    accountsData = """
        <accounts realm="Test Realm">
          <user>
            <uid>user-under-test</uid>
            <guid>user02</guid>
            <name>Not Interesting</name>
            <password>very-secret</password>
          </user>
        </accounts>
    """
    accounts = FilePath(self.mktemp())
    accounts.setContent(accountsData)
    output = FilePath(self.mktemp())
    self.accountsFile = accounts.path
    self.augmentsFile = augments.path
    main(['calendarserver_export', '--output',
          output.path, '--user', 'user-under-test'], reactor=self)
    yield self.waitToStop
    self.assertEquals(
        Component.fromString(resultForUser2),
        Component.fromString(output.getContent())
    )
def getContent(self):
    """
    Read this file's bytes, but refuse (with C{EPERM}) to return data
    matching the duplicated thing2.pem certificate.
    """
    data = FilePath.getContent(self)
    # There is a duplicate of thing2.pem, so ignore anything that
    # looks like it.
    duplicate = casPath.child("thing2.pem").getContent()
    if data == duplicate:
        raise IOError(EPERM)
    return data
def test_success(self):
    """
    Neon generates a valid document when invoked with valid data.
    """
    pdfData = FilePath(__file__).sibling('data').child(
        'test.pdf').getContent()
    deferred = self.signPDF(pdfData)
    deferred.addCallback(self.assertValidPDF)
    return deferred
def read_serverinfo(pathtoserverinfo):
    """
    Read the server-info file and parse each non-empty line.

    :param pathtoserverinfo: path of the server-info file.
    :return: list of tuples, one per parsed line.
    """
    content = FilePath(pathtoserverinfo).getContent()
    return [
        _parse_serverinfo_line(line)
        for line in content.split('\n')
        if line
    ]
def test_readme(self):
    """
    The doctest examples embedded in README.rst all pass.
    """
    readme = FilePath(__file__).parent().parent().parent().child(
        'README.rst')
    doctest_case = _doctest_parser.get_doctest(
        readme.getContent(), {}, readme.basename(), readme.path, 0)
    captured = []
    result = _doctest_runner.run(doctest_case, out=captured.append)
    if result.failed:
        self.fail('%s\n%s' % (doctest_case.name, ''.join(captured)))
def postOptions(self):
    """
    Validate the options and construct a cloud (libcloud) or Vagrant
    runner for the selected provider.

    :raise UsageError: if no distribution was given, the provider is
        unsupported, or a cloud provider's config stanza is missing.
    """
    if self['distribution'] is None:
        raise UsageError("Distribution required.")
    if self['config-file'] is not None:
        config_file = FilePath(self['config-file'])
        self['config'] = yaml.safe_load(config_file.getContent())
    else:
        self['config'] = {}
    if self['flocker-version']:
        rpm_version = make_rpm_version(self['flocker-version'])
        os_version = "%s-%s" % (rpm_version.version, rpm_version.release)
        # Strip the '.dirty' suffix produced by builds from a modified
        # working tree.
        if os_version.endswith('.dirty'):
            os_version = os_version[:-len('.dirty')]
    else:
        os_version = None
    package_source = PackageSource(
        version=self['flocker-version'],
        os_version=os_version,
        branch=self['branch'],
        build_server=self['build-server'],
    )
    if self['provider'] not in PROVIDERS:
        raise UsageError(
            "Provider %r not supported. Available providers: %s"
            % (self['provider'], ', '.join(PROVIDERS)))
    if self['provider'] in CLOUD_PROVIDERS:
        # Configuration must include credentials etc for cloud providers.
        try:
            provider_config = self['config'][self['provider']]
        except KeyError:
            raise UsageError(
                "Configuration file must include a "
                "{!r} config stanza.".format(self['provider'])
            )
        provisioner = CLOUD_PROVIDERS[self['provider']](**provider_config)
        self.runner = LibcloudRunner(
            config=self['config'],
            top_level=self.top_level,
            distribution=self['distribution'],
            package_source=package_source,
            provisioner=provisioner,
            variants=self['variants'],
        )
    else:
        self.runner = VagrantRunner(
            config=self['config'],
            top_level=self.top_level,
            distribution=self['distribution'],
            package_source=package_source,
            variants=self['variants'],
        )
def getCAPrivateCert():
    """
    Load the CA's private certificate from disk, generating and saving
    a new self-signed one if none exists yet.
    """
    private_path = FilePath(b"ca-private-cert.pem")
    if private_path.exists():
        return PrivateCertificate.loadPEM(private_path.getContent())
    ca_key = KeyPair.generate(size=4096)
    ca_cert = ca_key.selfSignedCert(1, CN="the-authority")
    private_path.setContent(ca_cert.dumpPEM())
    return ca_cert
def push_pl(self, tsfilename):
    """
    Push the HLS playlist file to the peer when not paused.

    Always returns True.
    """
    playlist = FilePath("playlist.m3u8").asBytesMode()
    if playlist.exists():
        body = playlist.getContent()
        if not self.paused:
            self.proto.sendMessage("playlist.m3u8")
            self.proto.sendMessage(body, isBinary=True)
    return True
def getCAPrivateCert():
    """
    Load the CA's private certificate from disk, generating and saving
    a new self-signed one if none exists yet.
    """
    pemPath = FilePath(b"ca-private-cert.pem")
    if not pemPath.exists():
        authorityKey = KeyPair.generate(size=4096)
        authorityCert = authorityKey.selfSignedCert(1, CN="the-authority")
        pemPath.setContent(authorityCert.dumpPEM())
        return authorityCert
    return PrivateCertificate.loadPEM(pemPath.getContent())
def postOptions(self):
    """
    Validate the options and construct a cloud (libcloud) or Vagrant
    runner for the selected provider.

    :raise UsageError: if no distribution was given, the provider is
        unsupported, or a cloud provider's config stanza is missing.
    """
    if self['distribution'] is None:
        raise UsageError("Distribution required.")
    if self['config-file'] is not None:
        config_file = FilePath(self['config-file'])
        self['config'] = yaml.safe_load(config_file.getContent())
    else:
        self['config'] = {}
    if self['flocker-version']:
        # make_rpm_version returns a (version, release) pair.
        os_version = "%s-%s" % make_rpm_version(self['flocker-version'])
        # Strip the '.dirty' suffix produced by builds from a modified
        # working tree.
        if os_version.endswith('.dirty'):
            os_version = os_version[:-len('.dirty')]
    else:
        os_version = None
    package_source = PackageSource(
        version=self['flocker-version'],
        os_version=os_version,
        branch=self['branch'],
        build_server=self['build-server'],
    )
    if self['provider'] not in PROVIDERS:
        raise UsageError(
            "Provider %r not supported. Available providers: %s"
            % (self['provider'], ', '.join(PROVIDERS)))
    if self['provider'] in CLOUD_PROVIDERS:
        # Configuration must include credentials etc for cloud providers.
        try:
            provider_config = self['config'][self['provider']]
        except KeyError:
            raise UsageError("Configuration file must include a "
                             "{!r} config stanza.".format(
                                 self['provider']))
        provisioner = CLOUD_PROVIDERS[self['provider']](**provider_config)
        self.runner = LibcloudRunner(
            config=self['config'],
            top_level=self.top_level,
            distribution=self['distribution'],
            package_source=package_source,
            provisioner=provisioner,
            variants=self['variants'],
        )
    else:
        self.runner = VagrantRunner(
            config=self['config'],
            top_level=self.top_level,
            distribution=self['distribution'],
            package_source=package_source,
            variants=self['variants'],
        )
def createDatabase(s):
    """
    Populate the store with a Person named Bob and a Mugshot whose body
    is a copy of the bundled square.png resource.
    """
    resources = FilePath(__file__).parent().parent().child('resources')
    outfile = s.newFile('the-image')
    outfile.write(resources.child('square.png').getContent())
    outfile.close()
    Mugshot(
        store=s,
        person=Person(store=s, name=u'Bob'),
        body=outfile.finalpath,
        type=u'image/png')
def _verifyfp_and_write_pubkey(
    (fingerprint_from_keyscan, hashed_pubkey)
):
    """
    Check the keyscan fingerprint against the one reported by AWS and,
    if they match, append the hashed public key to the user's
    known_hosts file.

    Python 2 code: uses tuple parameter unpacking and ``print >>``.

    :param fingerprint_from_keyscan: fingerprint obtained via
        ssh-keyscan.
    :param hashed_pubkey: hashed public key line to append to
        known_hosts.
    :raise PublicKeyMismatch: if the fingerprints differ.
    """
    # NOTE(review): fingerprint_from_AWS is not defined in this block;
    # presumably a closure variable from the enclosing scope — verify.
    if fingerprint_from_AWS != fingerprint_from_keyscan:
        raise PublicKeyMismatch()
    print >>stderr, "The ssh public key on the server has fingerprint: %s" % (fingerprint_from_keyscan,)
    known_hosts_filepath = FilePath(os.path.expanduser('~')).child('.ssh').child('known_hosts')
    if not known_hosts_filepath.exists():
        known_hosts_filepath.create()
    # Normalise to exactly one trailing newline before appending.
    known_hosts = known_hosts_filepath.getContent().rstrip('\n') + '\n'
    new_known_hosts = known_hosts + hashed_pubkey
    known_hosts_filepath.setContent(new_known_hosts)
def parseArgs(self, control_host, deployment_config, application_config):
    """
    Validate and parse the positional arguments: the control service
    host and the deployment/application configuration file paths.

    Builds the control-service URL, loads both YAML files, and resolves
    the certificates directory.

    :raise UsageError: if a file is missing or is not valid YAML.
    """
    deployment_config = FilePath(deployment_config)
    application_config = FilePath(application_config)
    if not deployment_config.exists():
        raise UsageError('No file exists at {path}'
                         .format(path=deployment_config.path))
    if not application_config.exists():
        raise UsageError('No file exists at {path}'
                         .format(path=application_config.path))
    self["url"] = u"https://{}:{}/v1/configuration/_compose".format(
        control_host, self["port"]).encode("ascii")
    # Stored raw first; replaced with the parsed object below.
    self["application_config"] = application_config.getContent()
    try:
        self["deployment_config"] = safe_load(
            deployment_config.getContent())
    except YAMLError as e:
        raise UsageError(
            ("Deployment configuration at {path} could not be parsed as "
             "YAML:\n\n{error}").format(
                path=deployment_config.path,
                error=str(e)
            )
        )
    try:
        self["application_config"] = safe_load(
            application_config.getContent())
    except YAMLError as e:
        raise UsageError(
            ("Application configuration at {path} could not be parsed as "
             "YAML:\n\n{error}").format(
                path=application_config.path,
                error=str(e)
            )
        )
    # Default the certificates directory to the current directory.
    if self["certificates-directory"] is None:
        self["certificates-directory"] = FilePath(os.getcwd())
    else:
        self["certificates-directory"] = FilePath(
            self["certificates-directory"])
def parseArgs(self, deployment_config, application_config):
    """
    Validate and parse the deployment and application configuration
    files and build the deployment model.

    :param deployment_config: Path to the deployment configuration file.
    :param application_config: Path to the application configuration
        file.
    :raise UsageError: if a file is missing, is not valid YAML, or the
        configuration is invalid.
    """
    deployment_config = FilePath(deployment_config)
    application_config = FilePath(application_config)
    if not deployment_config.exists():
        raise UsageError('No file exists at {path}'
                         .format(path=deployment_config.path))
    if not application_config.exists():
        raise UsageError('No file exists at {path}'
                         .format(path=application_config.path))
    # Keep the raw file contents for later use alongside the parsed
    # deployment model.
    self["deployment_config"] = deployment_config.getContent()
    self["application_config"] = application_config.getContent()
    try:
        deploy_config_obj = safe_load(self["deployment_config"])
    except YAMLError as e:
        raise UsageError(
            ("Deployment configuration at {path} could not be parsed as "
             "YAML:\n\n{error}").format(
                path=deployment_config.path,
                error=str(e)
            )
        )
    try:
        app_config_obj = safe_load(self["application_config"])
    except YAMLError as e:
        raise UsageError(
            ("Application configuration at {path} could not be parsed as "
             "YAML:\n\n{error}").format(
                path=application_config.path,
                error=str(e)
            )
        )
    try:
        self['deployment'] = model_from_configuration(
            application_configuration=app_config_obj,
            deployment_configuration=deploy_config_obj)
    except ConfigurationError as e:
        raise UsageError(str(e))
def createDatabase(s):
    """
    Populate the store with a Person named Bob and a Mugshot whose body
    is a copy of the bundled square.png resource.
    """
    image = FilePath(__file__).parent().parent().child('resources').child(
        'square.png').getContent()
    stored = s.newFile('the-image')
    stored.write(image)
    stored.close()
    Mugshot(store=s,
            person=Person(store=s, name=u'Bob'),
            body=stored.finalpath,
            type=u'image/png')
def test_save_description(self):
    """Save the description when the project is saved."""
    description = "hello world"
    manager = project.ProjectManager(self.mktemp())
    prj = manager.get_project(NAME)
    prj.set_description(description)
    readme = FilePath(prj.path).child("README")
    # README must not exist until save() is called.
    self.assertFalse(readme.exists())
    prj.save(Factory())
    self.assertEqual(description, readme.getContent())
def push_pl(self,tsfilename): plfile = FilePath("playlist.m3u8").asBytesMode() if plfile.exists(): plcontent = plfile.getContent() if not self.paused: self.proto.sendMessage("playlist.m3u8") self.proto.sendMessage(plcontent,isBinary=True) print "---> playlist.m3u8 pushed directly" else: print "---> buffer not free" return True
def push_pl(self, tsfilename): plfile = FilePath("playlist.m3u8").asBytesMode() if plfile.exists(): plcontent = plfile.getContent() if not self.paused: self.proto.sendMessage("playlist.m3u8") self.proto.sendMessage(plcontent, isBinary=True) print "---> playlist.m3u8 pushed directly" else: print "---> buffer not free" return True
def parseArgs(self, control_host, deployment_config, application_config):
    """
    Validate and parse the positional arguments: the control service
    host and the deployment/application configuration file paths.

    Builds the control-service URL, loads both YAML files, and resolves
    the certificates directory.

    :raise UsageError: if a file is missing or is not valid YAML.
    """
    deployment_config = FilePath(deployment_config)
    application_config = FilePath(application_config)
    if not deployment_config.exists():
        raise UsageError(
            'No file exists at {path}'.format(path=deployment_config.path))
    if not application_config.exists():
        raise UsageError('No file exists at {path}'.format(
            path=application_config.path))
    self["url"] = u"https://{}:{}/v1/configuration/_compose".format(
        control_host, self["port"]).encode("ascii")
    # Stored raw first; replaced with the parsed object below.
    self["application_config"] = application_config.getContent()
    try:
        self["deployment_config"] = safe_load(
            deployment_config.getContent())
    except YAMLError as e:
        raise UsageError(
            ("Deployment configuration at {path} could not be parsed as "
             "YAML:\n\n{error}").format(path=deployment_config.path,
                                        error=str(e)))
    try:
        self["application_config"] = safe_load(
            application_config.getContent())
    except YAMLError as e:
        raise UsageError(
            ("Application configuration at {path} could not be parsed as "
             "YAML:\n\n{error}").format(path=application_config.path,
                                        error=str(e)))
    # Default the certificates directory to the current directory.
    if self["certificates-directory"] is None:
        self["certificates-directory"] = FilePath(os.getcwd())
    else:
        self["certificates-directory"] = FilePath(
            self["certificates-directory"])
def parseArgs(self, deployment_config, application_config):
    """
    Validate and parse the deployment and application configuration
    files and build the deployment model.

    :param deployment_config: Path to the deployment configuration file.
    :param application_config: Path to the application configuration
        file.
    :raise UsageError: if a file is missing, is not valid YAML, or the
        configuration is invalid.
    """
    deployment_config = FilePath(deployment_config)
    application_config = FilePath(application_config)
    if not deployment_config.exists():
        raise UsageError(
            'No file exists at {path}'.format(path=deployment_config.path))
    if not application_config.exists():
        raise UsageError('No file exists at {path}'.format(
            path=application_config.path))
    # Keep the raw file contents for later use alongside the parsed
    # deployment model.
    self["deployment_config"] = deployment_config.getContent()
    self["application_config"] = application_config.getContent()
    try:
        deploy_config_obj = safe_load(self["deployment_config"])
    except YAMLError as e:
        raise UsageError(
            ("Deployment configuration at {path} could not be parsed as "
             "YAML:\n\n{error}").format(path=deployment_config.path,
                                        error=str(e)))
    try:
        app_config_obj = safe_load(self["application_config"])
    except YAMLError as e:
        raise UsageError(
            ("Application configuration at {path} could not be parsed as "
             "YAML:\n\n{error}").format(path=application_config.path,
                                        error=str(e)))
    try:
        self['deployment'] = model_from_configuration(
            application_configuration=app_config_obj,
            deployment_configuration=deploy_config_obj)
    except ConfigurationError as e:
        raise UsageError(str(e))
def writeFiles(targets):
    """
    Write the generated content for each target to its output path,
    skipping files whose content is already up to date.
    """
    for target in targets:
        destination = FilePath(target["output"])
        generated = _makeGeneratedFileContent(target)
        # Avoid rewriting identical content, which would trigger
        # automatic rebuilds of compiled JavaScript.
        try:
            existing = destination.getContent()
        except IOError:
            existing = None
        if existing != generated:
            destination.setContent(generated)
def _generateReadme(self):
    """
    Generate a generic readme from the tasks available.
    """
    # FIXME: Clean this up
    # https://github.com/twisted-infra/braid/issues/7
    readmeFile = FilePath(__file__).sibling('README')
    context = dict(
        (key, getattr(self, key))
        for key in ['configDir', 'runDir', 'logDir', 'binDir',
                    'serviceName'])
    # One " - name: summary" line per task, using the first line of
    # each task's docstring as its summary.
    summaries = [
        ' - {}: {}'.format(task.name,
                           task.__doc__.strip().splitlines()[0])
        for task in self.getTasks().itervalues()]
    context['tasks'] = '\n'.join(summaries)
    return readmeFile.getContent().format(**context)
def loadFile(self, filename):
    """
    Load records from C{filename}.

    @param filename: file to read from
    @type filename: L{bytes}
    """
    fp = FilePath(filename)
    # Derive the origin from the file's basename.  Not the best way to set
    # an origin; it can also be set inside the file using $ORIGIN.
    self.origin = nativeString(fp.basename() + b'.')
    rawLines = fp.getContent().splitlines(True)
    withoutComments = self.stripComments(rawLines)
    collapsed = self.collapseContinuations(withoutComments)
    self.parseLines(collapsed)
def createDatabase(store):
    """
    Make L{Person} and L{Mugshot} items.  Set the C{body} and C{smallerBody}
    attributes of the L{Mugshot} item to point at a copy of
    I{xmantissa/test/resources/square.png} beneath the store's directory.
    """
    destination = store.newFile(*MUGSHOT_BODY_PATH_SEGMENTS)
    # Locate the test fixture image relative to this module.
    source = FilePath(__file__).parent().parent().child(
        'resources').child('square.png')
    destination.write(source.getContent())
    destination.close()
    Mugshot(
        store=store,
        person=Person(store=store),
        body=destination.finalpath,
        smallerBody=destination.finalpath,
        type=MUGSHOT_TYPE)
def push_ts(self, tsfilename):
    """
    Push a transport-stream segment to the peer, or buffer it while paused.

    While paused, segments accumulate in ``self.ts_pool``.  When not paused
    and the pool is non-empty, the oldest buffered segment is sent first
    (FIFO) and the new one joins the back of the queue.

    :param tsfilename: path of the .ts segment file to push.
    :return: the filename that was actually sent (or buffered).
    """
    tsfile = FilePath(tsfilename).asBytesMode()
    tscontent = tsfile.getContent()
    if self.paused:
        # Paused: just buffer the segment for later delivery.
        self.ts_pool.append((tscontent, tsfilename))
    elif len(self.ts_pool) > 0:
        # BUGFIX: the original overwrote the freshly read segment with the
        # popped tuple, silently dropping one segment per push while the
        # buffer drained.  Enqueue the new segment first so nothing is lost
        # and ordering is preserved.
        self.ts_pool.append((tscontent, tsfilename))
        tscontent, tsfilename = self.ts_pool.pop(0)
        self.proto.sendMessage(tsfilename)
        self.proto.sendMessage(tscontent, isBinary=True)
        print("---> segment pushed from buffer")
    else:
        self.proto.sendMessage(tsfilename)
        self.proto.sendMessage(tscontent, isBinary=True)
        print("---> segment pushed directly")
    return tsfilename
def postOptions(self):
    """
    Validate parsed options and build the runner.

    Loads the application template (required when ``apps-per-node`` > 0)
    and checks that ``purpose`` contains only alphanumerics and dashes.

    :raises UsageError: if the template is missing when required, or if
        ``purpose`` contains a disallowed character.
    """
    if self['app-template'] is not None:
        template_file = FilePath(self['app-template'])
        self['template'] = yaml.safe_load(template_file.getContent())
    elif self['apps-per-node'] > 0:
        raise UsageError(
            "app-template parameter must be provided if apps-per-node > 0")

    self['purpose'] = unicode(self['purpose'])
    allowed = string.ascii_letters + string.digits + '-'
    if any(x not in allowed for x in self['purpose']):
        # BUGFIX: the original formatted the literal string 'purpose' into
        # the message; report the actual offending value instead.
        raise UsageError(
            "Purpose may have only alphanumeric symbols and dash. " +
            "Found {!r}".format(self['purpose']))

    # This is run last as it creates the actual "runner" object
    # based on the provided parameters.
    super(RunOptions, self).postOptions()
def push_ts(self, tsfilename, tscount):
    """
    Push a transport-stream segment to the peer, or buffer it while paused.

    While paused, segments accumulate in ``self.ts_pool``.  When not paused
    and the pool is non-empty, the oldest buffered segment is sent first
    (FIFO) and the new one joins the back of the queue.

    :param tsfilename: path of the .ts segment file to push.
    :param tscount: segment counter supplied by the caller (currently
        unused here; kept for interface compatibility).
    :return: the filename that was actually sent (or buffered).
    """
    tsfile = FilePath(tsfilename).asBytesMode()
    tscontent = tsfile.getContent()
    if self.paused:
        # Paused: just buffer the segment for later delivery.
        self.ts_pool.append((tscontent, tsfilename))
    elif len(self.ts_pool) > 0:
        # BUGFIX: the original overwrote the freshly read segment with the
        # popped tuple, silently dropping one segment per push while the
        # buffer drained.  Enqueue the new segment first so nothing is lost
        # and ordering is preserved.
        self.ts_pool.append((tscontent, tsfilename))
        tscontent, tsfilename = self.ts_pool.pop(0)
        self.proto.sendMessage(tsfilename)
        self.proto.sendMessage(tscontent, isBinary=True)
        print("---> segment pushed from buffer")
    else:
        self.proto.sendMessage(tsfilename)
        self.proto.sendMessage(tscontent, isBinary=True)
        print("---> segment pushed directly")
    return tsfilename
def onConnect(self, request):
    """
    Authorise an incoming WebSocket connection.

    Two paths are accepted: a camera client (``PSClient`` user-agent with a
    ``wsid`` cookie naming a cam whose id appears in the request path), or
    a browser user (``wsid`` + ``gtoken`` cookies verified against the
    user's registered cams).  Everything else is closed with code 1000.
    """
    myAES = AESCipher(key)
    if 'cookie' not in request.headers:
        self.sendClose(1000, "Not authorised")
        return
    cookie = Cookie.SimpleCookie()
    try:
        cookie.load(str(request.headers['cookie']))
    except Cookie.CookieError:
        # Best-effort parse: a malformed cookie header simply yields an
        # empty/partial jar and falls through to "Not authorised".
        pass
    # BUGFIX: the 'user-agent' header may be absent; indexing it raised
    # KeyError instead of closing the connection cleanly.
    user_agent = request.headers.get('user-agent', '')
    if ('wsid' in cookie) and ('PSClient' in user_agent):
        wsid = cookie['wsid'].value
        cambot = json.loads(myAES.decrypt(wsid))
        if cambot['id'] in request.path:
            self.temp_location = self.factory.temp_path.child(cambot['id'])
            if not self.temp_location.exists():
                self.temp_location.makedirs()
            f = self.temp_location.child(u'index.html')
            # NOTE(review): hard-coded absolute template path — consider
            # making this configurable.
            g = FilePath("/home/chetan/pscore/templates/live_hls.html"
                         ).asTextMode()
            content = g.getContent()
            new = content.replace("++camid++", cambot['id'])
            f.setContent(new)
            return None
        else:
            self.sendClose(1000, "Not authorised")
    elif ('wsid' in cookie) and ('gtoken' in cookie):
        wsid = cookie['wsid'].value
        gtoken = cookie['gtoken'].value
        # Decrypt validates the wsid cookie even though the result is unused.
        user_data = json.loads(myAES.decrypt(wsid))
        gitkit_user = gitkit_instance.VerifyGitkitToken(gtoken)
        mycams = CamCheck().cbquery(gitkit_user.user_id)
        camid = request.path.split("/")[3]
        self.temp_location = self.factory.temp_path.child(camid)
        if any(camid in x for x in mycams):
            return None
        else:
            self.sendClose(1000, "Not authorised")
    else:
        self.sendClose(1000, "Not authorised")
def render(options):
    """
    Renders a file to stdout surrounding it by a base template if supplied
    """
    rendered = renderFile(options['source'], options['include'])
    # mimick blogger's overuse of <br />
    rendered['body'] = rendered['body'].replace('\n', '<br />')
    template_path = options['template']
    if not template_path:
        print(rendered['body'])
        return
    base = FilePath(template_path)
    template = jenv.from_string(base.getContent())
    context = {
        'title': rendered['headers']['title'],
        'content': rendered['body'],
    }
    print(template.render(context))
def onConnect(self, request):
    """
    Authorise an incoming WebSocket connection.

    Two paths are accepted: a camera client (``PSClient`` user-agent with a
    ``wsid`` cookie naming a cam whose id appears in the request path), or
    a browser user (``wsid`` + ``gtoken`` cookies verified against the
    user's registered cams).  Everything else is closed with code 1000.
    """
    myAES = AESCipher(key)
    if 'cookie' not in request.headers:
        self.sendClose(1000, "Not authorised")
        return
    cookie = Cookie.SimpleCookie()
    try:
        cookie.load(str(request.headers['cookie']))
    except Cookie.CookieError:
        # Best-effort parse: a malformed cookie header simply yields an
        # empty/partial jar and falls through to "Not authorised".
        pass
    # BUGFIX: the 'user-agent' header may be absent; indexing it raised
    # KeyError instead of closing the connection cleanly.
    user_agent = request.headers.get('user-agent', '')
    if ('wsid' in cookie) and ('PSClient' in user_agent):
        wsid = cookie['wsid'].value
        cambot = json.loads(myAES.decrypt(wsid))
        if cambot['id'] in request.path:
            self.temp_location = self.factory.temp_path.child(cambot['id'])
            if not self.temp_location.exists():
                self.temp_location.makedirs()
            f = self.temp_location.child(u'index.html')
            # NOTE(review): hard-coded absolute template path — consider
            # making this configurable.
            g = FilePath("/home/chetan/pscore/templates/live_hls.html").asTextMode()
            content = g.getContent()
            new = content.replace("++camid++", cambot['id'])
            f.setContent(new)
            return None
        else:
            self.sendClose(1000, "Not authorised")
    elif ('wsid' in cookie) and ('gtoken' in cookie):
        wsid = cookie['wsid'].value
        gtoken = cookie['gtoken'].value
        # Decrypt validates the wsid cookie even though the result is unused.
        user_data = json.loads(myAES.decrypt(wsid))
        gitkit_user = gitkit_instance.VerifyGitkitToken(gtoken)
        mycams = CamCheck().cbquery(gitkit_user.user_id)
        camid = request.path.split("/")[3]
        self.temp_location = self.factory.temp_path.child(camid)
        if any(camid in x for x in mycams):
            return None
        else:
            self.sendClose(1000, "Not authorised")
    else:
        self.sendClose(1000, "Not authorised")