def _manager():
    """
    Build a BlobManager wired from the command-line `args` in scope.

    The database key is a fixed placeholder ('A' * 32) in this helper.
    """
    # make sure the parent directory of the local db exists
    mkdir_p(os.path.dirname(args.path))
    return BlobManager(
        args.path, args.url, 'A' * 32, args.secret,
        args.uuid, args.token, args.cert_file)
def set_autostart(enabled):
    """
    Set the autostart mode to enabled or disabled depending on the
    parameter. If `enabled` is `True`, save the autostart file to its
    place. Otherwise, remove that file.

    Right now we support only Linux autostart.

    :param enabled: whether the autostart should be enabled or not.
    :type enabled: bool
    """
    # we don't do autostart for bundle or systems different than Linux
    if flags.STANDALONE or not IS_LINUX:
        return

    if not enabled:
        try:
            os.remove(DESKTOP_ENTRY_FILE)
        except OSError:
            # if the file does not exist
            pass
        except Exception as e:
            logger.error("Problem disabling autostart, {0!r}".format(e))
        return

    # enabled: drop the desktop entry into place
    mkdir_p(DESKTOP_ENTRY_PATH)
    with open(DESKTOP_ENTRY_FILE, 'w') as f:
        f.write(DESKTOP_ENTRY)
def _download_ca_cert(self, *args):
    """
    Download the CA certificate that is going to be used for the api URL.
    """
    # XXX maybe we can skip this step if we have a fresh one.
    leap_assert(
        self._provider_config,
        "Cannot download the ca cert without a provider config!")

    logger.debug("Downloading ca cert for %r at %r" %
                 (self._domain, self._provider_config.get_ca_cert_uri()))

    if not self._should_proceed_cert():
        # we already have a usable cert: just enforce sane permissions
        check_and_fix_urw_only(
            self._provider_config.get_ca_cert_path(about_to_download=True))
        return

    response = self._session.get(
        self._provider_config.get_ca_cert_uri(),
        verify=self.verify,
        timeout=REQUEST_TIMEOUT)
    response.raise_for_status()

    target = self._provider_config.get_ca_cert_path(about_to_download=True)
    mkdir_p(os.path.dirname(target))
    with open(target, "w") as f:
        f.write(response.content)

    check_and_fix_urw_only(target)
def __init__(
        self, local_path, remote, key, secret, user, token=None,
        cert_file=None, remote_stream=None):
    """
    Initialize the blob manager.

    :param local_path: The path for the local blobs database.
    :type local_path: str
    :param remote: The URL of the remote storage.
    :type remote: str
    :param key: The key used to encrypt the local blobs database.
    :type key: str
    :param secret: The secret used to encrypt/decrypt blobs.
    :type secret: str
    :param user: The uuid of the user.
    :type user: str
    :param token: The access token for interacting with remote storage.
    :type token: str
    :param cert_file: The path to the CA certificate file.
    :type cert_file: str
    :param remote_stream: Remote storage stream URL, if supported.
    :type remote_stream: str
    """
    super(BlobsSynchronizer, self).__init__()
    if local_path:
        mkdir_p(os.path.dirname(local_path))
        self.local = SQLiteBlobBackend(local_path, key=key, user=user)
    self.remote = remote
    self.remote_stream = remote_stream
    self.secret = secret
    self.user = user
    self._client = HTTPClient(user, token, cert_file)
    # bound the number of concurrent blob writes
    self.semaphore = defer.DeferredSemaphore(self.concurrent_writes_limit)
    # one lock per blob_id, created lazily, to serialize per-blob operations
    self.locks = defaultdict(defer.DeferredLock)
def _download_ca_cert(self, *args):
    """
    Download the CA certificate to be used for the api URL.
    """
    # XXX maybe we can skip this step if we have a fresh one.
    leap_assert(self._provider_config,
                "Cannot download the ca cert "
                "without a provider config!")

    logger.debug("Downloading ca cert for %r at %r" %
                 (self._domain, self._provider_config.get_ca_cert_uri()))

    if not self._should_proceed_cert():
        # nothing to download: only fix permissions of the existing cert
        existing = self._provider_config.get_ca_cert_path(
            about_to_download=True)
        check_and_fix_urw_only(existing)
        return

    res = self._session.get(self._provider_config.get_ca_cert_uri(),
                            verify=self.verify,
                            timeout=REQUEST_TIMEOUT)
    res.raise_for_status()

    cert_path = self._provider_config.get_ca_cert_path(
        about_to_download=True)
    mkdir_p(os.path.dirname(cert_path))
    with open(cert_path, "w") as f:
        f.write(res.content)
    check_and_fix_urw_only(cert_path)
def maybe_download_provider_info(self, replace=False):
    """
    Download the provider.json info from the main domain.
    This SHOULD only be used once with the DOMAIN url.
    """
    # TODO handle pre-seeded providers?
    # or let client handle that? We could move them to bonafide.
    provider_json = self._get_provider_json_path()
    if is_file(provider_json) and not replace:
        return defer.succeed('provider_info_already_exists')

    parent_dir = os.path.split(provider_json)[0]
    mkdir_p(parent_dir)

    def errback(failure):
        # download failed: leave no partial state behind
        shutil.rmtree(parent_dir)
        raise NetworkError(failure.getErrorMessage())

    d = httpRequest(
        self._http._agent,
        self._disco.get_provider_info_uri(),
        method=self._disco.get_provider_info_method(),
        saveto=provider_json)
    d.addCallback(lambda _: self._load_provider_json())
    d.addErrback(errback)
    return d
def start(self):
    """
    Start a CouchDB instance for a test.

    Renders the ini template into a fresh temp dir, spawns couchdb and
    blocks until the port it listens on can be parsed from its log.
    """
    self.tempdir = tempfile.mkdtemp(suffix='.couch.test')

    path = os.path.join(os.path.dirname(__file__),
                        'couchdb.ini.template')
    handle = open(path)
    conf = handle.read() % {
        'tempdir': self.tempdir,
    }
    handle.close()

    confPath = os.path.join(self.tempdir, 'test.ini')
    handle = open(confPath, 'w')
    handle.write(conf)
    handle.close()

    # create the dirs from the template
    mkdir_p(os.path.join(self.tempdir, 'lib'))
    mkdir_p(os.path.join(self.tempdir, 'log'))

    args = ['couchdb', '-n', '-a', confPath]
    null = open('/dev/null', 'w')
    self.process = subprocess.Popen(
        args, env=None,
        stdout=null.fileno(), stderr=null.fileno(),
        close_fds=True)

    # find port
    logPath = os.path.join(self.tempdir, 'log', 'couch.log')
    while not os.path.exists(logPath):
        if self.process.poll() is not None:
            got_stdout, got_stderr = "", ""
            if self.process.stdout is not None:
                got_stdout = self.process.stdout.read()
            if self.process.stderr is not None:
                got_stderr = self.process.stderr.read()
            raise Exception("""
couchdb exited with code %d.
stdout:
%s
stderr:
%s""" % (
                self.process.returncode, got_stdout, got_stderr))
        time.sleep(0.01)
    while os.stat(logPath).st_size == 0:
        time.sleep(0.01)

    # FIX: raw string so `\d` is a regex digit class, not an invalid
    # string escape (DeprecationWarning, SyntaxWarning on newer Pythons)
    PORT_RE = re.compile(
        r'Apache CouchDB has started on http://127.0.0.1:(?P<port>\d+)')

    handle = open(logPath)
    line = handle.read()
    handle.close()
    m = PORT_RE.search(line)
    if not m:
        self.stop()
        raise Exception("Cannot find port in line %s" % line)
    self.port = int(m.group('port'))
def start(self):
    """
    Start a CouchDB instance for a test.

    Renders the ini template into a fresh temp dir, spawns couchdb and
    blocks until the port it listens on can be parsed from its log.
    """
    self.tempdir = tempfile.mkdtemp(suffix='.couch.test')

    path = os.path.join(os.path.dirname(__file__),
                        'couchdb.ini.template')
    handle = open(path)
    conf = handle.read() % {
        'tempdir': self.tempdir,
    }
    handle.close()

    confPath = os.path.join(self.tempdir, 'test.ini')
    handle = open(confPath, 'w')
    handle.write(conf)
    handle.close()

    # create the dirs from the template
    mkdir_p(os.path.join(self.tempdir, 'lib'))
    mkdir_p(os.path.join(self.tempdir, 'log'))

    args = ['couchdb', '-n', '-a', confPath]
    null = open('/dev/null', 'w')
    self.process = subprocess.Popen(args, env=None,
                                    stdout=null.fileno(),
                                    stderr=null.fileno(),
                                    close_fds=True)

    # find port
    logPath = os.path.join(self.tempdir, 'log', 'couch.log')
    while not os.path.exists(logPath):
        if self.process.poll() is not None:
            got_stdout, got_stderr = "", ""
            if self.process.stdout is not None:
                got_stdout = self.process.stdout.read()
            if self.process.stderr is not None:
                got_stderr = self.process.stderr.read()
            raise Exception("""
couchdb exited with code %d.
stdout:
%s
stderr:
%s""" % (self.process.returncode, got_stdout, got_stderr))
        time.sleep(0.01)
    while os.stat(logPath).st_size == 0:
        time.sleep(0.01)

    # FIX: raw string so `\d` is a regex digit class, not an invalid
    # string escape (DeprecationWarning, SyntaxWarning on newer Pythons)
    PORT_RE = re.compile(
        r'Apache CouchDB has started on http://127.0.0.1:(?P<port>\d+)')

    handle = open(logPath)
    line = handle.read()
    handle.close()
    m = PORT_RE.search(line)
    if not m:
        self.stop()
        raise Exception("Cannot find port in line %s" % line)
    self.port = int(m.group('port'))
def download_client_cert(provider_config, path, session):
    """
    Downloads the client certificate for each service.

    :param provider_config: instance of a ProviderConfig
    :type provider_config: ProviderConfig
    :param path: the path to download the cert to.
    :type path: str
    :param session: a fetcher.session instance. For the moment we only
                    support requests.sessions
    :type session: requests.sessions.Session
    """
    # TODO we should implement the @with_srp_auth decorator
    # again.
    srp_auth = SRPAuth(provider_config)
    session_id = srp_auth.get_session_id()
    token = srp_auth.get_token()

    cookies = {"_session_id": session_id} if session_id is not None else None

    cert_uri = "%s/%s/cert" % (provider_config.get_api_uri(),
                               provider_config.get_api_version())
    logger.debug('getting cert from uri: %s' % cert_uri)

    # API v2 will only support token auth, but in v1 we can send both
    headers = {}
    if token is not None:
        headers["Authorization"] = 'Token token="{0}"'.format(token)

    res = session.get(cert_uri,
                      verify=provider_config.get_ca_cert_path(),
                      cookies=cookies,
                      timeout=REQUEST_TIMEOUT,
                      headers=headers)
    res.raise_for_status()
    client_cert = res.content

    if not leap_certs.is_valid_pemfile(client_cert):
        # XXX raise more specific exception.
        raise Exception("The downloaded certificate is not a "
                        "valid PEM file")

    mkdir_p(os.path.dirname(path))
    try:
        with open(path, "w") as f:
            f.write(client_cert)
    except IOError as exc:
        logger.error("Error saving client cert: %r" % (exc,))
        raise
    check_and_fix_urw_only(path)
def __init__(self, basedir='~/.config/leap'):
    """
    Initialize the service, making sure its base directory exists.
    """
    service.MultiService.__init__(self)
    basedir = os.path.abspath(os.path.expanduser(basedir))
    if not os.path.isdir(basedir):
        files.mkdir_p(basedir)
    self.basedir = basedir
    # creates self.config
    self.read_config()
def __init__(self, basedir=DEFAULT_BASEDIR):
    """
    Initialize the service, making sure its base directory exists.
    """
    service.MultiService.__init__(self)
    expanded = os.path.abspath(os.path.expanduser(basedir))
    if not os.path.isdir(expanded):
        files.mkdir_p(expanded)
    self.basedir = expanded
    # creates self.config
    self.read_config()
def start(self):
    """
    Start a CouchDB instance for a test.

    Renders the ini template into a fresh temp dir, spawns couchdb and
    blocks until the port it listens on can be parsed from its log.
    """
    self.tempdir = tempfile.mkdtemp(suffix=".couch.test")

    path = os.path.join(os.path.dirname(__file__), "couchdb.ini.template")
    handle = open(path)
    conf = handle.read() % {"tempdir": self.tempdir}
    handle.close()

    confPath = os.path.join(self.tempdir, "test.ini")
    handle = open(confPath, "w")
    handle.write(conf)
    handle.close()

    # create the dirs from the template
    mkdir_p(os.path.join(self.tempdir, "lib"))
    mkdir_p(os.path.join(self.tempdir, "log"))

    args = ["couchdb", "-n", "-a", confPath]
    null = open("/dev/null", "w")
    self.process = subprocess.Popen(
        args, env=None, stdout=null.fileno(), stderr=null.fileno(),
        close_fds=True)

    # find port
    logPath = os.path.join(self.tempdir, "log", "couch.log")
    while not os.path.exists(logPath):
        if self.process.poll() is not None:
            got_stdout, got_stderr = "", ""
            if self.process.stdout is not None:
                got_stdout = self.process.stdout.read()
            if self.process.stderr is not None:
                got_stderr = self.process.stderr.read()
            raise Exception(
                """
couchdb exited with code %d.
stdout:
%s
stderr:
%s"""
                % (self.process.returncode, got_stdout, got_stderr)
            )
        time.sleep(0.01)
    while os.stat(logPath).st_size == 0:
        time.sleep(0.01)

    # FIX: raw string so `\d` is a regex digit class, not an invalid
    # string escape (DeprecationWarning, SyntaxWarning on newer Pythons)
    PORT_RE = re.compile(
        r"Apache CouchDB has started on http://127.0.0.1:(?P<port>\d+)")

    handle = open(logPath)
    m = None
    while m is None:
        line = handle.readline()
        if not line:
            # FIX: the original spun forever here if couchdb died before
            # printing its port line. Bail out on a dead process and back
            # off instead of busy-spinning while tailing the log.
            if self.process.poll() is not None:
                handle.close()
                raise Exception("couchdb exited before reporting its port")
            time.sleep(0.01)
            continue
        m = PORT_RE.search(line)
    handle.close()
    self.port = int(m.group("port"))
def test_get_tag(self):
    expected_tag = base64.urlsafe_b64encode('B' * 16)
    backend = _blobs.FilesystemBlobsBackend(blobs_path=self.tempdir)
    # write a blob whose trailing 16 bytes are the tag...
    blob_path = backend._get_path('user', 'blob_id', '')
    mkdir_p(os.path.split(blob_path)[0])
    with open(blob_path, "w") as f:
        f.write('A' * 40 + 'B' * 16)
    # ...and check that the backend extracts exactly that tag
    tag = yield backend.get_tag('user', 'blob_id')
    self.assertEquals(expected_tag, tag)
def test_get_blob_size(self):
    backend = _blobs.FilesystemBlobsBackend(blobs_path=self.tempdir)
    # create a blob with exactly ten bytes on disk
    blob_path = backend._get_path('user', 'blob_id', '')
    mkdir_p(os.path.split(blob_path)[0])
    with open(blob_path, "w") as f:
        f.write("0123456789")
    # the backend must report that size back
    size = yield backend.get_blob_size('user', 'blob_id', '')
    self.assertEquals(10, size)
def __init__(self, config_file, basedir=DEFAULT_BASEDIR, default_config=""):
    """
    Initialize the config, creating the base directory if needed.
    """
    base = os.path.abspath(os.path.expanduser(basedir))
    if not os.path.isdir(base):
        files.mkdir_p(base)
    self.config_path = os.path.join(base, config_file)
    self.default_config = default_config
    self.read()
def setUp(self):
    """
    Sets up this TestCase with a simple and faked provider instance:

    * runs a threaded reactor
    * loads a mocked ProviderConfig that points to the certs in the
      leap.common.testing module.
    """
    factory = fake_provider.get_provider_factory()
    http = reactor.listenTCP(0, factory)
    https = reactor.listenSSL(
        0, factory,
        fake_provider.OpenSSLServerContextFactory())

    # port 0 above means "pick any free port"; record what we actually got
    get_port = lambda p: p.getHost().port
    self.http_port = get_port(http)
    self.https_port = get_port(https)

    provider = ProviderConfig()
    # autospec'd mocks keep the original call signatures
    provider.get_ca_cert_path = mock.create_autospec(
        provider.get_ca_cert_path)
    provider.get_ca_cert_path.return_value = _get_capath()
    provider.get_api_uri = mock.create_autospec(provider.get_api_uri)
    provider.get_api_uri.return_value = self._get_https_uri()

    loaded = provider.load(path=os.path.join(_here, "test_provider.json"))
    if not loaded:
        raise ImproperlyConfiguredError(
            "Could not load test provider config")

    self.register = srpregister.SRPRegister(provider_config=provider)
    self.provider = provider
    self.TEST_USER = "******"
    self.TEST_PASS = "******"

    # Reset the singleton (name-mangled private attribute)
    srpauth.SRPAuth._SRPAuth__instance = None
    self.auth = srpauth.SRPAuth(self.provider)
    self.auth_backend = self.auth._SRPAuth__instance

    # keep references to the original (un-mocked) session/auth methods so
    # individual tests can patch them and restore afterwards
    self.old_post = self.auth_backend._session.post
    self.old_put = self.auth_backend._session.put
    self.old_delete = self.auth_backend._session.delete

    self.old_start_auth = self.auth_backend._start_authentication
    self.old_proc_challenge = self.auth_backend._process_challenge
    self.old_extract_data = self.auth_backend._extract_data
    self.old_verify_session = self.auth_backend._verify_session
    self.old_auth_preproc = self.auth_backend._authentication_preprocessing
    self.old_get_sid = self.auth_backend.get_session_id
    self.old_cookie_get = self.auth_backend._session.cookies.get
    self.old_auth = self.auth_backend.authenticate

    # HACK: this is needed since it seems that the backend settings path is
    # not using the right path
    mkdir_p('config/leap')
def download_client_cert(provider_config, path, session):
    """
    Downloads the client certificate for each service.

    :param provider_config: instance of a ProviderConfig
    :type provider_config: ProviderConfig
    :param path: the path to download the cert to.
    :type path: str
    :param session: a fetcher.session instance. For the moment we only
                    support requests.sessions
    :type session: requests.sessions.Session
    """
    # TODO we should implement the @with_srp_auth decorator
    # again.
    srp_auth = SRPAuth(provider_config)
    session_id = srp_auth.get_session_id()
    token = srp_auth.get_token()

    cookies = None
    if session_id is not None:
        cookies = {"_session_id": session_id}

    cert_uri = "%s/%s/cert" % (
        provider_config.get_api_uri(),
        provider_config.get_api_version())
    logger.debug('getting cert from uri: %s' % cert_uri)

    headers = {}
    # API v2 will only support token auth, but in v1 we can send both
    if token is not None:
        headers["Authorization"] = 'Token token="{0}"'.format(token)

    response = session.get(
        cert_uri,
        verify=provider_config.get_ca_cert_path(),
        cookies=cookies,
        timeout=REQUEST_TIMEOUT,
        headers=headers)
    response.raise_for_status()
    client_cert = response.content

    if not leap_certs.is_valid_pemfile(client_cert):
        # XXX raise more specific exception.
        raise Exception("The downloaded certificate is not a "
                        "valid PEM file")

    mkdir_p(os.path.dirname(path))
    try:
        with open(path, "w") as f:
            f.write(client_cert)
    except IOError as exc:
        logger.error("Error saving client cert: %r" % (exc, ))
        raise
    check_and_fix_urw_only(path)
def test_delete_blob(self, unlink_mock):
    backend = _blobs.FilesystemBlobsBackend(blobs_path=self.tempdir)
    # create a blob on disk first
    blob_path = backend._get_path('user', 'blob_id', '')
    mkdir_p(os.path.split(blob_path)[0])
    with open(blob_path, "w") as f:
        f.write("bl0b")
    # deleting must unlink both the blob file and its flags file
    yield backend.delete_blob('user', 'blob_id')
    unlink_mock.assert_any_call(backend._get_path('user', 'blob_id'))
    unlink_mock.assert_any_call(
        backend._get_path('user', 'blob_id') + '.flags')
def touch(self, filepath):
    """
    Create an empty-ish file at filepath, making parent dirs as needed.
    """
    folder = os.path.split(filepath)[0]
    if not os.path.isdir(folder):
        mkdir_p(folder)
    # XXX should move to test_basetest
    self.assertTrue(os.path.isdir(folder))

    with open(filepath, 'w') as fp:
        fp.write(' ')

    # XXX should move to test_basetest
    self.assertTrue(os.path.isfile(filepath))
def generate_certificates():
    """
    Generate client and server CURVE certificate files.

    The certificates directory is recreated from scratch and restricted
    to the owner, since it holds secret keys.
    """
    # Create directory for certificates, remove old content if necessary
    if os.path.exists(KEYS_DIR):
        shutil.rmtree(KEYS_DIR)
    mkdir_p(KEYS_DIR)
    # FIX: restrict to 0700 (U:rwx G:--- O:---) like the sibling
    # generate_zmq_certificates helpers do -- secret keys live here
    os.chmod(KEYS_DIR, 0o700)

    # create new keys in certificates dir
    # public_file, secret_file = create_certificates(...)
    zmq.auth.create_certificates(KEYS_DIR, "frontend")
    zmq.auth.create_certificates(KEYS_DIR, "backend")
def maybe_download_ca_cert(self, ignored):
    """
    Fetch the CA certificate unless it is already on disk.

    :rtype: deferred
    """
    path = self._get_ca_cert_path()
    if is_file(path):
        return defer.succeed('ca_cert_path_already_exists')

    mkdir_p(os.path.split(path)[0])
    d = downloadPage(self._get_ca_cert_uri(), path)
    d.addErrback(log.err)
    return d
def setUp(self):
    """
    Sets up this TestCase with a simple and faked provider instance:

    * runs a threaded reactor
    * loads a mocked ProviderConfig that points to the certs in the
      leap.common.testing module.
    """
    factory = fake_provider.get_provider_factory()
    http = reactor.listenTCP(0, factory)
    https = reactor.listenSSL(
        0, factory, fake_provider.OpenSSLServerContextFactory())

    # port 0 above asks the OS for any free port; remember the real ones
    get_port = lambda p: p.getHost().port
    self.http_port = get_port(http)
    self.https_port = get_port(https)

    provider = ProviderConfig()
    # autospec'd mocks preserve the original method signatures
    provider.get_ca_cert_path = mock.create_autospec(
        provider.get_ca_cert_path)
    provider.get_ca_cert_path.return_value = _get_capath()
    provider.get_api_uri = mock.create_autospec(provider.get_api_uri)
    provider.get_api_uri.return_value = self._get_https_uri()

    loaded = provider.load(path=os.path.join(_here, "test_provider.json"))
    if not loaded:
        raise ImproperlyConfiguredError(
            "Could not load test provider config")

    self.register = srpregister.SRPRegister(provider_config=provider)
    self.provider = provider
    self.TEST_USER = "******"
    self.TEST_PASS = "******"

    # Reset the singleton (name-mangled private attribute)
    srpauth.SRPAuth._SRPAuth__instance = None
    self.auth = srpauth.SRPAuth(self.provider)
    self.auth_backend = self.auth._SRPAuth__instance

    # stash the original (un-mocked) session/auth callables so tests can
    # patch them freely and restore them in tearDown
    self.old_post = self.auth_backend._session.post
    self.old_put = self.auth_backend._session.put
    self.old_delete = self.auth_backend._session.delete

    self.old_start_auth = self.auth_backend._start_authentication
    self.old_proc_challenge = self.auth_backend._process_challenge
    self.old_extract_data = self.auth_backend._extract_data
    self.old_verify_session = self.auth_backend._verify_session
    self.old_auth_preproc = self.auth_backend._authentication_preprocessing
    self.old_get_sid = self.auth_backend.get_session_id
    self.old_cookie_get = self.auth_backend._session.cookies.get
    self.old_auth = self.auth_backend.authenticate

    # HACK: this is needed since it seems that the backend settings path is
    # not using the right path
    mkdir_p("config/leap")
def _download_certificate_test_template(self, ifneeded, createcert):
    """
    All download client certificate tests have the same structure, so this
    is a parametrized test for that.

    :param ifneeded: sets _download_if_needed
    :type ifneeded: bool
    :param createcert: if True it creates a dummy file to play the part of
                       a downloaded certificate
    :type createcert: bool

    :returns: the temp eip cert path and the dummy cert contents
    :rtype: tuple of str, str
    """
    pc = ProviderConfig()
    ec = EIPConfig()
    self.eb._provider_config = pc
    self.eb._eip_config = ec

    pc.get_domain = mock.MagicMock(
        return_value="localhost:%s" % (self.https_port))
    pc.get_api_uri = mock.MagicMock(
        return_value="https://%s" % (pc.get_domain()))
    pc.get_api_version = mock.MagicMock(return_value="1")
    # skip https verification; not the point of this test
    pc.get_ca_cert_path = mock.MagicMock(return_value=False)

    path_prefix = tempfile.mkdtemp()
    util.get_path_prefix = mock.MagicMock(return_value=path_prefix)
    EIPConfig.save = mock.MagicMock()
    EIPConfig.load = mock.MagicMock()

    self.eb._download_if_needed = ifneeded

    provider_dir = os.path.join(util.get_path_prefix(),
                                "leap", "providers", "somedomain")
    mkdir_p(provider_dir)
    eip_cert_path = os.path.join(provider_dir, "cert")
    ec.get_client_cert_path = mock.MagicMock(
        return_value=eip_cert_path)

    cert_content = "A"
    if createcert:
        # FIX: the original used `ec` as the file handle here, shadowing
        # the EIPConfig mock bound above
        with open(eip_cert_path, "w") as cert_file:
            cert_file.write(cert_content)
    return eip_cert_path, cert_content
def _download_client_certificates(self, *args):
    """
    Downloads the EIP client certificate for the given provider.

    Skips the download when a valid certificate is already present and
    re-download was not requested; otherwise fetches it from the provider
    API (authenticated via the SRP session cookie) and stores it with
    owner-only read/write permissions.
    """
    leap_assert(self._provider_config, "We need a provider configuration!")
    leap_assert(self._eip_config, "We need an eip configuration!")

    logger.debug("Downloading EIP client certificate for %s" %
                 (self._provider_config.get_domain(),))

    client_cert_path = self._eip_config.\
        get_client_cert_path(self._provider_config,
                             about_to_download=True)

    # For re-download if something is wrong with the cert
    self._download_if_needed = self._download_if_needed and \
        not certs.should_redownload(client_cert_path)

    if self._download_if_needed and \
            os.path.exists(client_cert_path):
        # cert already there and healthy: just enforce sane permissions
        check_and_fix_urw_only(client_cert_path)
        return

    srp_auth = SRPAuth(self._provider_config)
    session_id = srp_auth.get_session_id()
    cookies = None
    if session_id:
        cookies = {"_session_id": session_id}
    cert_uri = "%s/%s/cert" % (
        self._provider_config.get_api_uri(),
        self._provider_config.get_api_version())
    logger.debug('getting cert from uri: %s' % cert_uri)
    res = self._session.get(cert_uri,
                            verify=self._provider_config
                            .get_ca_cert_path(),
                            cookies=cookies,
                            timeout=REQUEST_TIMEOUT)
    res.raise_for_status()
    client_cert = res.content

    if not certs.is_valid_pemfile(client_cert):
        raise Exception(self.tr("The downloaded certificate is not a "
                                "valid PEM file"))

    mkdir_p(os.path.dirname(client_cert_path))
    with open(client_cert_path, "w") as f:
        f.write(client_cert)

    check_and_fix_urw_only(client_cert_path)
def __init__(self, local_path, remote, key, secret, user, token=None,
             cert_file=None):
    """
    Initialize the blob manager.

    :param local_path: The path for the local blobs database.
    :type local_path: str
    :param remote: The URL of the remote storage.
    :type remote: str
    :param key: The key used to encrypt the local blobs database.
    :type key: str
    :param secret: The secret used to encrypt/decrypt blobs.
    :type secret: str
    :param user: The uuid of the user.
    :type user: str
    :param token: The access token for interacting with remote storage.
    :type token: str
    :param cert_file: The path to the CA certificate file.
    :type cert_file: str
    """
    if local_path:
        mkdir_p(os.path.dirname(local_path))
        self.local = SQLiteBlobBackend(local_path, key=key, user=user)
    self.remote = remote
    self.secret = secret
    self.user = user
    self._client = HTTPClient(user, token, cert_file)
def touch(self, filepath):
    """
    Touches a filepath, creating folders along the way if needed.

    :param filepath: path to be touched
    :type filepath: str
    """
    parent = os.path.split(filepath)[0]
    if not os.path.isdir(parent):
        mkdir_p(parent)
    self.assertTrue(os.path.isdir(parent))

    with open(filepath, 'w') as fp:
        fp.write(' ')

    self.assertTrue(os.path.isfile(filepath))
def new_method(*args, **kwargs):
    """
    Serialize calls to the wrapped method with a filesystem lock.
    """
    mkdir_p(os.path.split(path)[0])
    # TODO: evaluate the need to replace this for a readers-writer lock
    fs_lock = defer.DeferredFilesystemLock(path + '.lock')

    def _release(result):
        fs_lock.unlock()
        return result

    d = fs_lock.deferUntilLocked()
    d.addCallback(lambda _: method(*args, **kwargs))
    d.addCallbacks(_release, _release)
    return d
def generate_zmq_certificates():
    """
    Generate client and server CURVE certificate files.
    """
    # wipe any previous certificates and start from an empty directory
    if os.path.exists(KEYS_DIR):
        shutil.rmtree(KEYS_DIR)
    mkdir_p(KEYS_DIR)

    # owner-only access (0700), since secret keys live here
    os.chmod(KEYS_DIR, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)

    # one CURVE keypair for each end of the channel
    for side in ("frontend", "backend"):
        zmq.auth.create_certificates(KEYS_DIR, side)
def get_logger(perform_rollover=False): """ Push to the app stack the needed handlers and return a Logger object. :rtype: logbook.Logger """ # NOTE: make sure that the folder exists, the logger is created before # saving settings on the first run. _base = os.path.join(get_path_prefix(), "leap") mkdir_p(_base) bitmask_log_file = os.path.join(_base, 'bitmask.log') level = logbook.WARNING if flags.DEBUG: level = logbook.NOTSET # This handler consumes logs not handled by the others null_handler = logbook.NullHandler() null_handler.push_application() silencer = SelectiveSilencerFilter() zmq_handler = SafeZMQHandler('tcp://127.0.0.1:5000', multi=True, level=level, filter=silencer.filter) zmq_handler.push_application() file_handler = logbook.RotatingFileHandler( bitmask_log_file, format_string=LOG_FORMAT, bubble=True, filter=silencer.filter, max_size=sys.maxint) if perform_rollover: file_handler.perform_rollover() file_handler.push_application() # don't use simple stream, go for colored log handler instead # stream_handler = logbook.StreamHandler(sys.stdout, # format_string=LOG_FORMAT, # bubble=True) # stream_handler.push_application() stream_handler = ColorizedStderrHandler( level=level, format_string=LOG_FORMAT, bubble=True, filter=silencer.filter) stream_handler.push_application() logger = logbook.Logger('leap') return logger
def _download_config_test_template(self, ifneeded, new):
    """
    All download config tests have the same structure, so this is a
    parametrized test for that.

    :param ifneeded: sets _download_if_needed
    :type ifneeded: bool
    :param new: if True uses time.time() as mtime for the mocked
                eip-service file, otherwise it uses 100 (a really old
                mtime)
    :type new: float or int (will be coersed)
    """
    pc = ProviderConfig()
    pc.get_domain = mock.MagicMock(
        return_value="localhost:%s" % (self.https_port))
    self.eb._provider_config = pc

    pc.get_api_uri = mock.MagicMock(
        return_value="https://%s" % (pc.get_domain()))
    pc.get_api_version = mock.MagicMock(return_value="1")

    # This is to ignore https checking, since it's not the point
    # of this test
    pc.get_ca_cert_path = mock.MagicMock(return_value=False)

    path_prefix = tempfile.mkdtemp()
    util.get_path_prefix = mock.MagicMock(return_value=path_prefix)
    EIPConfig.save = mock.MagicMock()
    EIPConfig.load = mock.MagicMock()

    self.eb._download_if_needed = ifneeded

    provider_dir = os.path.join(util.get_path_prefix(),
                                "leap", "providers",
                                pc.get_domain())
    mkdir_p(provider_dir)
    eip_config_path = os.path.join(provider_dir, "eip-service.json")

    with open(eip_config_path, "w") as ec:
        ec.write("A")

    # set mtime to something really new
    if new:
        os.utime(eip_config_path, (-1, time.time()))
    else:
        os.utime(eip_config_path, (-1, 100))
def maybe_download_ca_cert(self, ignored):
    """
    Download the provider CA certificate if it is not on disk yet.

    :rtype: deferred
    """
    def errback(failure):
        # FIX: errbacks are called with the failure only; the previous
        # signature also took `self`, so the failure landed in `self` and
        # the call blew up with TypeError instead of raising NetworkError.
        raise NetworkError(failure.getErrorMessage())

    path = self._get_ca_cert_path()
    if is_file(path):
        return defer.succeed('ca_cert_path_already_exists')
    uri = self._get_ca_cert_uri()
    mkdir_p(os.path.split(path)[0])

    d = downloadPage(uri, path)
    d.addErrback(errback)
    return d
def _produce_dummy_provider_json(self):
    """
    Creates a dummy provider json on disk in order to test behaviour
    around it (download if newer online, etc)

    :returns: the provider.json path used
    :rtype: str
    """
    provider_dir = os.path.join(util.get_path_prefix(),
                                "leap", "providers",
                                self.pb._domain)
    mkdir_p(provider_dir)

    provider_path = os.path.join(provider_dir, "provider.json")
    with open(provider_path, "w") as fp:
        fp.write("A")
    return provider_path
def __init__(self, path, key=None, user=None):
    """
    Initialize the SQLCipher-backed local blobs database.

    :param path: directory where the blobs database file will live.
    :type path: str
    :param key: raw encryption key for the database; required.
    :type key: str
    :param user: uuid of the user, used to name the database file.
    :type user: str

    :raises ValueError: if no key is given.
    """
    dbname = '%s_blobs.db' % (user or 'soledad')
    self.path = os.path.abspath(
        os.path.join(path, dbname))
    mkdir_p(os.path.dirname(self.path))
    if not key:
        raise ValueError('key cannot be None')
    backend = 'pysqlcipher.dbapi2'
    # the path in the options is a placeholder; the pool below gets the
    # real path. The key is hex-encoded and passed as a raw key.
    opts = sqlcipher.SQLCipherOptions(
        '/tmp/ignored', binascii.b2a_hex(key),
        is_raw_key=True, create=True)
    # each new connection gets the init pragmas and the schema applied
    openfun = partial(pragmas.set_init_pragmas, opts=opts,
                      schema_func=_init_tables)
    self.dbpool = ConnectionPool(
        backend, self.path, check_same_thread=False, timeout=5,
        cp_openfun=openfun, cp_min=2, cp_max=2, cp_name='blob_pool')
def generate_zmq_certificates():
    """
    Generate client and server CURVE certificate files.
    """
    leap_assert(flags.ZMQ_HAS_CURVE, "CurveZMQ not supported!")
    keys_dir = _get_keys_dir()

    # start from a clean certificates directory
    if os.path.exists(keys_dir):
        shutil.rmtree(keys_dir)
    mkdir_p(keys_dir)

    # owner-only access (0700): this directory holds secret keys
    os.chmod(keys_dir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)

    # one CURVE keypair per endpoint
    for endpoint in ("frontend", "backend"):
        zmq.auth.create_certificates(keys_dir, endpoint)
def _consume_stream(self, user, namespace, request):
    """
    Read a stream of blobs from the request body and write each one to
    its path in the filesystem backend.

    The body starts with a JSON line listing (blob_id, size) pairs,
    followed by the concatenated blob contents.
    """
    chunk_size = 2 ** 14
    content = request.content
    incoming_list = json.loads(content.readline())
    for (blob_id, size) in incoming_list:
        db = self._handler
        # TODO: NEEDS SANITIZING
        path = db._get_path(user, blob_id, namespace)
        try:
            mkdir_p(os.path.split(path)[0])
        except OSError as e:
            logger.warn("Got exception trying to create directory: %r" % e)
        with open(path, 'wb') as blob_fd:
            consumed = 0
            while consumed < size:
                data = content.read(min(size - consumed, chunk_size))
                if not data:
                    # truncated stream: stop instead of looping on empty
                    # reads
                    break
                # FIX: count the bytes actually read, not the bytes
                # requested -- a short read would otherwise desynchronize
                # the framing and swallow bytes of the next blob
                consumed += len(data)
                blob_fd.write(data)
def maybe_create_and_get_certificates(basedir, name):
    """
    Generate the needed ZMQ certificates for backend/frontend
    communication if needed.
    """
    assert_zmq_has_curve()
    private_keys_dir = os.path.join(basedir, PRIVATE_KEYS_PREFIX)
    private_key = os.path.join(private_keys_dir, name + ".key_secret")

    if not os.path.isfile(private_key):
        mkdir_p(private_keys_dir)
        zmq.auth.create_certificates(private_keys_dir, name)

        # secret key readable/writable by owner only (0600)
        os.chmod(private_key, stat.S_IRUSR | stat.S_IWUSR)

        # relocate the public half into the public keys directory
        public_keys_dir = os.path.join(basedir, PUBLIC_KEYS_PREFIX)
        mkdir_p(public_keys_dir)
        shutil.move(os.path.join(private_keys_dir, name + ".key"),
                    os.path.join(public_keys_dir, name + ".key"))

    return zmq.auth.load_certificate(private_key)
def maybe_download_ca_cert(self, ignored):
    """
    Download the CA certificate unless it already exists on disk.

    :rtype: deferred
    """
    ca_path = self._get_ca_cert_path()
    if is_file(ca_path):
        return defer.succeed('ca_cert_path_already_exists')

    def on_failure(failure):
        raise NetworkError(failure.getErrorMessage())

    mkdir_p(os.path.split(ca_path)[0])
    # We don't validate the TLS cert for this connection,
    # just check the fingerprint of the ca.cert
    d = downloadPage(self._get_ca_cert_uri(), ca_path)
    d.addCallback(self._reload_http_client)
    d.addErrback(on_failure)
    return d
def write_blob(self, user, blob_id, request, namespace=''):
    """
    Write the uploaded blob in the request body to its filesystem path.

    Responds 409 if the blob already exists and 507 if the user is over
    quota; otherwise streams the request body into the blob file.
    """
    path = self._get_path(user, blob_id, namespace)
    try:
        mkdir_p(os.path.split(path)[0])
    except OSError:
        pass
    if os.path.isfile(path):
        # 409 - Conflict
        request.setResponseCode(409)
        request.write("Blob already exists: %s" % blob_id)
        defer.returnValue(None)
    used = yield self.get_total_storage(user)
    if used > self.quota:
        logger.error("Error 507: Quota exceeded for user: %s" % user)
        request.setResponseCode(507)
        request.write('Quota Exceeded!')
        defer.returnValue(None)
    logger.info('writing blob: %s - %s' % (user, blob_id))
    fbp = FileBodyProducer(request.content)
    # FIX: use a context manager so the file descriptor is closed even if
    # producing the body fails (the original never closed the handle)
    with open(path, 'wb') as blob_fd:
        yield fbp.startProducing(blob_fd)
def save(self, path_list):
    """
    Saves the current configuration to disk.

    :param path_list: list of components that form the relative path to
                      configuration. The absolute path will be calculated
                      depending on the platform.
    :type path_list: list

    :return: True if saved to disk correctly, False otherwise
    """
    directory = os.path.join(self.get_path_prefix(), *path_list[:-1])
    mkdir_p(directory)

    target = os.path.join(directory, path_list[-1])
    try:
        self._config_checker.serialize(target)
    except Exception as e:
        logger.warning("%s" % (e, ))
        raise
    return True
def maybe_download_provider_info(self, replace=False):
    """
    Download the provider.json info from the main domain.
    This SHOULD only be used once with the DOMAIN url.
    """
    # TODO handle pre-seeded providers?
    # or let client handle that? We could move them to bonafide.
    provider_json = self._get_provider_json_path()
    if is_file(provider_json) and not replace:
        return defer.succeed('provider_info_already_exists')

    mkdir_p(os.path.split(provider_json)[0])

    d = downloadPage(self._disco.get_provider_info_uri(),
                     provider_json,
                     method=self._disco.get_provider_info_method())
    d.addCallback(lambda _: self._load_provider_json())
    d.addErrback(log.err)
    return d
def save(self, path_list):
    """
    Saves the current configuration to disk.

    :param path_list: list of components that form the relative path to
                      configuration. The absolute path will be calculated
                      depending on the platform.
    :type path_list: list

    :return: True if saved to disk correctly, False otherwise
    """
    folder = os.path.join(self.get_path_prefix(), *path_list[:-1])
    mkdir_p(folder)
    try:
        self._config_checker.serialize(os.path.join(folder, path_list[-1]))
    except Exception as e:
        logger.warning("%s" % (e,))
        raise
    return True
def _write_blob():
    """
    Write the incoming blob data to disk, enforcing the storage quota.

    Concurrent disk writes are bounded by ``self.semaphore``. The names
    ``path``, ``producer``, ``user`` and ``blob_id`` come from the
    enclosing scope.

    :raises QuotaExceeded: if writing the blob would exceed the user's
                           storage quota.
    """
    # limit the number of concurrent writes to disk.
    # NOTE: acquire is deliberately *outside* the try/finally: if the
    # acquire itself fails we must not release a semaphore we never
    # acquired (the original released unconditionally, which could
    # over-increment the semaphore's token count).
    yield self.semaphore.acquire()
    try:
        try:
            mkdir_p(os.path.split(path)[0])
        except OSError as e:
            logger.warn(
                "Got exception trying to create directory: %r" % e)
        used = yield self.get_total_storage(user)
        # dividing producer.length by 1024 suggests quota accounting is
        # done in KB -- TODO confirm against get_total_storage
        length = producer.length / 1024.0
        if used + length > self.quota:
            raise QuotaExceeded
        logger.info('writing blob: %s - %s' % (user, blob_id))
        with open(path, 'wb') as blobfile:
            yield producer.startProducing(blobfile)
        used += length
        yield self._update_usage(user, used)
    finally:
        self.semaphore.release()
def __init__(self, path, key=None, user=None):
    """
    Initialize the SQLCipher-backed blob database.

    :param path: directory where the database file will live.
    :type path: str
    :param key: the raw encryption key for the database; required.
    :type key: str
    :param user: uuid of the user the database belongs to; used to name
                 the database file (defaults to 'soledad').
    :type user: str

    :raises ValueError: if no key is given.
    """
    # fail fast: validate the key before touching the filesystem, so a
    # bad call does not leave empty directories behind (the original
    # created the directory tree before checking the key).
    if not key:
        raise ValueError('key cannot be None')
    dbname = '%s_blobs.db' % (user or 'soledad')
    self.path = os.path.abspath(os.path.join(path, dbname))
    mkdir_p(os.path.dirname(self.path))
    backend = 'pysqlcipher.dbapi2'
    # the literal path given to SQLCipherOptions is named 'ignored',
    # suggesting only the key-derived pragmas matter -- TODO confirm
    opts = sqlcipher.SQLCipherOptions(
        '/tmp/ignored', binascii.b2a_hex(key),
        is_raw_key=True, create=True)
    pragmafun = partial(pragmas.set_init_pragmas, opts=opts)
    openfun = _sqlcipherInitFactory(pragmafun)
    self.dbpool = ConnectionPool(
        backend, self.path, check_same_thread=False, timeout=5,
        cp_openfun=openfun, cp_min=2, cp_max=2, cp_name='blob_pool')
def maybe_create_and_get_certificates(basedir, name):
    """
    Generate the needed ZMQ certificates for backend/frontend
    communication if needed, and return the loaded certificate.

    :param basedir: base directory under which the public/private key
                    directories live.
    :param name: base name for the certificate files.
    :return: the result of ``zmq.auth.load_certificate`` on the secret
             key file.
    """
    assert_zmq_has_curve()
    secret_dir = os.path.join(basedir, PRIVATE_KEYS_PREFIX)
    secret_key_path = os.path.join(secret_dir, name + ".key_secret")
    if not os.path.isfile(secret_key_path):
        mkdir_p(secret_dir)
        zmq.auth.create_certificates(secret_dir, name)
        # restrict the secret key to owner read/write only
        # (S_IRUSR | S_IWUSR == 0600)
        os.chmod(secret_key_path, stat.S_IRUSR | stat.S_IWUSR)
        # relocate the public half into the public keys directory
        public_dir = os.path.join(basedir, PUBLIC_KEYS_PREFIX)
        mkdir_p(public_dir)
        shutil.move(os.path.join(secret_dir, name + ".key"),
                    os.path.join(public_dir, name + ".key"))
    return zmq.auth.load_certificate(secret_key_path)
# from leap.bitmask.logs.streamtologger import StreamToLogger from leap.bitmask.platform_init import IS_WIN from leap.bitmask.util import get_path_prefix from leap.common.files import mkdir_p from PySide import QtCore import logbook from logbook.more import ColorizedStderrHandler from logbook.queues import ZeroMQSubscriber # NOTE: make sure that the folder exists, the logger is created before saving # settings on the first run. _base = os.path.join(get_path_prefix(), "leap") mkdir_p(_base) BITMASK_LOG_FILE = os.path.join(_base, 'bitmask.log') def get_logger(perform_rollover=False): """ Push to the app stack the needed handlers and return a Logger object. :rtype: logbook.Logger """ level = logbook.WARNING if flags.DEBUG: level = logbook.NOTSET # This handler consumes logs not handled by the others null_handler = logbook.NullHandler()
def download_client_cert(provider_config, path, session, kind="vpn"):
    """
    Downloads the client certificate for each service.

    :param provider_config: instance of a ProviderConfig
    :type provider_config: ProviderConfig
    :param path: the path to download the cert to.
    :type path: str
    :param session: a fetcher.session instance. For the moment we only
                    support requests.sessions
    :type session: requests.sessions.Session
    :param kind: the kind of certificate being requested. Valid values
                 are "vpn" or "smtp".
    :type kind: string

    :raises ValueError: if kind is neither "vpn" nor "smtp".
    :raises Exception: if the downloaded content is not a valid PEM file.
    """
    srp_auth = SRPAuth(provider_config)
    session_id = srp_auth.get_session_id()
    token = srp_auth.get_token()
    # session-cookie auth is only sent when an SRP session exists
    cookies = None
    if session_id is not None:
        cookies = {"_session_id": session_id}
    # choose endpoint, HTTP verb and payload per certificate kind
    if kind == "vpn":
        cert_uri_template = "%s/%s/cert"
        method = "get"
        params = {}
    elif kind == "smtp":
        cert_uri_template = "%s/%s/smtp_cert"
        method = "post"
        params = {"address": srp_auth.get_username()}
    else:
        raise ValueError("Incorrect value passed to kind parameter")
    cert_uri = cert_uri_template % (
        provider_config.get_api_uri(),
        provider_config.get_api_version())
    logger.debug("getting %s cert from uri: %s" % (kind, cert_uri))
    headers = {}
    # API v2 will only support token auth, but in v1 we can send both
    if token is not None:
        headers["Authorization"] = "Token token={0}".format(token)
    call = getattr(session, method)
    # NOTE(review): the payload is passed both as `params` (query string)
    # and `data` (request body); presumably only one is meaningful per
    # method -- confirm before changing.
    res = call(
        cert_uri,
        verify=provider_config.get_ca_cert_path(),
        cookies=cookies,
        params=params,
        timeout=REQUEST_TIMEOUT,
        headers=headers,
        data=params,
    )
    res.raise_for_status()
    client_cert = res.content
    if not leap_certs.is_valid_pemfile(client_cert):
        # XXX raise more specific exception.
        raise Exception("The downloaded certificate is not a "
                        "valid PEM file")
    # persist the certificate and tighten its permissions
    mkdir_p(os.path.dirname(path))
    try:
        with open(path, "w") as f:
            f.write(client_cert)
    except IOError as exc:
        logger.error("Error saving client cert: %r" % (exc,))
        raise
    check_and_fix_urw_only(path)
def _download_provider_info(self, *args):
    """
    Downloads the provider.json definition.

    Fetches provider.json either from the main domain or, when a cached
    copy exists, from the provider's API endpoint verified against the
    provider CA. Pinned providers get their hardcoded definition and CA
    cert written out first.

    :raises UnsupportedClientVersionError: if the provider requires a
        newer client than this one.
    :raises UnsupportedProviderAPI: if the provider's API version is not
        supported.
    """
    leap_assert(self._domain,
                "Cannot download provider info without a domain")
    logger.debug("Downloading provider info for %r" % (self._domain))

    # --------------------------------------------------------------
    # TODO factor out with the download routines in services.
    # Watch out! We're handling the verify paramenter differently here.
    headers = {}
    domain = self._domain.encode(sys.getfilesystemencoding())
    provider_json = os.path.join(util.get_path_prefix(),
                                 get_provider_path(domain))

    # pinned providers ship with a hardcoded definition and CA cert;
    # write them out if no local provider.json exists yet
    if domain in PinnedProviders.domains() and \
            not os.path.exists(provider_json):
        mkdir_p(os.path.join(os.path.dirname(provider_json),
                             "keys", "ca"))
        cacert = os.path.join(os.path.dirname(provider_json),
                              "keys", "ca", "cacert.pem")
        PinnedProviders.save_hardcoded(domain, provider_json, cacert)

    # send if-modified-since only when we already have a local copy
    mtime = get_mtime(provider_json)
    if self._download_if_needed and mtime:
        headers['if-modified-since'] = mtime

    uri = "https://%s/%s" % (self._domain, "provider.json")
    verify = self.verify

    if mtime:  # the provider.json exists
        # So, we're getting it from the api.* and checking against
        # the provider ca.
        try:
            provider_config = ProviderConfig()
            provider_config.load(provider_json)
            uri = provider_config.get_api_uri() + '/provider.json'
            verify = provider_config.get_ca_cert_path()
        except MissingCACert:
            # no ca? then download from main domain again.
            pass

    if verify:
        verify = verify.encode(sys.getfilesystemencoding())
    logger.debug("Requesting for provider.json... "
                 "uri: {0}, verify: {1}, headers: {2}".format(
                     uri, verify, headers))
    res = self._session.get(uri.encode('idna'), verify=verify,
                            headers=headers, timeout=REQUEST_TIMEOUT)
    res.raise_for_status()
    logger.debug("Request status code: {0}".format(res.status_code))

    min_client_version = res.headers.get(self.MIN_CLIENT_VERSION, '0')

    # Not modified
    if res.status_code == 304:
        logger.debug("Provider definition has not been modified")
    # --------------------------------------------------------------
    # end refactor, more or less...
    # XXX Watch out, have to check the supported api yet.
    else:
        if flags.APP_VERSION_CHECK:
            # TODO split
            if not provider.supports_client(min_client_version):
                self._signaler.signal(
                    self._signaler.prov_unsupported_client)
                raise UnsupportedClientVersionError()
        provider_definition, mtime = get_content(res)

        provider_config = ProviderConfig()
        provider_config.load(data=provider_definition, mtime=mtime)
        provider_config.save(["leap", "providers",
                              domain, "provider.json"])

        if flags.API_VERSION_CHECK:
            # TODO split
            api_version = provider_config.get_api_version()
            if provider.supports_api(api_version):
                logger.debug("Provider definition has been modified")
            else:
                api_supported = ', '.join(provider.SUPPORTED_APIS)
                error = ('Unsupported provider API version. '
                         'Supported versions are: {0}. '
                         'Found: {1}.').format(api_supported, api_version)
                logger.error(error)
                self._signaler.signal(self._signaler.prov_unsupported_api)
                raise UnsupportedProviderAPI(error)