def get_test_roots(self):
    """Find test directories, skipping nested dirs and anything marked to
    skip under [pytest]:norecursedirs.

    :returns: list of test-root directories, shortest paths first.
    """
    from path import path
    cwd = path.getcwd()
    no_recurse = []
    cfg = config.get_pkg_cfg_parser()
    if cfg.has_section('pytest') and \
            cfg.has_option('pytest', 'norecursedirs'):
        # Plain loop instead of a side-effect list comprehension
        for pattern in cfg.get('pytest', 'norecursedirs').split():
            no_recurse.extend(cwd.glob(pattern))
        no_recurse = [i.abspath() for i in no_recurse]
    # Shortest paths first so that a prefix match below reliably means
    # "nested under an already-accepted root"
    test_dirs = sorted(cwd.walkdirs(CONFIG.test_dirname), key=len)
    res = []
    for candidate in test_dirs:
        # Skip anything nested under an accepted root or an excluded dir
        # (replaces the raise/except-ValueError control-flow of the original)
        if not any(candidate.startswith(j) for j in res + no_recurse):
            res.append(candidate)
    log.debug("Test roots: {0}".format(res))
    return res
def __init__(self, script=None, log=None):
    '''
    By default batch scripts and job streams are written to working directory.

    :param script: directory for generated batch scripts (default: cwd)
    :param log: directory for job log output (default: cwd)
    '''
    # Normalise both directories to absolute paths and create them on demand.
    self.scriptDir = path(script or path.getcwd()).abspath()
    self.logDir = path(log or path.getcwd()).abspath()
    self.scriptDir.mkdir_p()
    self.logDir.mkdir_p()
    # Short random id used to make script names unique.
    # NOTE(review): uuid4().get_hex() is Python 2 only (it is the .hex
    # attribute on Python 3) -- confirm the target interpreter.
    self.uid = uuid.uuid4().get_hex()[:self.uid_length]
def __init__(self, ctx, interpreter, properties, params, iskill = False,\
    Popen = subprocess.Popen, callback_cast = omero.grid.ProcessCallbackPrx.uncheckedCast,\
    omero_home = path.getcwd()):
    """
    Popen and callback_cast are primarily for testing.

    NOTE(review): the omero_home default is evaluated once, at import time,
    so it is the cwd of the importing process, not of the caller at call
    time -- confirm this is intended.
    """
    omero.util.SimpleServant.__init__(self, ctx)
    self.omero_home = omero_home #: Location for OMERO_HOME/lib/python
    self.interpreter = interpreter #: Executable which will be used on the script
    self.properties = properties #: Properties used to create an Ice.Config
    self.params = params #: JobParams for this script. Possibly None if a ParseJob
    self.iskill = iskill #: Whether or not, cleanup should kill the session
    self.Popen = Popen #: Function which should be used for creating processes
    self.callback_cast = callback_cast #: Function used to cast all ProcessCallback proxies
    # Non arguments (mutable state)
    self.rcode = None #: return code from popen
    self.callbacks = {} #: dictionary from id strings to callback proxies
    self.popen = None #: process. if None, then this instance isn't alive.
    self.pid = None #: pid of the process. Once set, isn't nulled.
    self.started = None #: time the process started
    self.stopped = None #: time of deactivation
    self.final_status = None #: status which will be sent on set_job_status
    # Non arguments (immutable state)
    self.uuid = properties["omero.user"] #: session this instance is tied to
    # More fields set by these methods
    self.make_files()
    self.make_env()
    self.make_config()
    self.logger.info("Created %s in %s" % (self.uuid, self.dir))
def __call__(self):
    """Pip-install the package reference named in the trigger file into the
    directory containing the target file, tracking every path created so it
    can be used later in sign()."""
    # install package reference in trigger file
    # in place directory of target
    # XXX: Should this map multiline to "-r", self.entity
    spec = self.entity.text().strip()
    target = self.target_file.dirname()
    log.debug("pip installing {} as {}".format(spec, target))
    cwd = path.getcwd()
    with utils.tempdir() as temp_dir:
        # We do this dance so we don't have
        # to guess package and .egg file names
        # we move everything in the tempdir to the target
        # and track it for later use in sign()
        utils.Process(("pip", "install", "-t", temp_dir, spec)).throw_on_error()()
        dirs = temp_dir.listdir()
        self._tracked = []
        for d in dirs:
            rp = d.relpath(temp_dir)
            dst = cwd / target / rp
            if dst.exists():
                # replace any previous install of the same entry
                if dst.isdir():
                    dst.rmtree_p()
                elif dst.isfile():
                    dst.remove()
            if not target.exists():
                target.makedirs_p()
            # Fix: use the module logger (as two lines above and as the
            # sibling installers do), not the root logger via logging.debug
            log.debug("Installer moving {} to {}".format(d, dst))
            d.move(dst)
            self._tracked.append(dst)
def get_fname(self):
    """Return the absolute path of the schematic filename argument,
    resolved relative to the directory of the source document."""
    fname_sch = str(self.arguments[0])
    cwd = path.getcwd()
    os.chdir(path(get_src(self)).parent)
    try:
        # expand()/abspath() are evaluated relative to the source doc dir
        fname_sch = path(fname_sch).expand().abspath()
    finally:
        # Fix: always restore the working directory, even on error
        os.chdir(cwd)
    return fname_sch
def finalize_options(self): from path import path # Where the tests are found self.test_dir = path.getcwd() / 'tests' # Where in the build tree they're going build_py = self.get_finalized_command('build_py') self.build_lib = path(build_py.build_lib) self.dest_dir = self.build_lib / CONFIG.test_egg_namespace / \ (self.distribution.get_name().replace('.', '/')) # This stops the regular bdist builder from triggering, allowing us to # craft our own package underneath the regular build tree self.skip_build = True # Adjust the package metadata to suit our test package self.old_name = self.distribution.metadata.name self.distribution.metadata.name = 'test.%s' % self.distribution.metadata.name self.distribution.namespace_packages = [CONFIG.test_egg_namespace] + \ ['%s.%s' % (CONFIG.test_egg_namespace, i) for i in self.distribution.namespace_packages] self.distribution.entry_points = {} # Set the install requirements to be the test requirements of the original, # plus a direct pin to the original version. self.old_version = self.get_finalized_command('egg_info').egg_version self.distribution.install_requires = self.distribution.tests_require + \ ['%s==%s' % (self.old_name, self.old_version)] _bdist_egg.finalize_options(self)
def run(self):
    """Build a gcov-instrumented interpreter, run the configured test
    suites under it, then post-process coverage with gcovr (tabular report
    to stdout plus an XML report file)."""
    self.fetch_build_eggs(["gcovr"])
    self.execute(self.clean_gcov_base, (), "Cleaning gcov directory")
    interpreter_filename, uses_cython = self.build_gcov_interpreter()
    log.info("built interpreter: {0}".format(interpreter_filename))
    self.execute(self.fetch_requirements, [], "Fetching test requirements")
    pytest_args, doctest_args = self.get_args()
    if self.doctest:
        self.execute(self.run_gcov_pytest,
                     (doctest_args, interpreter_filename),
                     "Running doctests")
    if self.all or self.unit or self.integration:
        self.execute(self.run_gcov_pytest,
                     (pytest_args, interpreter_filename),
                     "Running tests")
    if uses_cython:
        # cython emits #line references to 'cython_utility' that confuse gcovr
        log.info("Creating dummy cython_utility for gcovr")
        from path import path
        open(path.getcwd() / 'cython_utility', 'w').close()
    import re
    gcovr_args = [os.path.join(sys.exec_prefix, "bin", "gcovr"),
                  self.gcov_base,
                  "--root=.",
                  "--exclude=" + re.escape(self.gcov_base) + "/.*",
                  "--exclude=cython_utility",
                  # Fix: raw string -- '\.' is an invalid escape sequence
                  # in a plain string literal
                  r"--exclude=.*\.egg/.*"]
    self.spawn(gcovr_args)  # tabular to stdout
    self.spawn(gcovr_args + ["--xml", "--output=" + self.gcov_coverage_file])
def init(args):
    """Initialise an engineer site in the current working directory.

    Refuses to touch a non-empty directory unless ``args.force`` is set,
    in which case the contents are deleted first.  With ``args.no_sample``
    only a minimal layout is created; otherwise the bundled sample site is
    mirrored into the target.  Always terminates the process via exit().
    """
    from engineer import __file__ as package_file, version
    logger = logging.getLogger('engineer.engine.init')
    sample_site_path = path(package_file).dirname() / 'sample_site'
    target = path.getcwd()
    if target.listdir() and not args.force:
        logger.warning("Target folder %s is not empty." % target)
        exit()
    elif args.force:
        logger.info("Deleting folder contents.")
        try:
            for item in target.dirs():
                item.rmtree()
            for item in target.files():
                item.remove()
        except Exception as e:
            logger.error("Couldn't delete folder contents - aborting.")
            logger.exception(e)
            exit()
    from engineer.util import mirror_folder, ensure_exists
    if args.no_sample:
        # Minimal layout: posts dir plus the sample config only
        ensure_exists(target / 'posts')
        (sample_site_path / 'config.yaml').copyfile(target / 'config.yaml')
    else:
        mirror_folder(sample_site_path, target)
    logger.console("Initialization complete.")
    exit()
def build_config_path(script_name, scope='local'):
    """
    <public> Build the path to the config file.

    :param str script_name: The name of the script/program, usually `__file__`
    :param str scope: 'local' (cwd), 'global' ($HOME), or an existing
                      directory path containing the config file
    :raises ValueError: if `scope` is neither 'local', 'global' nor an
                        existing directory path
    """
    # Figure out the default .rc config name from the script name
    rc_name = '.{}rc'.format(path(script_name).basename().replace('.py', ''))
    if scope == 'local':
        # Save absolute path to the current directory
        dir_path = path.getcwd()
    elif scope == 'global':
        # Assume 'global', meaning the config file is placed in the home dir.
        dir_path = path('~').expanduser()
    else:
        # Check if scope is defined as a directory
        dir_path = path(scope)
        # The path needs to be an existing directory path
        if not dir_path.isdir():
            # Fix: the original message was missing a space between the
            # concatenated literals ("or anexisting")
            raise ValueError("'{}' must be either 'local', 'global' or an "
                             "existing directory path.".format(scope))
    return os.path.join(dir_path, rc_name)
def __init__(self, ctx, interpreter, properties, params, iskill = False,\
    Popen = subprocess.Popen, callback_cast = omero.grid.ProcessCallbackPrx.uncheckedCast,\
    omero_home = path.getcwd()):
    """
    Popen and callback_cast are primarily for testing.

    NOTE(review): the omero_home default is evaluated once, at import time,
    so it is the cwd of the importing process, not of the caller at call
    time -- confirm this is intended.
    """
    omero.util.SimpleServant.__init__(self, ctx)
    self.omero_home = omero_home #: Location for OMERO_HOME/lib/python
    self.interpreter = interpreter #: Executable which will be used on the script
    self.properties = properties #: Properties used to create an Ice.Config
    self.params = params #: JobParams for this script. Possibly None if a ParseJob
    self.iskill = iskill #: Whether or not, cleanup should kill the session
    self.Popen = Popen #: Function which should be used for creating processes
    self.callback_cast = callback_cast #: Function used to cast all ProcessCallback proxies
    # Non arguments (mutable state)
    self.rcode = None #: return code from popen
    self.callbacks = {} #: dictionary from id strings to callback proxies
    self.popen = None #: process. if None, then this instance isn't alive.
    self.pid = None #: pid of the process. Once set, isn't nulled.
    self.started = None #: time the process started
    self.stopped = None #: time of deactivation
    self.final_status = None #: status which will be sent on set_job_status
    # Non arguments (immutable state)
    self.uuid = properties[
        "omero.user"] #: session this instance is tied to
    # More fields set by these methods
    self.make_files()
    self.make_env()
    self.make_config()
    self.logger.info("Created %s in %s" % (self.uuid, self.dir))
def test_start():
    """Build the docker image and run every configured test suite through
    the SuitesScheduler; returns the scheduler for inspection."""
    # Start from a clean environments directory
    if os.path.exists(SUITE_ENVS_DIR):
        shutil.rmtree(SUITE_ENVS_DIR)
    with open(os.environ[TEST_SUITES_PATH]) as f:
        # NOTE(review): yaml.load without an explicit Loader can construct
        # arbitrary objects; acceptable for a trusted local file, but
        # consider yaml.safe_load.
        suites_yaml = yaml.load(f.read())
    variables = suites_yaml.get("variables", {})
    build_docker_image()
    envs_dir = path.getcwd() / SUITE_ENVS_DIR
    # One TestSuite per entry, each with its own env dir under envs_dir
    test_suites = [
        TestSuite(suite_name, suite_def, envs_dir / suite_name, variables)
        for suite_name, suite_def in suites_yaml["test_suites"].iteritems()
    ]
    scheduler = SuitesScheduler(
        test_suites,
        suites_yaml["handler_configurations"],
        scheduling_interval=SCHEDULER_INTERVAL,
        optimize=True,
        after_suite_callback=copy_xunit_report,
        suite_timeout=60 * 60 * 5,  # 5 hours per suite
    )
    scheduler.run()
    return scheduler
def test_start():
    """Build the docker image and run every configured test suite through
    the SuitesScheduler; returns the scheduler for inspection."""
    # Start from a clean environments directory
    if os.path.exists(SUITE_ENVS_DIR):
        shutil.rmtree(SUITE_ENVS_DIR)
    with open(os.environ[TEST_SUITES_PATH]) as f:
        # NOTE(review): yaml.load without an explicit Loader can construct
        # arbitrary objects; acceptable for a trusted local file, but
        # consider yaml.safe_load.
        suites_yaml = yaml.load(f.read())
    variables = suites_yaml.get('variables', {})
    build_docker_image()
    envs_dir = path.getcwd() / SUITE_ENVS_DIR
    # One TestSuite per entry, each with its own env dir under envs_dir
    test_suites = [
        TestSuite(suite_name, suite_def, envs_dir / suite_name, variables)
        for suite_name, suite_def in suites_yaml['test_suites'].iteritems()
    ]
    scheduler = SuitesScheduler(test_suites,
                                suites_yaml['handler_configurations'],
                                scheduling_interval=SCHEDULER_INTERVAL,
                                optimize=True,
                                after_suite_callback=copy_xunit_report,
                                suite_timeout=60 * 60 * 5)  # 5 hours per suite
    scheduler.run()
    return scheduler
def __init__(self, ctx, needs_session = True,
             use_session = None, accepts_list = None, cfg = None,
             omero_home = path.getcwd(), category = None):
    """Initialise the processor servant.

    NOTE(review): the omero_home default is evaluated once at import time
    (cwd of the importing process) -- confirm this is intended.
    """
    if accepts_list is None:
        accepts_list = []
    self.category = category #: Category to be used w/ ProcessI
    self.omero_home = omero_home
    # Extensions for user-mode processors (ticket:1672)
    self.use_session = use_session
    """
    If set, this session will be returned from internal_session and
    the "needs_session" setting ignored.
    """
    if self.use_session:
        needs_session = False
    self.accepts_list = accepts_list
    """
    A list of contexts which will be accepted by this user-mode
    processor.
    """
    omero.util.Servant.__init__(self, ctx, needs_session = needs_session)
    if cfg is None:
        # Default Ice config lives at OMERO_HOME/etc/ice.config
        self.cfg = os.path.join(omero_home, "etc", "ice.config")
        self.cfg = os.path.abspath(self.cfg)
    else:
        self.cfg = cfg
    # Keep this session alive until the processor is finished
    self.resources.add( UseSessionHolder(use_session) )
def __call__(self):
    """Pip-install the package reference named in the trigger file into the
    directory containing the target file, tracking every path created so it
    can be used later in sign()."""
    # install package reference in trigger file
    # in place directory of target
    # XXX: Should this map multiline to "-r", self.entity
    spec = self.entity.text().strip()
    target = self.target_file.dirname()
    log.debug("pip installing {} as {}".format(spec, target))
    cwd = path.getcwd()
    with utils.tempdir() as temp_dir:
        # We do this dance so we don't have
        # to guess package and .egg file names
        # we move everything in the tempdir to the target
        # and track it for later use in sign()
        utils.Process(
            ("pip", "install", "-t", temp_dir, spec)).throw_on_error()()
        dirs = temp_dir.listdir()
        self._tracked = []
        for d in dirs:
            rp = d.relpath(temp_dir)
            dst = cwd / target / rp
            if dst.exists():
                # replace any previous install of the same entry
                if dst.isdir():
                    dst.rmtree_p()
                elif dst.isfile():
                    dst.remove()
            if not target.exists():
                target.makedirs_p()
            # Fix: use the module logger (as two lines above and as the
            # sibling installers do), not the root logger via logging.debug
            log.debug("Installer moving {} to {}".format(d, dst))
            d.move(dst)
            self._tracked.append(dst)
def test_linote_checkdir(self):
    """Linote checkdir: a non-creatable path is reported false (and not
    created); a creatable path is reported true and gets created."""
    self.linote.checkdir("/xxx/yyy/zzz").should.be_false()
    path("/xxx/yyy/zzz").exists().should.be_false()
    existing_path = path.getcwd().joinpath('testingdir')
    self.linote.checkdir(existing_path).should.be_true()
    path(existing_path).exists().should.be_true()
    # Clean up the directory created by checkdir above
    path(existing_path).rmdir_p()
def testMethods(self):
    # .abspath() -- resolves os.curdir to the current working directory
    self.assertEqual(path(os.curdir).abspath(), os.getcwd())
    # .getcwd() -- returns a path instance equal to os.getcwd()
    cwd = path.getcwd()
    self.assert_(isinstance(cwd, path))
    self.assertEqual(cwd, os.getcwd())
def do_lsl(self, dir=""):
    """List all files in a local directory.

    :param dir: (Optional) Path to run ls on, uses current working directory if omitted.
    """
    # Fall back to the current working directory when no path was given.
    target = dir or path.getcwd()
    entries = path(target).files()
    pretty_ls([str(entry) for entry in entries])
def __init__(self, basedir=path.getcwd(), missing_log=None):
    # NOTE(review): the basedir default is evaluated once at import time
    # (cwd of the importing process) -- confirm this is intended.
    self.basedir = basedir
    # Copy the class-level mapping so instance edits don't mutate the class
    self.highlight_mimetypes = dict(self.highlight_mimetypes)
    self.mimetypes = mimetypes.MimeTypes()
    self._fix_mimetypes()
    # Pygments HTML formatter plus the combined CSS for .highlight blocks
    self.hl_formatter = pygments.formatters.HtmlFormatter()
    self.hl_style_defs = self._hl_base_style + \
        self.hl_formatter.get_style_defs('.highlight')
    self._missing_log = missing_log
def _create(self, sql_directory, db_vers, db_patch, password_hash, args,
            location=None):
    """Generate the SQL script for the given DB version/patch, writing to
    args.file when provided or to <cwd>/<vers>__<patch>.sql otherwise."""
    sql_directory = self._sql_directory(db_vers, db_patch)
    if not sql_directory.exists():
        self.ctx.die(2, "Invalid Database version/patch: %s does not exist"
                     % sql_directory)
    if args and args.file:
        output = args.file
        script = "<filename here>"
    else:
        script = "%s__%s.sql" % (db_vers, db_patch)
        location = path.getcwd() / script
        output = open(location, "w")
        self.ctx.out("Saving to " + location)
    try:
        dbprofile = self._db_profile()
        header = sql_directory / ("%s-header.sql" % dbprofile)
        footer = sql_directory / ("%s-footer.sql" % dbprofile)
        if header.exists():
            # 73 multiple DB support. OMERO 4.3+
            cfg = {"TIME": time.ctime(time.time()),
                   "DIR": sql_directory,
                   "SCRIPT": script}
            self._copy(header, output, str, cfg)
            self._copy(sql_directory / "schema.sql", output, str)
            self._copy(sql_directory / "views.sql", output, str)
            self._copy(footer, output, self._make_replace(password_hash,
                                                          db_vers, db_patch), cfg)
        else:
            # OMERO 4.2.x and before
            output.write("""
--
-- GENERATED %s from %s
--
-- This file was created by the bin/omero db script command
-- and contains an MD5 version of your OMERO root users's password.
-- You should think about deleting it as soon as possible.
--
-- To create your database:
--
--     createdb omero
--     createlang plpgsql omero
--     psql omero < %s
--
BEGIN;
""" % (time.ctime(time.time()), sql_directory, script))
            self._copy(sql_directory / "schema.sql", output, str)
            self._copy(sql_directory / "data.sql", output,
                       self._make_replace(password_hash, db_vers, db_patch))
            self._copy(sql_directory / "views.sql", output, str)
            output.write("COMMIT;\n")
    finally:
        # Flush always; only close if we opened a real file above
        output.flush()
        if output != sys.stdout:
            output.close()
def set_profile(profile):
    """Apply the configuration for the 'test' or 'dev' profile to the
    global config object ``c``."""
    if profile == "test" or profile == "dev":
        # Both profiles load client plugins from the sibling bespinclient checkout
        client_plugin_path = path.getcwd() / ".." / "bespinclient" / "plugins"
        c.plugin_path = [dict(name="supported", path=client_plugin_path / "supported"),
                         dict(name="thirdparty", path=client_plugin_path / "thirdparty"),
                         dict(name="labs", path=client_plugin_path / "labs"),
                         dict(name="boot", path=client_plugin_path / "boot",
                              skip_unless_only=True)]
    if profile == "test":
        # this import will install the bespin_test store
        c.dburl = "sqlite://"
        c.fsroot = os.path.abspath("%s/../testfiles" % os.path.dirname(__file__))
        c.async_jobs = False
        c.mobwrite_implementation = "MobwriteInProcess"
        c.fslevels = 0
    elif profile == "dev":
        c.dburl = "sqlite:///%s" % (os.path.abspath("devdata.db"))
        c.fsroot = os.path.abspath("%s/../devfiles" % os.path.dirname(__file__))
        # Verbose logging to a rotating file for development
        root_log = logging.getLogger()
        root_log.setLevel(logging.DEBUG)
        file_handler = logging.handlers.RotatingFileHandler(c.log_file)
        file_handler.setFormatter(logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s"))
        root_log.addHandler(file_handler)
        paste_log = logging.getLogger("paste.httpserver.ThreadPool")
        paste_log.setLevel(logging.ERROR)
        # turn off the secure cookie, because localhost connections
        # will be HTTP
        c.secure_cookie = False
        # turn off current_domain_cookie because of the development proxy setup
        c.current_domain_cookie = False
        c.use_uuid_as_dir_identifier = False
        c.default_quota = 10000
        c.log_requests_to_stdout = True
        c.log_to_stdout = True
        c.mobwrite_implementation = "MobwriteInProcess"
        c.async_jobs = False
        c.fslevels = 0
        c.base_url = "http://localhost:8080/"
        c.email_host = None
        c.vcs_timeout = -1
        # Dev also exposes the testing plugins alongside the regular ones
        c.test_plugin_path = [dict(name="testing", path=client_plugin_path / "testing")]
        c.plugin_path.extend(c.test_plugin_path)
        c.server_base_url = "/"
def run(self):
    """Create the suite environment, then start the suite inside its docker
    container via vagrant, keeping a handle on the log-follower process."""
    logger.info("Creating environment for suite: {0}".format(self.suite_name))
    self.create_env()
    logger.info("Starting suite in docker container: {0}".format(self.suite_name))
    cwd = path.getcwd()
    try:
        # vagrant must run from the suite's working directory
        os.chdir(self.suite_work_dir)
        vagrant.up().wait()
        # Follow container logs in the background; keep the process handle
        self.process = vagrant("docker-logs", f=True, _bg=True).process
    finally:
        # Always restore the original working directory
        os.chdir(cwd)
def collectstatic(dry_run, input):
    """
    Collect static assets for deployment. This intentionally has the same
    call signature as Django's collectstatic command, so that Heroku's
    Python buildpack will call it automatically.
    """
    # if we were deployed without node.js support, raise error
    try:
        sp.check_call(["which", "node"], stdout=sp.PIPE)
    except sp.CalledProcessError:
        raise RuntimeError("cannot collectstatic; node is not installed")
    if dry_run:
        # do nothing -- this is just the Python buildpack checking if we
        # support collectstatic
        return
    static_dir = path.getcwd() / "seamless_karma" / "static"
    sp.call(["../../node_modules/bower/bin/bower", "install"], cwd=static_dir)
    # NOTE(review): unlike the bower call above, r.js runs in the current
    # working directory rather than static_dir -- confirm this is intended.
    sp.call(["./node_modules/requirejs/bin/r.js", "-o", "amd.build.js"])
    # copy optimized.js based on hash of contents
    optimized_js = static_dir / "scripts" / "optimized.js"
    text = optimized_js.text()
    hash = hashlib.md5(text).hexdigest()[0:8]
    optimized_hash = optimized_js.parent / "optimized.{}.js".format(hash)
    # if we have a sourcemap comment, rewrite it
    text = text.replace(
        "//# sourceMappingURL=optimized.js.map",
        "//# sourceMappingURL=optimized.{}.js.map".format(hash)
    )
    print("Copying {src} to {dest}".format(
        src=optimized_js.name, dest=optimized_hash.name))
    optimized_hash.write_text(text)
    # if we have a sourcemap, copy it too
    sourcemap = optimized_js + ".map"
    if sourcemap.exists():
        sourcemap_hash = optimized_hash + ".map"
        print("Copying {src} to {dest}".format(
            src=sourcemap.name, dest=sourcemap_hash.name))
        sourcemap.copy(sourcemap_hash)
    # update optimized.latest.js symlink
    latest_js = optimized_hash.parent / "optimized.latest.js"
    if latest_js.exists():
        latest_js.remove()
    print("Updating {name} symlink".format(name=latest_js.name))
    os.symlink(optimized_hash.name, latest_js)
    # and latest sourcemap symlink
    latest_sourcemap = latest_js + ".map"
    if latest_sourcemap.exists():
        latest_sourcemap.remove()
    if sourcemap.exists():
        print("Updating {name} symlink".format(name=latest_sourcemap.name))
        os.symlink(sourcemap_hash.name, latest_sourcemap)
def test_import_ex01():
    #for example 01 only
    import importlib
    import sys
    # NOTE(review): hard-coded developer path; this test only works on a
    # machine with msml checked out at this exact location.
    sys.path.append("/home/weigl/workspace/msml/src/") #msml
    # Generated code lives under ./gen relative to the cwd
    sys.path.append(path.getcwd()/"gen")
    sq = importlib.import_module("simple") #load square
    # Python 2 print statements; results are eyeballed, not asserted
    print "ADD:", sq.add(2, 3), "==5"
    print "SQR:", sq.square(2), "==4"
    print
def wget(url, directory='.'):
    """this will download a file with wget optionally into a path of your
    choosing, by default it's in the current directory.

    :param url: URL passed straight to wget
    :param directory: target directory (string or path object)
    """
    here = path.getcwd()
    # Fix: the default '.' is a plain str, which has no relpath()/dirname();
    # wrap so both strings and path objects are accepted.
    directory = path(directory)
    if directory.relpath() == '.':
        directory = here
    # Fix: chdir into the requested directory itself, not its parent
    # (directory.dirname() downloaded into the parent directory).
    os.chdir(directory)
    try:
        cmd = ('wget', url)
        subprocess.call(cmd)
    finally:
        # Always restore the original working directory
        os.chdir(here)
def collectstatic(dry_run, input):
    """
    Collect static assets for deployment. This intentionally has the same
    call signature as Django's collectstatic command, so that Heroku's
    Python buildpack will call it automatically.
    """
    # if we were deployed without node.js support, raise error
    try:
        sp.check_call(["which", "node"], stdout=sp.PIPE)
    except sp.CalledProcessError:
        raise RuntimeError("cannot collectstatic; node is not installed")
    if dry_run:
        # do nothing -- this is just the Python buildpack checking if we
        # support collectstatic
        return
    static_dir = path.getcwd() / "seamless_karma" / "static"
    sp.call(["../../node_modules/bower/bin/bower", "install"], cwd=static_dir)
    # NOTE(review): unlike the bower call above, r.js runs in the current
    # working directory rather than static_dir -- confirm this is intended.
    sp.call(["./node_modules/requirejs/bin/r.js", "-o", "amd.build.js"])
    # copy optimized.js based on hash of contents
    optimized_js = static_dir / "scripts" / "optimized.js"
    text = optimized_js.text()
    hash = hashlib.md5(text).hexdigest()[0:8]
    optimized_hash = optimized_js.parent / "optimized.{}.js".format(hash)
    # if we have a sourcemap comment, rewrite it
    text = text.replace(
        "//# sourceMappingURL=optimized.js.map",
        "//# sourceMappingURL=optimized.{}.js.map".format(hash))
    print("Copying {src} to {dest}".format(src=optimized_js.name,
                                           dest=optimized_hash.name))
    optimized_hash.write_text(text)
    # if we have a sourcemap, copy it too
    sourcemap = optimized_js + ".map"
    if sourcemap.exists():
        sourcemap_hash = optimized_hash + ".map"
        print("Copying {src} to {dest}".format(src=sourcemap.name,
                                               dest=sourcemap_hash.name))
        sourcemap.copy(sourcemap_hash)
    # update optimized.latest.js symlink
    latest_js = optimized_hash.parent / "optimized.latest.js"
    if latest_js.exists():
        latest_js.remove()
    print("Updating {name} symlink".format(name=latest_js.name))
    os.symlink(optimized_hash.name, latest_js)
    # and latest sourcemap symlink
    latest_sourcemap = latest_js + ".map"
    if latest_sourcemap.exists():
        latest_sourcemap.remove()
    if sourcemap.exists():
        print("Updating {name} symlink".format(name=latest_sourcemap.name))
        os.symlink(sourcemap_hash.name, latest_sourcemap)
def run(self):
    """Create the suite environment, then start the suite inside its docker
    container via vagrant, keeping a handle on the log-follower process."""
    logger.info('Creating environment for suite: {0}'.format(
        self.suite_name))
    self.create_env()
    logger.info('Starting suite in docker container: {0}'.format(
        self.suite_name))
    cwd = path.getcwd()
    try:
        # vagrant must run from the suite's working directory
        os.chdir(self.suite_work_dir)
        vagrant.up().wait()
        # Follow container logs in the background; keep the process handle
        self.process = vagrant('docker-logs', f=True, _bg=True).process
    finally:
        # Always restore the original working directory
        os.chdir(cwd)
def __call__(self):
    """Pip-install (--user scheme) the package reference named in the
    trigger file, then relocate the installed files into the charm,
    tracking every created path for later use in sign()."""
    # install package reference in trigger file
    # in place directory of target
    # XXX: Should this map multiline to "-r", self.entity
    spec = self.entity.text().strip()
    target = self.target_file.dirname()
    log.debug("pip installing {} as {}".format(
        spec, target))
    cwd = path.getcwd()
    with utils.tempdir() as temp_dir:
        # We do this dance so we don't have
        # to guess package and .egg file names
        # we move everything in the tempdir to the target
        # and track it for later use in sign()
        # PYTHONUSERBASE redirects the --user install into our tempdir
        localenv = os.environ.copy()
        localenv['PYTHONUSERBASE'] = temp_dir
        utils.Process(("pip", "install", "--user", "--ignore-installed",
                       spec), env=localenv).throw_on_error()()
        self._tracked = []
        # We now manage two classes of explicit mappings
        # When python packages are installed into a prefix
        # we know that bin/* should map to <charmdir>/bin/
        # and lib/python*/site-packages/* should map to
        # <target>/*
        src_paths = ["bin/*", "lib/python*/site-packages/*"]
        temp_dir = path(temp_dir)
        for p in src_paths:
            for d in temp_dir.glob(p):
                if not d.exists():
                    continue
                bp = d.relpath(temp_dir)
                if bp.startswith("bin/"):
                    # scripts go under the charm's bin/
                    dst = self.target / bp
                elif bp.startswith("lib"):
                    # site-packages contents land directly in the target
                    dst = cwd / target / d.name
                else:
                    dst = cwd / target / bp
                if dst.exists():
                    # replace any previous install of the same entry
                    if dst.isdir():
                        dst.rmtree_p()
                    elif dst.isfile():
                        dst.remove()
                if not dst.parent.exists():
                    dst.parent.makedirs_p()
                log.debug("Installer moving {} to {}".format(d, dst))
                d.move(dst)
                self._tracked.append(dst)
def usermode_processor(client, serverid="UsermodeProcessor", cfg=None,
                       accepts_list=None, stop_event=None,
                       omero_home=path.getcwd()):
    """
    Creates and activates a usermode processor for the given client.
    It is the responsibility of the client to call "cleanup()"
    on the ProcessorI implementation which is returned.

    cfg is the path to an --Ice.Config-valid file or files. If none
    is given, the value of ICE_CONFIG will be taken from the environment
    if available. Otherwise, all properties will be taken from the client
    instance.

    accepts_list is the list of IObject instances which will be passed to
    omero.api.IScripts.validateScript. If none is given, only the current
    Experimenter's own object will be passed.

    stop_event is an threading.Event. One will be acquired from
    omero.util.concurrency.get_event if none is provided.

    NOTE: the omero_home default is evaluated once at import time, i.e. it
    is the cwd of the importing process.
    """
    if cfg is None:
        cfg = os.environ.get("ICE_CONFIG")
    if accepts_list is None:
        uid = client.sf.getAdminService().getEventContext().userId
        accepts_list = [omero.model.ExperimenterI(uid, False)]
    if stop_event is None:
        stop_event = omero.util.concurrency.get_event(name="UsermodeProcessor")
    # Renamed from 'id' to avoid shadowing the builtin
    ice_id = Ice.Identity()
    ice_id.name = "%s-%s" % (serverid, uuid.uuid4())
    ice_id.category = client.getCategory()
    ctx = omero.util.ServerContext(serverid, client.ic, stop_event)
    impl = omero.processor.ProcessorI(ctx, use_session=client.sf,
                                      accepts_list=accepts_list, cfg=cfg,
                                      omero_home=omero_home,
                                      category=ice_id.category)
    ctx.add_servant(client.adapter, impl, ice_identity=ice_id)
    return impl
def __call__(self):
    """Pip-install (--user scheme) the package reference named in the
    trigger file, then relocate the installed files into the charm,
    tracking every created path for later use in sign()."""
    # install package reference in trigger file
    # in place directory of target
    # XXX: Should this map multiline to "-r", self.entity
    spec = self.entity.text().strip()
    target = self.target_file.dirname()
    log.debug("pip installing {} as {}".format(spec, target))
    cwd = path.getcwd()
    with utils.tempdir() as temp_dir:
        # We do this dance so we don't have
        # to guess package and .egg file names
        # we move everything in the tempdir to the target
        # and track it for later use in sign()
        # PYTHONUSERBASE redirects the --user install into our tempdir
        localenv = os.environ.copy()
        localenv['PYTHONUSERBASE'] = temp_dir
        utils.Process(
            ("pip", "install", "--user", "--ignore-installed", spec),
            env=localenv).throw_on_error()()
        self._tracked = []
        # We now manage two classes of explicit mappings
        # When python packages are installed into a prefix
        # we know that bin/* should map to <charmdir>/bin/
        # and lib/python*/site-packages/* should map to
        # <target>/*
        src_paths = ["bin/*", "lib/python*/site-packages/*"]
        temp_dir = path(temp_dir)
        for p in src_paths:
            for d in temp_dir.glob(p):
                if not d.exists():
                    continue
                bp = d.relpath(temp_dir)
                if bp.startswith("bin/"):
                    # scripts go under the charm's bin/
                    dst = self.target / bp
                elif bp.startswith("lib"):
                    # site-packages contents land directly in the target
                    dst = cwd / target / d.name
                else:
                    dst = cwd / target / bp
                if dst.exists():
                    # replace any previous install of the same entry
                    if dst.isdir():
                        dst.rmtree_p()
                    elif dst.isfile():
                        dst.remove()
                if not dst.parent.exists():
                    dst.parent.makedirs_p()
                log.debug("Installer moving {} to {}".format(d, dst))
                d.move(dst)
                self._tracked.append(dst)
def __init__(self, basedir=path.getcwd(), sleep_interval=DEFAULT_SLEEP,
             foreground=False):
    # NOTE(review): the basedir default is evaluated once at import time
    # (cwd of the importing process) -- confirm this is intended.
    self.updates_list = []
    self.basedir = basedir
    self.foreground = foreground
    self.sleep_interval = sleep_interval
    # State files kept alongside the repository
    self.listfile = self.basedir/'.git-updates'
    self.pidfile = self.basedir/'.git-updates.pid'
    if not self.foreground:
        # Daemon mode: append stdout/stderr to log files under .git/logs
        self.stdout = \
            (self.basedir/'.git'/'logs'/'updates.log').open('a+')
        self.stderr = \
            (self.basedir/'.git'/'logs'/'updates-errors.log').open('a+')
    else:
        self.stdout = sys.stdout
        self.stderr = sys.stderr
def run(self):
    """Launch mongod against this instance's dbpath, forwarding SIGTERM to
    it for a clean shutdown, and block until it exits."""
    datapath = self.dbpath / 'data'
    if not datapath.isdir():
        datapath.mkdir_p()

    def sigterm(signum, frame):
        # Forward termination to mongod, then exit this wrapper process
        self.proc.terminate()
        sys.exit()

    signal.signal(signal.SIGTERM, sigterm)
    cmd = ['mongod']
    cmd.append('--dbpath={0}'.format(datapath.relpath()))
    cmd.append('--logpath={0}'.format(self.dbpath.relpath() / 'log'))
    self.proc = Popen(cmd, cwd=path.getcwd())
    # Block until mongod terminates
    self.proc.communicate()
def __call__(self, argv):
    """Parse argv, load configuration and dispatch to the selected action."""
    parser = self.get_parser()
    args, extras = parser.parse_known_args(args=argv)
    # Load configuration.
    self.parse_config(args.config)
    # XXX load other actions based on definitions from config.
    if args.action not in self.actions:
        # Unknown action or --help.
        return parser.print_help()
    # Load config fom a specific app, if defined, or use default one.
    self.app = args.app or self.config.get('tipfy', 'app')
    # Fallback to the tipfy section.
    self.config_section = ['tipfy']
    if self.app:
        self.config_section.insert(0, self.app)
    # If app is set, an 'app' value can be used in expansions.
    if self.app:
        self.config.set('DEFAULT', 'app', self.app)
    # Prepend configured paths to sys.path, if any.
    sys.path[:0] = self.config.getlist(self.config_section, 'sys.path', [])
    # Current cwd and app paths.
    self.cwd_path = path.getcwd()
    if self.app:
        default_app_path = self.cwd_path.joinpath(self.app)
        self.app_path = path(
            self.config.get(self.config_section, 'path',
                            default_app_path)).abspath()
    else:
        self.app_path = None
    if args.help:
        # Delegate help to action.
        extras.append('--help')
    action = self.actions[args.action](self, args.action)
    return action(extras)
def __call__(self, argv):
    """Parse argv, load configuration and dispatch to the selected action."""
    parser = self.get_parser()
    args, extras = parser.parse_known_args(args=argv)
    # Load configuration.
    self.parse_config(args.config)
    # XXX load other actions based on definitions from config.
    if args.action not in self.actions:
        # Unknown action or --help.
        return parser.print_help()
    # Load config fom a specific app, if defined, or use default one.
    self.app = args.app or self.config.get('tipfy', 'app')
    # Fallback to the tipfy section.
    self.config_section = ['tipfy']
    if self.app:
        self.config_section.insert(0, self.app)
    # If app is set, an 'app' value can be used in expansions.
    if self.app:
        self.config.set('DEFAULT', 'app', self.app)
    # Prepend configured paths to sys.path, if any.
    sys.path[:0] = self.config.getlist(self.config_section, 'sys.path', [])
    # Current cwd and app paths.
    self.cwd_path = path.getcwd()
    if self.app:
        default_app_path = self.cwd_path.joinpath(self.app)
        self.app_path = path(self.config.get(self.config_section, 'path',
                                             default_app_path)).abspath()
    else:
        self.app_path = None
    if args.help:
        # Delegate help to action.
        extras.append('--help')
    action = self.actions[args.action](self, args.action)
    return action(extras)
def __init__(self, ctx, needs_session=True, use_session=None,
             accepts_list=None, cfg=None, omero_home=path.getcwd(),
             category=None):
    """Initialise the processor servant.

    NOTE(review): the omero_home default is evaluated once at import time
    (cwd of the importing process) -- confirm this is intended.
    """
    if accepts_list is None:
        accepts_list = []
    self.category = category #: Category to be used w/ ProcessI
    self.omero_home = omero_home
    # Extensions for user-mode processors (ticket:1672)
    self.use_session = use_session
    """
    If set, this session will be returned from internal_session and
    the "needs_session" setting ignored.
    """
    if self.use_session:
        needs_session = False
    self.accepts_list = accepts_list
    """
    A list of contexts which will be accepted by this user-mode
    processor.
    """
    omero.util.Servant.__init__(self, ctx, needs_session=needs_session)
    if cfg is None:
        # Default Ice config lives at OMERO_HOME/etc/ice.config
        self.cfg = os.path.join(omero_home, "etc", "ice.config")
        self.cfg = os.path.abspath(self.cfg)
    else:
        self.cfg = cfg
    # Keep this session alive until the processor is finished
    self.resources.add(UseSessionHolder(use_session))
def usermode_processor(client, serverid = "UsermodeProcessor",\
                       cfg = None, accepts_list = None, stop_event = None,\
                       omero_home = path.getcwd()):
    """
    Creates and activates a usermode processor for the given client.
    It is the responsibility of the client to call "cleanup()"
    on the ProcessorI implementation which is returned.

    cfg is the path to an --Ice.Config-valid file or files. If none
    is given, the value of ICE_CONFIG will be taken from the environment
    if available. Otherwise, all properties will be taken from the client
    instance.

    accepts_list is the list of IObject instances which will be passed to
    omero.api.IScripts.validateScript. If none is given, only the current
    Experimenter's own object will be passed.

    stop_event is an threading.Event. One will be acquired from
    omero.util.concurrency.get_event if none is provided.

    NOTE: the omero_home default is evaluated once at import time, i.e. it
    is the cwd of the importing process.
    """
    if cfg is None:
        cfg = os.environ.get("ICE_CONFIG")
    if accepts_list is None:
        uid = client.sf.getAdminService().getEventContext().userId
        accepts_list = [omero.model.ExperimenterI(uid, False)]
    if stop_event is None:
        stop_event = omero.util.concurrency.get_event(name="UsermodeProcessor")
    # Renamed from 'id' to avoid shadowing the builtin
    ice_id = Ice.Identity()
    ice_id.name = "%s-%s" % (serverid, uuid.uuid4())
    ice_id.category = client.getCategory()
    ctx = omero.util.ServerContext(serverid, client.ic, stop_event)
    impl = omero.processor.ProcessorI(ctx, use_session=client.sf,
                                      accepts_list=accepts_list, cfg=cfg,
                                      omero_home = omero_home,
                                      category=ice_id.category)
    ctx.add_servant(client.adapter, impl, ice_identity=ice_id)
    return impl
def initialize_options(self):
    """Set default values for every option this command supports."""
    from path import path
    build_tag = os.environ.get('BUILD_TAG', '')
    # Hudson and its fork Jenkins both tag CI builds via BUILD_TAG.
    self.hudson = build_tag.startswith(('hudson', 'jenkins'))
    self.pylint_options = []
    self.all = True
    # All test-selection and behaviour flags start switched off.
    for flag in ('unit', 'integration', 'regression', 'doctest',
                 'subprocess', 'pdb', 'quiet', 'no_pylint'):
        setattr(self, flag, False)
    self.args = None
    self.ignore = None
    self.test_root = path.getcwd() / 'tests'
    self.file = None
    self.default_options = self.get_option_list()
def test_pbs_submit(tmpdirs):
    """Submitted PBS job scripts contain the expected #PBS directives."""
    script_dir, log_dir = tmpdirs
    submit = pbs.Submitter(script=script_dir, log=log_dir)
    fp = submit.submit_job('ls', name='test', hold='testA')
    # BUGFIX: close the script file instead of leaking the handle.
    with open(fp, 'r') as fh:
        script = fh.read()
    assert fp == path(script_dir).joinpath(
        submit.script_name_join.join(['test', submit.uid]))
    assert re.match('^#!/bin/sh$', script, re.MULTILINE)
    assert re.search(r'^#PBS -N test$', script, re.MULTILINE)
    # workdir should default to current working directory
    assert re.search('^#PBS -d {0}$'.format(path.getcwd()), script,
                     re.MULTILINE)
    assert re.search('^#PBS -e %s$' % log_dir, script, re.MULTILINE)
    assert re.search('^#PBS -o %s$' % log_dir, script, re.MULTILINE)
    assert re.search('^#PBS -V$', script, re.MULTILINE)
    assert re.search(r'^ls$', script, re.MULTILINE)
    # job named testA has not been submitted so we shouldn't hold on it
    assert not re.search(r'^#PBS -W$', script, re.MULTILINE)
def test_pbs_submit(tmpdirs):
    """Submitted PBS job scripts contain the expected #PBS directives.

    NOTE(review): the file defines test_pbs_submit twice; under pytest
    only the later definition is collected -- confirm the duplicate is
    intentional.
    """
    script_dir, log_dir = tmpdirs
    submit = pbs.Submitter(script=script_dir, log=log_dir)
    fp = submit.submit_job('ls', name='test', hold='testA')
    # BUGFIX: close the script file instead of leaking the handle.
    with open(fp, 'r') as fh:
        script = fh.read()
    assert fp == path(script_dir).joinpath(
        submit.script_name_join.join(['test', submit.uid]))
    assert re.match('^#!/bin/sh$', script, re.MULTILINE)
    assert re.search(r'^#PBS -N test$', script, re.MULTILINE)
    # workdir should default to current working directory
    assert re.search('^#PBS -d {0}$'.format(path.getcwd()), script,
                     re.MULTILINE)
    assert re.search('^#PBS -e %s$' % log_dir, script, re.MULTILINE)
    assert re.search('^#PBS -o %s$' % log_dir, script, re.MULTILINE)
    assert re.search('^#PBS -V$', script, re.MULTILINE)
    assert re.search(r'^ls$', script, re.MULTILINE)
    # job named testA has not been submitted so we shouldn't hold on it
    assert not re.search(r'^#PBS -W$', script, re.MULTILINE)
def cmdline(args=sys.argv):
    """Engineer command-line entry point.

    Configures logging verbosity, loads the site settings (unless the
    subcommand is in skip_settings), then dispatches to the subcommand
    handler registered on the argparse namespace.
    """
    # bootstrap logging
    bootstrap()

    # Load all plugins
    load_plugins()

    args = get_argparser().parse_args(args[1:])
    # Subcommands that must be able to run without a settings file.
    skip_settings = ('init', )

    logger = logging.getLogger('engineer')
    if args.verbose >= 2:
        logger.removeHandler(get_console_handler(logging.WARNING))
        logger.addHandler(get_console_handler(logging.DEBUG))
    elif args.verbose == 1:
        logger.removeHandler(get_console_handler(logging.WARNING))
        logger.addHandler(get_console_handler(logging.INFO))
    else:
        pass  # WARNING level is added by default in bootstrap method

    if args.parser_name in skip_settings:
        pass
    else:
        # try loading settings
        try:
            from engineer.conf import settings

            if args.config_file is None:
                default_settings_file = path.getcwd() / 'config.yaml'
                logger.info(
                    "No '--settings' parameter specified, defaulting to %s."
                    % default_settings_file)
                settings.reload(default_settings_file)
            else:
                settings.reload(settings_file=args.config_file)
        except Exception as e:
            # BUGFIX: not every exception exposes a .message attribute
            # (deprecated since PEP 352, removed in Python 3); str(e)
            # always works.
            logger.error(str(e))
            exit()

    args.func(args)
    exit()
def cmdline(args=sys.argv):
    """Engineer command-line entry point.

    Configures logging verbosity, loads the site settings (unless the
    subcommand is in skip_settings), then dispatches to the subcommand
    handler registered on the argparse namespace.

    NOTE(review): this file contains two near-identical cmdline
    definitions; the later one wins at import time -- confirm the
    duplication is intentional.
    """
    # bootstrap logging
    bootstrap()

    # Load all plugins
    load_plugins()

    args = get_argparser().parse_args(args[1:])
    # Subcommands that must be able to run without a settings file.
    skip_settings = ('init',)

    logger = logging.getLogger('engineer')
    if args.verbose >= 2:
        logger.removeHandler(get_console_handler(logging.WARNING))
        logger.addHandler(get_console_handler(logging.DEBUG))
    elif args.verbose == 1:
        logger.removeHandler(get_console_handler(logging.WARNING))
        logger.addHandler(get_console_handler(logging.INFO))
    else:
        pass  # WARNING level is added by default in bootstrap method

    if args.parser_name in skip_settings:
        pass
    else:
        # try loading settings
        try:
            from engineer.conf import settings

            if args.config_file is None:
                default_settings_file = path.getcwd() / 'config.yaml'
                logger.info(
                    "No '--settings' parameter specified, defaulting to %s."
                    % default_settings_file)
                settings.reload(default_settings_file)
            else:
                settings.reload(settings_file=args.config_file)
        except Exception as e:
            # BUGFIX: not every exception exposes a .message attribute
            # (deprecated since PEP 352, removed in Python 3); str(e)
            # always works.
            logger.error(str(e))
            exit()

    args.func(args)
    exit()
def make_new_album(from_dir):
    # Interactively build a photo album: every file in from_dir is shown
    # with eog and the user decides whether to include it. Included files
    # (plus a generated thumbnail) are copied into photos/<album name>,
    # and a list of Photo objects for the chosen pictures is returned.
    #currently, no error handling
    frd = path(from_dir)
    d = raw_input("What is the album's name? ")
    # NOTE(review): the matching except/finally for this try is not
    # visible in this chunk -- confirm it exists in the full file.
    try:
        os.mkdir(path.joinpath('photos', d))
        # All subsequent relative operations happen inside the album dir.
        os.chdir(path.joinpath('photos', d))
        cwd = path.getcwd()
        included = []
        for f in frd.files():
            # Show the candidate picture so the user can decide.
            commands.getoutput('eog %s' % f)
            ans = raw_input('include this file in the album? (y/[n]) ')
            if ans.lower() == 'y' or ans.lower() == 'yes':
                cmt = raw_input('picture caption: (blank for none)')
                path.copy(f, cwd)
                # Build the thumbnail filename by inserting '.thumb'
                # before the (assumed 3-character) extension.
                # NOTE(review): f.name[-4:] assumes a dotted 3-char
                # extension like '.jpg' -- confirm inputs.
                n = f.name.replace(f.name[-4:], '.thumb' + f.name[-4:])
                thumb = path.joinpath(cwd, n)
                path.copy(f, thumb)
                cur_pic = path.joinpath(cwd, f.name)
                included.append(Photo(cur_pic, thumb, cmt))
                # Shrink the copied file in place to thumbnail size.
                im = resize_image(Image.open(thumb), (200, 150))
                im.save(file(thumb, 'w'))
        return included
def _initialize(self, config):
    """Populate this settings object from a config dict.

    Every recognized key is ``pop``-ed from *config* (so the dict is
    consumed as it is processed); after plugins get a chance to handle
    their own settings, any keys still left in *config* are set verbatim
    as attributes on the settings object.
    """
    self._check_deprecated_settings(config)
    self.ENGINEER = EngineerConfiguration._EngineerConstants()

    # CONTENT DIRECTORIES
    # SETTINGS_DIR defaults to the directory of the settings file, or the
    # current working directory when no settings file was given.
    self.SETTINGS_DIR = path(
        config.pop(
            'SETTINGS_DIR',
            self.SETTINGS_FILE.dirname().abspath()
            if self.SETTINGS_FILE is not None else path.getcwd()))
    self.CONTENT_DIR = self.normalize(config.pop('CONTENT_DIR', 'content'))
    self.POST_DIR = self.normalize_list(config.pop('POST_DIR', 'posts'))
    self.OUTPUT_DIR = self.normalize(config.pop('OUTPUT_DIR', 'output'))
    self.TEMPLATE_DIR = self.normalize(
        config.pop('TEMPLATE_DIR', 'templates'))
    self.TEMPLATE_PAGE_DIR = config.pop(
        'TEMPLATE_PAGE_DIR', (self.TEMPLATE_DIR / 'pages').abspath())
    self.LOG_DIR = self.normalize(config.pop('LOG_DIR', 'logs'))
    # Without a settings file the log name is fixed; with one, each build
    # gets a timestamped log named after the settings file.
    if self.SETTINGS_FILE is None:
        self.LOG_FILE = config.pop(
            'LOG_FILE', (self.LOG_DIR / 'build.log').abspath())
    else:
        self.LOG_FILE = config.pop(
            'LOG_FILE',
            (self.LOG_DIR / ('%s-%s.log' %
                             (datetime.now().strftime('%m.%d_%H.%M.%S'),
                              self.SETTINGS_FILE.name))).abspath())
    # Cache location is keyed on the settings file name so different
    # configurations don't share caches.
    self.CACHE_DIR = config.pop('CACHE_DIR', None)
    if self.CACHE_DIR is None:
        if self.SETTINGS_FILE is not None:
            self.CACHE_DIR = self.normalize(
                '_cache/%s' % self.SETTINGS_FILE.name)
        else:
            self.CACHE_DIR = self.normalize('_cache/None')
    else:
        self.CACHE_DIR = self.normalize(self.CACHE_DIR)
    self.CACHE_FILE = config.pop(
        'CACHE_FILE', (self.CACHE_DIR / 'engineer.cache').abspath())
    self.OUTPUT_CACHE_DIR = config.pop(
        'OUTPUT_CACHE_DIR', (self.CACHE_DIR / 'output_cache').abspath())
    self.JINJA_CACHE_DIR = config.pop(
        'JINJA_CACHE_DIR', (self.CACHE_DIR / 'jinja_cache').abspath())
    self.BUILD_STATS_FILE = config.pop(
        'BUILD_STATS_FILE', (self.CACHE_DIR / 'build_stats.cache').abspath())

    # PLUGINS
    # Plugins are imported for their side effects (registration).
    self.PLUGINS = self.normalize_list(config.pop('PLUGINS', None))
    if self.PLUGINS is not None:
        for plugin in self.PLUGINS:
            __import__(plugin)

    # THEMES
    self.THEME_DIRS = self.normalize_list(config.pop('THEME_DIRS', None))
    self.THEME_FINDERS = ['engineer.finders.ThemeDirsFinder',
                          'engineer.finders.SiteFinder',
                          'engineer.finders.PluginFinder',
                          'engineer.finders.DefaultFinder']
    self.THEME_SETTINGS = config.pop('THEME_SETTINGS', {})
    self.THEME = config.pop('THEME', 'dark_rainbow')

    # PREPROCESSOR / COMPRESSOR SETTINGS
    self.COMPRESSOR_ENABLED = config.pop('COMPRESSOR_ENABLED', True)
    self.COMPRESSOR_FILE_EXTENSIONS = config.pop(
        'COMPRESSOR_FILE_EXTENSIONS', ['js', 'css'])
    self.PREPROCESS_LESS = config.pop('PREPROCESS_LESS', True)
    # On Windows the bundled dotless compiler is used; elsewhere the
    # external lessc command is expected to be on the PATH.
    if not 'LESS_PREPROCESSOR' in config:
        if platform.system() == 'Windows':
            self.LESS_PREPROCESSOR = str(
                self.ENGINEER.ROOT_DIR /
                'lib/dotless/dotless.Compiler.exe') + ' {infile} {outfile}'
        else:
            self.LESS_PREPROCESSOR = 'lessc {infile} {outfile}'
    else:
        self.LESS_PREPROCESSOR = path(config.pop('LESS_PREPROCESSOR'))

    # SITE SETTINGS
    self.SITE_TITLE = config.pop('SITE_TITLE', 'SITE_TITLE')
    self.SITE_URL = config.pop('SITE_URL', 'SITE_URL')
    self.SITE_AUTHOR = config.pop('SITE_AUTHOR', None)
    self.HOME_URL = config.pop('HOME_URL', '/')
    # HOME_URL must end with a slash
    if not self.HOME_URL.endswith('/'):
        self.HOME_URL += '/'
    self.STATIC_URL = config.pop(
        'STATIC_URL', urljoin(self.HOME_URL, 'static'))

    # starting in version 0.5, the default permalink style will change
    # to 'pretty'
    # A permalink setting may be either a named style from
    # permalink_styles or a literal pattern (the .get fallback).
    permalink_setting = config.pop('PERMALINK_STYLE', None)
    if permalink_setting is None:
        self.PERMALINK_STYLE = permalink_styles['fulldate']
    else:
        self.PERMALINK_STYLE = permalink_styles.get(
            permalink_setting, permalink_setting)
    self.ROLLUP_PAGE_SIZE = int(config.pop('ROLLUP_PAGE_SIZE', 5))

    # RSS FEED SETTINGS
    self.FEED_TITLE = config.pop('FEED_TITLE', self.SITE_TITLE + ' Feed')
    self.FEED_ITEM_LIMIT = config.pop(
        'FEED_ITEM_LIMIT', self.ROLLUP_PAGE_SIZE)
    self.FEED_DESCRIPTION = config.pop(
        'FEED_DESCRIPTION', 'The %s most recent posts from %s.'
        % (self.FEED_ITEM_LIMIT, self.SITE_URL))
    self.FEED_URL = config.pop(
        'FEED_URL', urljoin(self.HOME_URL, 'feeds/rss.xml'))

    # These 'constants' are updated here so they're relative to the
    # STATIC_URL value
    self.ENGINEER.FOUNDATION_CSS_URL = urljoin(
        self.STATIC_URL, 'engineer/lib/foundation/')
    self.ENGINEER.JQUERY_URL = urljoin(
        self.STATIC_URL, 'engineer/lib/jquery-1.7.1.min.js')
    self.ENGINEER.MODERNIZR_URL = urljoin(
        self.STATIC_URL, 'engineer/lib/modernizr-2.5.3.min.js')
    self.ENGINEER.LESS_JS_URL = urljoin(
        self.STATIC_URL, 'engineer/lib/less-1.3.1.min.js')
    self.ENGINEER.TWEET_URL = urljoin(
        self.STATIC_URL, 'engineer/lib/tweet/tweet/jquery.tweet.js')

    # URL helper functions
    def page(num):
        # URL for rollup page number `num` under HOME_URL.
        page_path = urljoin('page', str(num))
        return urljoin(self.HOME_URL, page_path)

    def tag(name):
        # URL for the archive page of tag `name` (slugified).
        page_path = urljoin('tag', slugify(name))
        page_path = urljoin(self.HOME_URL, page_path)
        return page_path

    self.URLS = {
        'home': self.HOME_URL,
        'archives': urljoin(self.HOME_URL, 'archives'),
        'feed': self.FEED_URL,
        'listpage': page,
        'tag': tag,
    }

    # Update URLs from the config setting if they're present
    self.URLS.update(config.pop('URLS', {}))

    # MISCELLANEOUS SETTINGS
    self.ACTIVE_NAV_CLASS = config.pop('ACTIVE_NAV_CLASS', 'current')
    self.DEBUG = config.pop('DEBUG', False)
    #self.DISABLE_CACHE = config.pop('DISABLE_CACHE', False)
    self.PUBLISH_DRAFTS = config.pop('PUBLISH_DRAFTS', False)
    self.PUBLISH_PENDING = config.pop('PUBLISH_PENDING', False)
    self.PUBLISH_REVIEW = config.pop('PUBLISH_REVIEW', False)
    self.POST_TIMEZONE = pytz.timezone(config.pop('POST_TIMEZONE', 'UTC'))
    # NOTE(review): this uses config.get to test but config.pop to read,
    # so a SERVER_TIMEZONE key whose value is None is never popped and
    # will be re-applied by the leftover-settings loop below -- confirm
    # that is intended.
    self.SERVER_TIMEZONE = self.POST_TIMEZONE if config.get(
        'SERVER_TIMEZONE', None) is None else config.pop('SERVER_TIMEZONE')
    self.TIME_FORMAT = config.pop(
        'TIME_FORMAT', '%I:%M %p %A, %B %d, %Y %Z')  # '%Y-%m-%d %H:%M:%S %Z%z'

    # Let plugins deal with their settings in their own way if needed
    for plugin_type in get_all_plugin_types():
        for plugin in plugin_type.plugins:
            logger.debug(
                "Calling handle_settings on plugin: %s. config dict is: %s"
                % (plugin, config))
            config = plugin.handle_settings(config, self)

    # Pull any remaining settings in the config and set them as
    # attributes on the settings object
    for k, v in config.iteritems():
        setattr(self, k, v)
def add_local_bin_to_path():
    """Prepend the project-local node binary directory
    (<cwd>/vendor/node/bin) to the PATH environment variable.

    Idempotent: the directory is only inserted when it is not already
    present on the PATH.
    """
    local_bin = os.path.join(os.getcwd(), "vendor", "node", "bin")
    # Portability fix: split/join on os.pathsep instead of a hard-coded
    # ':' so the function also behaves correctly on Windows.
    paths = os.environ['PATH'].split(os.pathsep)
    if local_bin not in paths:
        paths.insert(0, local_bin)
        os.environ['PATH'] = os.pathsep.join(paths)
def _add_workDir(self, flag_list, **kwargs):
    """Append the work-directory flag to flag_list; falls back to the
    current working directory when no 'workDir' keyword is supplied."""
    work_dir = kwargs.get('workDir') or path.getcwd()
    self.__run_format_method(flag_list, self.format_workDir, work_dir)
def _create(self, sql_directory, db_vers, db_patch, password_hash, args,
            location=None):
    """Generate the full database creation SQL script for the given
    version/patch, writing either to args.file (if provided) or to a
    new <vers>__<patch>.sql file in the current working directory.
    """
    # NOTE(review): the sql_directory parameter is immediately
    # overwritten here, and location is only ever assigned, never read
    # afterwards -- presumably vestigial parameters; confirm.
    sql_directory = self._sql_directory(db_vers, db_patch)
    if not sql_directory.exists():
        self.ctx.die(2, "Invalid Database version/patch: %s does not"
                     " exist" % sql_directory)

    if args and args.file:
        # Caller supplied an open file; the placeholder name appears in
        # the generated header instead of a real filename.
        output = args.file
        script = "<filename here>"
    else:
        script = "%s__%s.sql" % (db_vers, db_patch)
        location = path.getcwd() / script
        output = open(location, 'w')
        self.ctx.out("Saving to " + location)

    try:
        dbprofile = self._db_profile()
        header = sql_directory / ("%s-header.sql" % dbprofile)
        footer = sql_directory / ("%s-footer.sql" % dbprofile)
        if header.exists():
            # 73 multiple DB support. OMERO 4.3+
            # Newer layouts ship per-profile header/footer templates that
            # get substitutions applied via this cfg mapping.
            cfg = {
                "TIME": time.ctime(time.time()),
                "DIR": sql_directory,
                "SCRIPT": script}
            self._copy(header, output, str, cfg)
            self._copy(sql_directory/"schema.sql", output, str)
            self._copy(sql_directory/"views.sql", output, str)
            self._copy(
                footer, output,
                self._make_replace(password_hash, db_vers, db_patch), cfg)
        else:
            # OMERO 4.2.x and before
            # NOTE(review): exact line layout of this SQL banner was
            # reconstructed -- verify against the original file.
            output.write("""
--
-- GENERATED %s from %s
--
-- This file was created by the bin/omero db script command
-- and contains an MD5 version of your OMERO root users's password.
-- You should think about deleting it as soon as possible.
--
-- To create your database:
--
--     createdb omero
--     psql omero < %s
--

BEGIN;
""" % (time.ctime(time.time()), sql_directory, script))
            self._copy(sql_directory/"schema.sql", output, str)
            self._copy(
                sql_directory/"data.sql", output,
                self._make_replace(password_hash, db_vers, db_patch))
            self._copy(sql_directory/"views.sql", output, str)
            output.write("COMMIT;\n")

    finally:
        # Only close files we opened ourselves; never close stdout.
        output.flush()
        if output != sys.stdout:
            output.close()
# NOTE(review): this chunk begins mid-statement -- the opening of this
# country-code list (ISO 3166-1 alpha-3 codes, presumably the set of
# recognized actors) is outside the visible region.
'MTQ', 'MRT', 'MUS', 'MYT', 'MEX', 'FSM', 'MDA', 'MCO', 'MNG', 'MTN',
'MSR', 'MAR', 'MOZ', 'MMR', 'NAM', 'NRU', 'NPL', 'NLD', 'ANT', 'NCL',
'NZL', 'NIC', 'NER', 'NGA', 'NIU', 'NFK', 'MNP', 'NOR', 'PSE', 'OMN',
'PAK', 'PLW', 'PAN', 'PNG', 'PRY', 'PER', 'PHL', 'PCN', 'POL', 'PRT',
'PRI', 'QAT', 'REU', 'ROM', 'RUS', 'RWA', 'SHN', 'KNA', 'LCA', 'SPM',
'VCT', 'WSM', 'SMR', 'STP', 'SAU', 'SEN', 'SRB', 'SYC', 'SLE', 'SGP',
'SVK', 'SVN', 'SLB', 'SOM', 'ZAF', 'ESP', 'LKA', 'SDN', 'SUR', 'SJM',
'SWZ', 'SWE', 'CHE', 'SYR', 'TJK', 'TZA', 'THA', 'TGO', 'TKL', 'TON',
'TTO', 'TUN', 'TUR', 'TKM', 'TCA', 'TUV', 'UGA', 'UKR', 'ARE', 'GBR',
'USA', 'VIR', 'URY', 'UZB', 'VUT', 'VEN', 'VNM', 'WLF', 'ESH', 'YEM',
'ZMB', 'ZWE'
]

# Event codes to keep -- presumably quad-class codes; confirm against
# the data dictionary for the *.reduced.txt files.
quad_codes = ['2', '3']

filepaths = path.getcwd().files('*.reduced.txt')
output = list()
# WARNING(review): the loop variable shadows the imported `path` module;
# any later use of path.getcwd() etc. after this loop would fail.
# Note also the files opened here are never explicitly closed.
for path in filepaths:
    data = open(path, 'r')
    print 'Just read in the %s data...' % path
    for line in data:
        # Strip the trailing newline, then split the tab-separated record.
        line = line.replace('\n', '')
        split_line = line.split('\t')
        # Keep rows where a USA actor (col 1) interacts with a non-USA
        # recognized actor (col 2) and the event code (col 4) matches.
        condition1 = split_line[1][0:3] == 'USA'
        condition2 = split_line[2][0:3] != 'USA'
        condition3 = split_line[2][0:3] in allActors
        condition4 = split_line[4] in quad_codes
        # NOTE(review): the except clause for this try lies beyond the
        # visible region of this chunk.
        try:
            if all([condition1, condition2, condition3, condition4]):
                output.append(split_line)
# NOTE(review): these two methods presumably belong to a dict subclass
# (Bunch) whose class header is not visible in this chunk.
def __getattr__(self, attr):
    # Expose dict keys as attributes: c.dburl is c['dburl'].
    try:
        return self[attr]
    except KeyError:
        # Translate to AttributeError so getattr()/hasattr() semantics hold.
        raise AttributeError("%s not found" % attr)

def __setattr__(self, attr, value):
    # Attribute assignment writes through to the underlying dict.
    self[attr] = value

# Default development-server configuration values.
c = Bunch()
c.dburl = None                      # database URL; must be set elsewhere
c.db_pool_size = 10
c.db_pool_overflow = 10
c.secret = "This is the phrase that is used for secret stuff."
c.pw_secret = "This phrase encrypts passwords."
c.static_dir = path.getcwd() / ".." / "bespinclient" / "tmp" / "static"
c.plugin_path = []
c.loader_name = "bespin.tiki"
c.template_file_dir = None
# Paths resolved relative to this module's location on disk.
c.docs_dir = os.path.abspath("%s/../../../docs" % os.path.dirname(__file__))
c.log_file = os.path.abspath("%s/../devserver.log" % os.path.dirname(__file__))
c.default_quota = 15
c.secure_cookie = True
c.http_only_cookie = True
c.current_domain_cookie = True
c.template_path = [path(__file__).dirname().abspath()]
c.base_url = "https://bespin.mozilla.com/"