def handle_noargs(self, **kwargs): try: verbose = int(kwargs['verbosity']) > 0 except (KeyError, TypeError, ValueError): verbose = True for release in DocumentRelease.objects.all(): if verbose: print "Updating %s..." % release destdir = Path(settings.DOCS_BUILD_ROOT).child(release.lang, release.version) if not destdir.exists(): destdir.mkdir(parents=True) # Make an SCM checkout/update into the destination directory. # Do this dynamically in case we add other SCM later. getattr(self, 'update_%s' % release.scm)(release.scm_url, destdir) # Make the directory for the JSON files - sphinx-build doesn't # do it for us, apparently. json_build_dir = destdir.child('_build', 'json') if not json_build_dir.exists(): json_build_dir.mkdir(parents=True) # "Shell out" (not exactly, but basically) to sphinx-build. sphinx.cmdline.main(['sphinx-build', '-b', 'json', # Use the JSON builder '-q', # Be vewy qwiet destdir, # Source file directory json_build_dir, # Destination directory ])
def create_chapters(vers, release_date=None, comments_open=True):
    """
    Create a bunch of ``Chapter`` objects for a given ``BookVersion``.
    """
    svn = pysvn.Client()
    for entry in svn.ls(vers.svn_root):
        fname = Path(entry["name"]).name
        if fname.ext != ".txt":
            continue
        parts = publish_html(svn.cat(entry["name"]))
        # Strip a leading "Chapter N:" / "Appendix X:" prefix from the title.
        title = re.sub(r"(?i)^(chapter|appendix)\s+[A-Z0-9]+:\s*", "", parts["title"])
        if fname.startswith("appendix"):
            # Appendix letters map to numbers: A -> 1, B -> 2, ...
            Chapter.objects.get_or_create(
                type="A",
                number=ord(fname.stem[-1]) - ord('A') + 1,
                title=title,
                version=vers,
                defaults=dict(release_date=release_date,
                              comments_open=comments_open),
            )
        elif fname.startswith("chapter"):
            # Chapter filenames end in a two-digit number, e.g. "chapter07.txt".
            Chapter.objects.get_or_create(
                type="C",
                number=int(fname.stem[-2:]),
                title=title,
                version=vers,
                defaults=dict(release_date=release_date,
                              comments_open=comments_open),
            )
def document(request, lang, version, url):
    """Serve a Sphinx-built documentation page from its pickled payload.

    Only the English "dev" docs are available; anything else is a 404.
    """
    if lang != 'en' or version != 'dev':
        raise Http404()
    docroot = Path(settings.DOCS_PICKLE_ROOT)

    # First look for <bits>/index.fpickle, then for <bits>.fpickle
    bits = url.strip('/').split('/') + ['index.fpickle']
    doc = docroot.child(*bits)
    if not doc.exists():
        bits = bits[:-2] + ['%s.fpickle' % bits[-2]]
        doc = docroot.child(*bits)
        if not doc.exists():
            raise Http404("'%s' does not exist" % doc)

    bits[-1] = bits[-1].replace('.fpickle', '')
    template_names = [
        'docs/%s.html' % '-'.join([b for b in bits if b]),
        'docs/doc.html',
    ]
    # Fix: open the pickle files with context managers so the handles are
    # closed promptly instead of leaking until garbage collection.
    with open(doc, 'rb') as doc_fp:
        doc_obj = pickle.load(doc_fp)
    with open(docroot.child('globalcontext.pickle'), 'rb') as env_fp:
        env_obj = pickle.load(env_fp)
    return render_to_response(template_names, RequestContext(request, {
        'doc': doc_obj,
        'env': env_obj,
        'update_date': datetime.datetime.fromtimestamp(docroot.child('last_build').mtime()),
        'home': urlresolvers.reverse('document-index', kwargs={'lang': lang, 'version': version}),
        'search': urlresolvers.reverse('document-search', kwargs={'lang': lang, 'version': version}),
        'redirect_from': request.GET.get('from', None),
    }))
def handle_noargs(self, **options):
    """Load station polygons from the bundled JSON file into the database."""
    project_dir = Path(__file__).absolute().ancestor(4)
    data_file = project_dir.child("static", "js", "station_polys.json")
    # json.load reads straight from the file object; no need to slurp the
    # whole file into a string first with f.read().
    with open(data_file, "r") as f:
        data = json.load(f)
    for station_id, coords in data.items():
        station = Station.objects.get(id=station_id)
        station.polygon = coords
        station.save()
def run_benchmarks(control, benchmark_dir, benchmarks, trials, record_dir=None, profile_dir=None): if benchmarks: print "Running benchmarks: %s" % " ".join(benchmarks) else: print "Running all benchmarks" if record_dir: record_dir = Path(record_dir).expand().absolute() if not record_dir.exists(): raise ValueError('Recording directory "%s" does not exist' % record_dir) print "Recording data to '%s'" % record_dir control_label = get_django_version(control, vcs=None) branch_info = "" print "Benchmarking: Django %s (in %s%s)" % (control_label, branch_info, control) print " Control: %s" % cpython print " Experiment: %s" % pypy print control_env = {'PYTHONPATH': '.:%s:%s' % (Path(control).absolute(), Path(benchmark_dir))} for benchmark in discover_benchmarks(benchmark_dir): if not benchmarks or benchmark.name in benchmarks: print "Running '%s' benchmark ..." % benchmark.name settings_mod = '%s.settings' % benchmark.name control_env['DJANGO_SETTINGS_MODULE'] = settings_mod experiment_env = control_env.copy() if profile_dir is not None: control_env['DJANGOBENCH_PROFILE_FILE'] = Path(profile_dir, "cpython-%s" % benchmark.name) experiment_env['DJANGOBENCH_PROFILE_FILE'] = Path(profile_dir, "pypy-%s" % benchmark.name) try: control_data = run_benchmark(benchmark, trials, control_env, cpython) experiment_data = run_benchmark(benchmark, trials, experiment_env, pypy) except SkipBenchmark, reason: print "Skipped: %s\n" % reason continue options = argparse.Namespace( track_memory = False, diff_instrumentation = False, benchmark_name = benchmark.name, disable_timelines = True, ) result = perf.CompareBenchmarkData(control_data, experiment_data, options) if record_dir: record_benchmark_results( dest = record_dir.child('%s.json' % benchmark.name), name = benchmark.name, result = result, control = 'cpython', experiment = 'pypy', control_data = control_data, experiment_data = experiment_data, ) print format_benchmark_result(result, len(control_data.runtimes)) print
def do_build(self, arguments):
    """Build each requested course (or all of them) into the build path."""
    # If no course arguments are given, build all the courses.
    if arguments['<course>']:
        try:
            courses = [self.library.courses[c] for c in arguments['<course>']]
        except KeyError:
            # In Python 2 the comprehension variable leaks, so ``c`` is the
            # slug that failed to resolve.
            raise CLIError("No such course:", c)
    else:
        courses = self.library.courses.values()

    for course in courses:
        self.out.write(u'Building %s\n' % course.title)

        # Make the dest directory.
        dest = self.library.build_path.child(course.slug)
        if dest.exists():
            dest.rmtree()
        dest.mkdir(parents=True)

        # Create the sphinx support directories (_static, _templates) by
        # merging directories from the internal chucks-support directory
        # and from the library's theme if it exists. This has to happen
        # before building the handounts and Sphinx docs because both those
        # steps uses these components.
        for subdir in ('_static', '_templates'):
            chucksdir = self.support_path.child(subdir)
            themedir = self.library.theme_path.child(subdir)
            sources = [d for d in (chucksdir, themedir) if d.exists()]
            if not dest.child(subdir).exists():
                dest.child(subdir).mkdir()
            fileutils.merge_trees(sources, dest.child(subdir))

        # Write out an auth.json for the deployment step. This should
        # probably actually become part of the deployment step at some point.
        # Fix: use a context manager so the file handle is closed (and the
        # data flushed) instead of leaking an open file.
        if hasattr(course, 'auth'):
            with open(dest.child('auth.json'), 'w') as auth_fp:
                json.dump(course.auth, auth_fp)

        # Handouts have to go first: Sphinx links to the handouts.
        self._build_handouts(course)
        self._build_sphinx(course)

        # Copy over any extra files to be downloaded. FIXME: This is a nasty
        # hack. Inject these into toc.html as download links?
        for fname in getattr(course, 'downloads', []):
            p = Path(fname)
            if p.isabsolute():
                src = p
            else:
                src = self.library.path.child(*p.components())
            shutil.copy(src, dest.child('html'))
def require_dir(config, key, create_if_missing=False):
    """Ensure ``config[key]`` names an existing directory.

    Raises KeyError when the config option is absent and OSError when the
    path is missing or not a directory. When ``create_if_missing`` is true,
    a missing directory (including parents) is created instead of failing.
    """
    import os
    try:
        dir = config[key]
    except KeyError:
        msg = "config option '%s' missing"
        raise KeyError(msg % key)
    # Fix: the original ignored ``create_if_missing`` and always created the
    # directory; honor the flag as its name promises.
    if create_if_missing and not os.path.exists(dir):
        os.makedirs(dir)
    if not os.path.isdir(dir):
        msg = ("directory '%s' is missing or not a directory "
               "(from config option '%s')")
        tup = dir, key
        raise OSError(msg % tup)
def search(request, lang, version):
    """Render the documentation search page (English dev docs only)."""
    if lang != 'en' or version != 'dev':
        raise Http404()
    docroot = Path(settings.DOCS_PICKLE_ROOT)

    # Remove the 'cof' GET variable from the query string so that the page
    # linked to by the Javascript fallback doesn't think its inside an iframe.
    mutable_get = request.GET.copy()
    if 'cof' in mutable_get:
        del mutable_get['cof']

    # Fix: load the pickled Sphinx global context with a context manager so
    # the file handle is closed promptly instead of leaking.
    with open(docroot.child('globalcontext.pickle'), 'rb') as env_fp:
        env = pickle.load(env_fp)

    return render_to_response('docs/search.html', RequestContext(request, {
        'query': request.GET.get('q'),
        'query_string': mutable_get.urlencode(),
        'env': env,
        'home': urlresolvers.reverse('document-index', kwargs={'lang': lang, 'version': version}),
        'search': urlresolvers.reverse('document-search', kwargs={'lang': lang, 'version': version}),
    }))
def get_django_version(loc, vcs=None):
    """Return the Django version importable from ``loc`` (or the current
    checkout when ``vcs`` is given)."""
    if vcs:
        switch_to_branch(vcs, loc, do_cleanup=True)
        pythonpath = Path.cwd()
    else:
        pythonpath = Path(loc).absolute()
    out, err, _ = perf.CallAndCaptureOutput(
        # Fix: the original relied on accidental implicit string
        # concatenation ("-c" "import ...") which fused the flag and the
        # program into one "-cimport ..." argument. Pass them separately.
        [sys.executable, "-c", "import django; print django.get_version()"],
        env={"PYTHONPATH": pythonpath},
    )
    return out.strip()
def get_widgy_version(loc, vcs=None):
    """Return the widgy version importable from ``loc`` (or the current
    checkout when ``vcs`` is given)."""
    if vcs:
        switch_to_branch(vcs, loc, do_cleanup=True)
        pythonpath = Path.cwd()
    else:
        pythonpath = Path(loc).absolute()
    out, err, _ = perf.CallAndCaptureOutput(
        # Fix: the original relied on accidental implicit string
        # concatenation ('-c' 'import ...') which fused the flag and the
        # program into one '-cimport ...' argument. Pass them separately.
        [sys.executable, '-c', 'import widgy; print widgy.get_version()'],
        env={'PYTHONPATH': pythonpath},
    )
    return out.strip()
def get_widgy_version(loc, vcs=None):
    """Return the widgy version importable from ``loc`` (or the current
    checkout when ``vcs`` is given)."""
    if vcs:
        switch_to_branch(vcs, loc, do_cleanup=True)
        pythonpath = Path.cwd()
    else:
        pythonpath = Path(loc).absolute()
    out, err, _ = perf.CallAndCaptureOutput(
        # Fix: the original relied on accidental implicit string
        # concatenation ('-c' 'import ...') which fused the flag and the
        # program into one '-cimport ...' argument. Pass them separately.
        [sys.executable, '-c', 'import widgy; print widgy.get_version()'],
        env={'PYTHONPATH': pythonpath})
    return out.strip()
def create_chapters(vers, release_date=None, comments_open=True):
    """
    Create a bunch of ``Chapter`` objects for a given ``BookVersion``.
    """
    client = pysvn.Client()
    for item in client.ls(vers.svn_root):
        entry_name = Path(item["name"]).name
        if entry_name.ext != ".txt":
            continue
        html = publish_html(client.cat(item["name"]))
        # Drop any "Chapter N:" / "Appendix X:" prefix from the title.
        title = re.sub(r"(?i)^(chapter|appendix)\s+[A-Z0-9]+:\s*", "", html["title"])
        if entry_name.startswith("appendix"):
            # Appendix "A" becomes number 1, "B" number 2, and so on.
            Chapter.objects.get_or_create(type="A",
                                          number=ord(entry_name.stem[-1]) - ord('A') + 1,
                                          title=title,
                                          version=vers,
                                          defaults=dict(release_date=release_date,
                                                        comments_open=comments_open))
        elif entry_name.startswith("chapter"):
            # Chapter filenames end with a two-digit number.
            Chapter.objects.get_or_create(type="C",
                                          number=int(entry_name.stem[-2:]),
                                          title=title,
                                          version=vers,
                                          defaults=dict(release_date=release_date,
                                                        comments_open=comments_open))
def dict2dir(dir, dic, mode="w"):
    """Materialize ``dic`` as a directory tree rooted at ``dir``.

    Each key is an entry name: dict values become subdirectories
    (recursively), any other value is written to a file opened with
    ``mode``.
    """
    import os
    dir = str(dir)
    if not os.path.exists(dir):
        os.mkdir(dir)
    for filename, content in dic.items():
        p = os.path.join(dir, filename)
        if isinstance(content, dict):
            # Bug fix: propagate ``mode`` into the recursive call — it was
            # silently dropped before, so nested files always used "w".
            dict2dir(p, content, mode)
            continue
        # Use a context manager so the handle is closed even if write fails.
        with open(p, mode) as f:
            f.write(content)
def _make_cli(tmpdir, request):
    """
    Set up a CLI instance pointing to a tmpdir for a library.

    This can't be in the body of cli() itself because we want to call it
    both from the cli fixture and from TestBuild.build.
    """
    cli = TestCLI(
        stdout=io.StringIO(),
        stderr=io.StringIO(),
        libpath=str(tmpdir),
    )
    # Chdir to the temp dir, remembering to restore when we're done.
    previous_cwd = Path.cwd()
    os.chdir(str(tmpdir))
    request.addfinalizer(lambda: os.chdir(previous_cwd))
    return cli
# Django settings for okqa project.
import os

from unipath import FSPath as Path
import dj_database_url

# Project root: three levels above this settings file.
PROJECT_DIR = Path(__file__).absolute().ancestor(3)

DEBUG = False
TEMPLATE_DEBUG = DEBUG

ADMINS = ()
MANAGERS = ADMINS

INTERNAL_IPS = ('127.0.0.1',)

# TODO: test if the next line is good for us
# SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db'

TIME_ZONE = 'Asia/Jerusalem'

# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
gettext = lambda s: s
LANGUAGES = (
    ('he', gettext('Hebrew')),
    ('en', gettext('English')),
    ('ar', gettext('Arabic')),
    ('ru', gettext('Russian')),
)
# Default to the first configured language (Hebrew).
LANGUAGE_CODE = LANGUAGES[0][0]
import os

from django.conf.global_settings import TEMPLATE_CONTEXT_PROCESSORS as TCP
from unipath import FSPath as Path

# Project root: two levels above this settings file.
BASE = Path(__file__).absolute().ancestor(2)

TIME_ZONE = 'America/New_York'
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
USE_I18N = True
USE_L10N = True
USE_TZ = True

MEDIA_ROOT = BASE.child('media')
MEDIA_URL = '/m/'

SOUTH_TESTS_MIGRATE = False

STATIC_ROOT = BASE.ancestor(1).child('static_root')
STATIC_URL = '/static/'
STATICFILES_DIRS = (
    BASE.ancestor(1).child('wsgi').child('static'),
)
Generated by 'django-admin startproject' using Django 1.10.4. For more information on this file, see https://docs.djangoproject.com/en/1.10/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.10/ref/settings/ """ from unipath import FSPath as Path import os import json # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) BASE = Path(__file__).absolute().ancestor(2) if os.path.exists(BASE.child('_env').child('env-prod.json')): env_path = BASE.child('_env').child('env-prod.json') elif os.path.exists(BASE.child('_env').child('env-dev.json')): env_path = BASE.child('_env').child('env-dev.json') else: env_path = BASE.child('_env').child('env.json') print "Using env file: %s" % env_path with open(env_path) as handle: ENV = json.load(handle) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
def handle_noargs(self, **kwargs): try: verbosity = int(kwargs['verbosity']) except (KeyError, TypeError, ValueError): verbosity = 1 builders = ['json', 'html'] # Somehow, bizarely, there's a bug in Sphinx such that if I try to # build 1.0 before other versions, things fail in weird ways. However, # building newer versions first works. I suspect Sphinx is hanging onto # some global state. Anyway, we can work around it by making sure that # "dev" builds before "1.0". This is ugly, but oh well. for release in DocumentRelease.objects.order_by('-version'): if verbosity >= 1: print "Updating %s..." % release # checkout_dir is shared for all languages. checkout_dir = Path(settings.DOCS_BUILD_ROOT).child(release.version) parent_build_dir = Path(settings.DOCS_BUILD_ROOT).child(release.lang, release.version) if not checkout_dir.exists(): checkout_dir.mkdir(parents=True) if not parent_build_dir.exists(): parent_build_dir.mkdir(parents=True) # # Update the release from SCM. # # Make an SCM checkout/update into the destination directory. # Do this dynamically in case we add other SCM later. getattr(self, 'update_%s' % release.scm)(release.scm_url, checkout_dir) if release.docs_subdir: source_dir = checkout_dir.child(*release.docs_subdir.split('/')) else: source_dir = checkout_dir if release.lang != 'en': scm_url = release.scm_url.replace('django.git', 'django-docs-translations.git') trans_dir = checkout_dir.child('django-docs-translation') if not trans_dir.exists(): trans_dir.mkdir() getattr(self, 'update_%s' % release.scm)(scm_url, trans_dir) if not source_dir.child('locale').exists(): source_dir.child('locale').write_link(trans_dir.child('translations')) subprocess.call("cd %s && make translations" % trans_dir, shell=True) if release.is_default: # Build the pot files (later retrieved by Transifex) builders.append('gettext') # # Use Sphinx to build the release docs into JSON and HTML documents. # for builder in builders: # Wipe and re-create the build directory. See #18930. 
build_dir = parent_build_dir.child('_build', builder) if build_dir.exists(): shutil.rmtree(build_dir) build_dir.mkdir(parents=True) if verbosity >= 2: print " building %s (%s -> %s)" % (builder, source_dir, build_dir) subprocess.call(['sphinx-build', '-b', builder, '-D', 'language=%s' % release.lang, '-q', # Be vewy qwiet source_dir, # Source file directory build_dir, # Destination directory ]) # # Create a zip file of the HTML build for offline reading. # This gets moved into MEDIA_ROOT for downloading. # html_build_dir = parent_build_dir.child('_build', 'html') zipfile_name = 'django-docs-%s-%s.zip' % (release.version, release.lang) zipfile_path = Path(settings.MEDIA_ROOT).child('docs', zipfile_name) if not zipfile_path.parent.exists(): zipfile_path.parent.mkdir(parents=True) if verbosity >= 2: print " build zip (into %s)" % zipfile_path def zipfile_inclusion_filter(f): return f.isfile() and '.doctrees' not in f.components() with closing(zipfile.ZipFile(zipfile_path, 'w')) as zf: for f in html_build_dir.walk(filter=zipfile_inclusion_filter): zf.write(f, html_build_dir.rel_path_to(f)) # # Copy the build results to the directory used for serving # the documentation in the least disruptive way possible. # build_dir = parent_build_dir.child('_build') built_dir = parent_build_dir.child('_built') subprocess.check_call(['rsync', '--archive', '--delete', '--link-dest=' + build_dir, build_dir + '/', built_dir]) # # Rebuild the imported document list and search index. # if not kwargs['reindex']: continue if verbosity >= 2: print " reindexing..." # Build a dict of {path_fragment: document_object}. We'll pop values # out of this dict as we go which'll make sure we know which # remaining documents need to be deleted (and unindexed) later on. documents = dict((doc.path, doc) for doc in release.documents.all()) # Walk the tree we've just built looking for ".fjson" documents # (just JSON, but Sphinx names them weirdly). 
Each one of those # documents gets a corresponding Document object created which # we'll then ask Sphinx to reindex. # # We have to be a bit careful to reverse-engineer the correct # relative path component, especially for "index" documents, # otherwise the search results will be incorrect. json_built_dir = parent_build_dir.child('_built', 'json') for built_doc in json_built_dir.walk(): if built_doc.isfile() and built_doc.ext == '.fjson': # Convert the built_doc path which is now an absolute # path (i.e. "/home/docs/en/1.2/_built/ref/models.json") # into a path component (i.e. "ref/models"). path = json_built_dir.rel_path_to(built_doc) if path.stem == 'index': path = path.parent path = str(path.parent.child(path.stem)) # Read out the content and create a new Document object for # it. We'll strip the HTML tags here (for want of a better # place to do it). with open(built_doc) as fp: json_doc = json.load(fp) try: json_doc['body'] # Just to make sure it exists. title = unescape_entities(strip_tags(json_doc['title'])) except KeyError, ex: if verbosity >= 2: print "Skipping: %s (no %s)" % (path, ex.args[0]) continue doc = documents.pop(path, Document(path=path, release=release)) doc.title = title doc.save() haystack.site.update_object(doc) # Clean up any remaining documents. for doc in documents.values(): if verbosity >= 2: print "Deleting:", doc haystack.site.remove_object(doc) doc.delete()
def handle_noargs(self, **kwargs): try: verbosity = int(kwargs['verbosity']) except (KeyError, TypeError, ValueError): verbosity = 1 # Somehow, bizarely, there's a bug in Sphinx such that if I try to # build 1.0 before other versions, things fail in weird ways. However, # building newer versions first works. I suspect Sphinx is hanging onto # some global state. Anyway, we can work around it by making sure that # "dev" builds before "1.0". This is ugly, but oh well. for release in DocumentRelease.objects.order_by('-version'): if verbosity >= 1: print "Updating %s..." % release # checkout_dir is shared for all languages. checkout_dir = Path(settings.DOCS_BUILD_ROOT).child( release.version) parent_build_dir = Path(settings.DOCS_BUILD_ROOT).child( release.lang, release.version) if not checkout_dir.exists(): checkout_dir.mkdir(parents=True) if not parent_build_dir.exists(): parent_build_dir.mkdir(parents=True) # # Update the release from SCM. # # Make an SCM checkout/update into the destination directory. # Do this dynamically in case we add other SCM later. getattr(self, 'update_%s' % release.scm)(release.scm_url, checkout_dir) if release.docs_subdir: source_dir = checkout_dir.child( *release.docs_subdir.split('/')) else: source_dir = checkout_dir if release.lang != 'en': scm_url = release.scm_url.replace( 'django.git', 'django-docs-translations.git') trans_dir = checkout_dir.child('django-docs-translation') if not trans_dir.exists(): trans_dir.mkdir() getattr(self, 'update_%s' % release.scm)(scm_url, trans_dir) if not source_dir.child('locale').exists(): source_dir.child('locale').write_link( trans_dir.child('translations')) subprocess.call("cd %s && make translations" % trans_dir, shell=True) # # Use Sphinx to build the release docs into JSON and HTML documents. # for builder in ('json', 'html'): # Wipe and re-create the build directory. See #18930. 
build_dir = parent_build_dir.child('_build', builder) if build_dir.exists(): shutil.rmtree(build_dir) build_dir.mkdir(parents=True) # "Shell out" (not exactly, but basically) to sphinx-build. if verbosity >= 2: print " building %s (%s -> %s)" % (builder, source_dir, build_dir) sphinx.cmdline.main([ 'sphinx-build', '-b', builder, '-D', 'language=%s' % release.lang, '-q', # Be vewy qwiet source_dir, # Source file directory build_dir, # Destination directory ]) # # Create a zip file of the HTML build for offline reading. # This gets moved into MEDIA_ROOT for downloading. # html_build_dir = parent_build_dir.child('_build', 'html') zipfile_name = 'django-docs-%s-%s.zip' % (release.version, release.lang) zipfile_path = Path(settings.MEDIA_ROOT).child( 'docs', zipfile_name) if not zipfile_path.parent.exists(): zipfile_path.parent.mkdir(parents=True) if verbosity >= 2: print " build zip (into %s)" % zipfile_path def zipfile_inclusion_filter(f): return f.isfile() and '.doctrees' not in f.components() with closing(zipfile.ZipFile(zipfile_path, 'w')) as zf: for f in html_build_dir.walk(filter=zipfile_inclusion_filter): zf.write(f, html_build_dir.rel_path_to(f)) # # Copy the build results to the directory used for serving # the documentation in the least disruptive way possible. # build_dir = parent_build_dir.child('_build') built_dir = parent_build_dir.child('_built') subprocess.check_call([ 'rsync', '--archive', '--delete', '--link-dest=' + build_dir, build_dir + '/', built_dir ]) # # Rebuild the imported document list and search index. # if not kwargs['reindex']: continue if verbosity >= 2: print " reindexing..." # Build a dict of {path_fragment: document_object}. We'll pop values # out of this dict as we go which'll make sure we know which # remaining documents need to be deleted (and unindexed) later on. 
documents = dict( (doc.path, doc) for doc in release.documents.all()) # Walk the tree we've just built looking for ".fjson" documents # (just JSON, but Sphinx names them weirdly). Each one of those # documents gets a corresponding Document object created which # we'll then ask Sphinx to reindex. # # We have to be a bit careful to reverse-engineer the correct # relative path component, especially for "index" documents, # otherwise the search results will be incorrect. json_built_dir = parent_build_dir.child('_built', 'json') for built_doc in json_built_dir.walk(): if built_doc.isfile() and built_doc.ext == '.fjson': # Convert the built_doc path which is now an absolute # path (i.e. "/home/docs/en/1.2/_built/ref/models.json") # into a path component (i.e. "ref/models"). path = json_built_dir.rel_path_to(built_doc) if path.stem == 'index': path = path.parent path = str(path.parent.child(path.stem)) # Read out the content and create a new Document object for # it. We'll strip the HTML tags here (for want of a better # place to do it). with open(built_doc) as fp: json_doc = json.load(fp) try: json_doc['body'] # Just to make sure it exists. title = unescape_entities( strip_tags(json_doc['title'])) except KeyError, ex: if verbosity >= 2: print "Skipping: %s (no %s)" % (path, ex.args[0]) continue doc = documents.pop(path, Document(path=path, release=release)) doc.title = title doc.save() haystack.site.update_object(doc) # Clean up any remaining documents. for doc in documents.values(): if verbosity >= 2: print "Deleting:", doc haystack.site.remove_object(doc) doc.delete()
# -*- coding: utf-8 -*-
from unipath import FSPath as Path

# Repository root: two levels above this settings file.
PROJECT_ROOT = Path(__file__).absolute().ancestor(2)

DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.sqlite3",
        "NAME": PROJECT_ROOT.child("dev.db"),
        "USER": "",
        "PASSWORD": "",
        "HOST": "",
        "PORT": "",
    }
}

# -- django-celery
REDIS_CONNECT_RETRY = True
REDIS_HOST = "localhost"
REDIS_PORT = 6379
REDIS_DB = 0

# The Celery broker rides on the same Redis instance.
BROKER_HOST = REDIS_HOST
BROKER_PORT = REDIS_PORT
BROKER_VHOST = REDIS_DB
# Settings for www.djangoproject.com import json from unipath import FSPath as Path # Utilities # The full path to the repository root. BASE = Path(__file__).absolute().ancestor(2) # It's a secret to everybody try: with open(BASE.ancestor(2).child('conf').child('secrets.json')) as handle: SECRETS = json.load(handle) except IOError: SECRETS = { 'secret_key': 'a', 'superfeedr_creds': ['*****@*****.**', 'some_string'], } # Django settings CACHE_MIDDLEWARE_SECONDS = 60 * 5 # 5 minutes CACHE_MIDDLEWARE_KEY_PREFIX = 'django' CSRF_COOKIE_HTTPONLY = True DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2',
# Settings for www.djangoproject.com import os import json import platform from unipath import FSPath as Path # The full path to the django_website directory. BASE = Path(__file__).absolute().ancestor(2) # Far too clever trick to know if we're running on the deployment server. PRODUCTION = ('DJANGOPROJECT_DEBUG' not in os.environ) and ("djangoproject" in platform.node()) # It's a secret to everybody SECRETS = json.load(open(BASE.ancestor(2).child('secrets.json'))) SECRET_KEY = str(SECRETS['secret_key']) # SUPERFEEDR_CREDS is a 2 element list in the form of [email,secretkey] SUPERFEEDR_CREDS = SECRETS.get('superfeedr_creds') ADMINS = (('Adrian Holovaty','*****@*****.**'),('Jacob Kaplan-Moss', '*****@*****.**')) MANAGERS = (('Jacob Kaplan-Moss','*****@*****.**'),) FEED_APPROVERS_GROUP_NAME = "feed-approver" TIME_ZONE = 'America/Chicago' SERVER_EMAIL = '*****@*****.**' DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'djangoproject', 'USER': '******' },
# following PEP 386, versiontools will pick it up
__version__ = (0, 1, 0, "final", 0)

from unipath import FSPath as Path

# Directory containing this package, and the bundled worker payload.
PROJECT_DIR = Path(__file__).absolute().ancestor(1)
WORKER_BUNDLE = PROJECT_DIR.child("worker_bundle")
# Django settings for hos
from unipath import FSPath as Path

# Project root: three levels above this settings file.
BASE_DIR = Path(__file__).absolute().ancestor(3)

# Make this unique, and don't share it with anybody.
from .secret_key import *

DEBUG = False
TEMPLATE_DEBUG = DEBUG

TEST_RUNNER = 'django.test.runner.DiscoverRunner'

ADMINS = (
    # ('Your Name', '*****@*****.**'),
)
MANAGERS = ADMINS

DATABASES = {
    'default': {
        'NAME': 'meta',
        'ENGINE': 'django.db.backends.mysql',
        'USER': '******',
        'PASSWORD': ''
    },
}

# Local timezone
TIME_ZONE = 'Europe/Vienna'
from unipath import FSPath as Path

# Project root: two levels above this settings file.
BASE = Path(__file__).absolute().ancestor(2)

DEBUG = True
TEMPLATE_DEBUG = DEBUG

ADMINS = (('Martin Brugnara', '*****@*****.**'), )
MANAGERS = ADMINS

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': 'rhok.db',
        'USER': '******',
        'PASSWORD': '******',
        'HOST': '127.0.0.1',
        'PORT': '',
    }
}

ALLOWED_HOSTS = []

TIME_ZONE = 'Europe/Rome'
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
USE_I18N = False
USE_L10N = False
USE_TZ = True

MEDIA_ROOT = BASE.child('media')
request.rdbconn.close() except AttributeError: close finally: return retval return wrapper ########NEW FILE######## __FILENAME__ = base import os import urlparse from unipath import FSPath as Path from django.core.exceptions import ImproperlyConfigured PROJECT_DIR = Path(__file__).absolute().ancestor(3) def get_env_variable(var_name): """ Get the environment variable or return an exception """ try: return os.environ[var_name] except KeyError: error_msg = "Set the {} environment variable".format(var_name) raise ImproperlyConfigured(error_msg) DEBUG = False TEMPLATE_DEBUG = DEBUG ADMINS = ( ('Idan Gazit', '*****@*****.**'),
# Django settings for hweb project. from unipath import FSPath as Path DEBUG = True TEMPLATE_DEBUG = DEBUG # The full path to the hweb directory. BASE = Path(__file__).absolute().ancestor(2) ADMINS = ( # ('Your Name', '*****@*****.**'), ) MANAGERS = ADMINS DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'. 'NAME': BASE.child('hweb').child('hweb.db'), # Or path to database file if using sqlite3. 'USER': '', # Not used with sqlite3. 'PASSWORD': '', # Not used with sqlite3. 'HOST': '', # Set to empty string for localhost. Not used with sqlite3. 'PORT': '', # Set to empty string for default. Not used with sqlite3. } } CACHES = { 'default' : { 'BACKEND' : 'django.core.cache.backends.memcached.MemcachedCache', 'LOCATION' : '127.0.0.1:11211', #'BACKEND' : 'django.core.cache.backends.locmem.LocMemCache',
For more information on this file, see https://docs.djangoproject.com/en/1.10/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.10/ref/settings/ """ import os from unipath import FSPath as Path from envs import env # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) PROJECT_PATH = Path(__file__).absolute().ancestor(1) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = env('SECRET_KEY','YourAppSucks323') # SECURITY WARNING: don't run with debug turned on in production! DEBUG = env('DEBUG',False,var_type='boolean') ALLOWED_HOSTS = env('ALLOWED_HOSTS',['127.0.0.1','.execute-api.us-east-1.amazonaws.com'],var_type='list') CSRF_TRUSTED_ORIGINS = ALLOWED_HOSTS # Application definition
import os import urlparse from unipath import FSPath as Path from django.core.exceptions import ImproperlyConfigured PROJECT_DIR = Path(__file__).absolute().ancestor(3) def get_env_variable(var_name): """ Get the environment variable or return an exception """ try: return os.environ[var_name] except KeyError: error_msg = "Set the {} environment variable".format(var_name) raise ImproperlyConfigured(error_msg) DEBUG = False TEMPLATE_DEBUG = DEBUG ADMINS = (('Idan Gazit', '*****@*****.**'), ) DATABASES = {} RETHINKDB_URL = urlparse.urlparse(get_env_variable('RETHINKDB_URL')) urlparse.uses_netloc.append('rethinkdb') RETHINK_CONNARGS = {} rethink_argmap = { 'hostname': 'host',
import os

from unipath import FSPath as Path

# Project root: two levels above this settings file.
PROJECT_DIR = Path(__file__).absolute().ancestor(2)

# S3 / static-asset configuration, all read from the environment.
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = os.environ.get('AWS_STORAGE_BUCKET_NAME')
AWS_S3_CUSTOM_DOMAIN = os.environ.get('AWS_STORAGE_BUCKET_NAME')
AWS_QUERYSTRING_AUTH = False
AWS_S3_SECURE_URLS = False

TILESERVER_URL = os.environ.get('TILESERVER_URL')

DEBUG = False
TEMPLATE_DEBUG = DEBUG

ADMINS = ()
MANAGERS = ADMINS

INTERNAL_IPS = ('127.0.0.1',)

SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db'

TIME_ZONE = 'Etc/UTC'
USE_TZ = True
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
USE_I18N = False
USE_L10N = True

MEDIA_ROOT = PROJECT_DIR.child('media')
# the following line is a total lie
from unipath import FSPath as Path
from datetime import timedelta
import os

# Project root, expressed both as a unipath Path and a plain string path.
PROJECT_DIR = Path(__file__).absolute().ancestor(2)
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."), )

DEBUG = False
TEMPLATE_DEBUG = DEBUG

ADMINS = ()
MANAGERS = ADMINS

ALLOWED_HOSTS = []

TIME_ZONE = 'Asia/Jerusalem'
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
USE_I18N = True
USE_L10N = True
USE_TZ = True

MEDIA_ROOT = ''
MEDIA_URL = ''

STATIC_ROOT = ''
STATIC_URL = '/static/'
STATICFILES_DIRS = (PROJECT_DIR.child('records', 'static'), )
from unipath import FSPath as Path

# Helper lambda for gracefully degrading env variables. Taken from http://rdegges.com/devops-django-part-3-the-heroku-way
# Fix: dict.has_key() is deprecated in Python 2 and removed in Python 3;
# dict.get() performs the same lookup-with-default in one call.
env = lambda e, d: environ.get(e, d)

# EventBrite API Info
EB_APPKEY = env('EB_APPKEY', None)
EB_USERKEY = env('EB_USERKEY', None)
EB_OAUTHKEY = env('EB_OAUTHKEY', None)
EB_EVENTID = env('EB_EVENTID', None)

# Google Aanalytics Information
GA_ID = env('GA_ID', None)
GA_DOMAIN = env('SITE_DOMAIN', None)

# Repository root and the app package directory.
BASE = Path(__file__).absolute().ancestor(2)
APP = Path(__file__).absolute().ancestor(1)

DEBUG = False
TEMPLATE_DEBUG = DEBUG

ADMINS = (
    # ('Your Name', '*****@*****.**'),
)
MANAGERS = ADMINS

DATABASES = {'default': dj_database_url.config(default='postgres://localhost')}

# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
def handle_noargs(self, **kwargs):
    """
    Rebuild the documentation for every DocumentRelease.

    For each release this: (1) checks out / updates the docs source via the
    release's SCM, (2) runs sphinx-build for the 'json' and 'html' builders,
    (3) zips the HTML build into MEDIA_ROOT for offline download, and
    (4) optionally (kwargs['reindex']) re-imports every built ".fjson"
    document into the Document table and the haystack search index.

    Recognized kwargs: 'verbosity' (int-able; defaults to 1 on any parse
    failure) and 'reindex' (truthy to rebuild the search index).
    """
    try:
        verbosity = int(kwargs['verbosity'])
    except (KeyError, TypeError, ValueError):
        # Be tolerant of a missing/garbage verbosity option.
        verbosity = 1
    for release in DocumentRelease.objects.all():
        if verbosity >= 1:
            print "Updating %s..." % release
        # Build tree lives under DOCS_BUILD_ROOT/<lang>/<version>.
        destdir = Path(settings.DOCS_BUILD_ROOT).child(release.lang, release.version)
        if not destdir.exists():
            destdir.mkdir(parents=True)

        #
        # Update the release from SCM.
        #
        # Make an SCM checkout/update into the destination directory.
        # Do this dynamically in case we add other SCM later.
        getattr(self, 'update_%s' % release.scm)(release.scm_url, destdir)

        #
        # Use Sphinx to build the release docs into JSON and HTML documents.
        #
        for builder in ('json', 'html'):
            # Make the directory for the built files - sphinx-build doesn't
            # do it for us, apparently.
            build_dir = destdir.child('_build', builder)
            if not build_dir.exists():
                build_dir.mkdir(parents=True)

            # "Shell out" (not exactly, but basically) to sphinx-build.
            if verbosity >= 2:
                print "  building %s (into %s)" % (builder, build_dir)
            sphinx.cmdline.main(['sphinx-build',
                                 '-b', builder,
                                 '-q',              # Be vewy qwiet
                                 destdir,           # Source file directory
                                 build_dir,         # Destination directory
                                 ])

        #
        # Create a zip file of the HTML build for offline reading.
        # This gets moved into MEDIA_ROOT for downloading.
        #
        html_build_dir = destdir.child('_build', 'html')
        zipfile_name = 'django-docs-%s-%s.zip' % (release.version, release.lang)
        zipfile_path = Path(settings.MEDIA_ROOT).child('docs', zipfile_name)
        if not zipfile_path.parent.exists():
            zipfile_path.parent.mkdir(parents=True)
        if verbosity >= 2:
            print "  build zip (into %s)" % zipfile_path
        # closing() makes sure the archive is finalized even on error.
        with closing(zipfile.ZipFile(zipfile_path, 'w')) as zf:
            for f in html_build_dir.walk(filter=Path.isfile):
                # Store paths relative to the HTML build root in the archive.
                zf.write(f, html_build_dir.rel_path_to(f))

        #
        # Rebuild the imported document list and search index.
        #
        if not kwargs['reindex']:
            continue

        if verbosity >= 2:
            print "  reindexing..."

        # Build a dict of {path_fragment: document_object}. We'll pop values
        # out of this dict as we go which'll make sure we know which
        # remaining documents need to be deleted (and unindexed) later on.
        documents = dict((doc.path, doc) for doc in release.documents.all())

        # Walk the tree we've just built looking for ".fjson" documents
        # (just JSON, but Sphinx names them weirdly). Each one of those
        # documents gets a corresponding Document object created which
        # we'll then ask Sphinx to reindex.
        #
        # We have to be a bit careful to reverse-engineer the correct
        # relative path component, especially for "index" documents,
        # otherwise the search results will be incorrect.
        json_build_dir = destdir.child('_build', 'json')
        for built_doc in json_build_dir.walk():
            if built_doc.isfile() and built_doc.ext == '.fjson':
                # Convert the built_doc path which is now an absolute
                # path (i.e. "/home/docs/en/1.2/_build/ref/models.json")
                # into a path component (i.e. "ref/models").
                path = json_build_dir.rel_path_to(built_doc)
                if path.stem == 'index':
                    # "foo/index" is indexed as just "foo".
                    path = path.parent
                path = str(path.parent.child(path.stem))

                # Read out the content and create a new Document object for
                # it. We'll strip the HTML tags here (for want of a better
                # place to do it).
                with open(built_doc) as fp:
                    json_doc = json.load(fp)
                    try:
                        json_doc['body']  # Just to make sure it exists.
                        title = strip_tags(json_doc['title'])
                    except KeyError, ex:
                        # Built page without body/title (e.g. special pages);
                        # skip it rather than index a broken record.
                        if verbosity >= 2:
                            print "Skipping: %s (no %s)" % (path, ex.args[0])
                        continue

                # Reuse the existing Document if we have one (popping it so
                # it survives the cleanup pass below), else create a new one.
                doc = documents.pop(path, Document(path=path, release=release))
                doc.title = title
                doc.save()
                haystack.site.update_object(doc)

        # Clean up any remaining documents.
        for doc in documents.values():
            if verbosity >= 2:
                print "Deleting:", doc
            haystack.site.remove_object(doc)
            doc.delete()
Generated by 'django-admin startproject' using Django 1.9.5. For more information on this file, see https://docs.djangoproject.com/en/1.9/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.9/ref/settings/ """ import os from unipath import FSPath as Path # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) PROJECT_DIR = Path(__file__).absolute().ancestor(1) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '1ge02tu1cn-g*ji#(l_$qrobv=il#zavyym!0$x04h1hm7)_zp' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition # Applications
# Django settings for neurobrush_web project. from unipath import FSPath as Path from .services import * PROJECT_DIR = Path(__file__).absolute().ancestor(2) DEBUG = True TEMPLATE_DEBUG = DEBUG ADMINS = (('Ilan Bar-Magen', '*****@*****.**'), ) MANAGERS = ADMINS DATABASES = { 'default': { 'ENGINE': 'django.db.backends.', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'. 'NAME': '', # Or path to database file if using sqlite3. # The following settings are not used with sqlite3: 'USER': '', 'PASSWORD': '', 'HOST': '', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP. 'PORT': '', # Set to empty string for default. } } # Hosts/domain names that are valid for this site; required if DEBUG is False # See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts ALLOWED_HOSTS = []
# Settings for www.djangoproject.com import os import json import platform from unipath import FSPath as Path # The full path to the django_website directory. BASE = Path(__file__).absolute().ancestor(2) # Far too clever trick to know if we're running on the deployment server. PRODUCTION = ('DJANGOPROJECT_DEBUG' not in os.environ) and ("djangoproject" in platform.node()) # It's a secret to everybody SECRETS = json.load(open(BASE.ancestor(2).child('secrets.json'))) SECRET_KEY = str(SECRETS['secret_key']) ADMINS = (('Adrian Holovaty','*****@*****.**'),('Jacob Kaplan-Moss', '*****@*****.**')) MANAGERS = (('Jacob Kaplan-Moss','*****@*****.**'),) TIME_ZONE = 'America/Chicago' SERVER_EMAIL = '*****@*****.**' DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'djangoproject', 'USER': '******' }, 'trac': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'code.djangoproject',
import os

import dj_database_url
from unipath import FSPath as Path


def env_or_default(NAME, default):
    """Return the environment variable ``NAME``, or ``default`` if unset."""
    return os.environ.get(NAME, default)


PROJECT_ROOT = Path(__file__).ancestor(3)
PACKAGE_ROOT = PROJECT_ROOT.child('djangocon')
BASE_DIR = PACKAGE_ROOT

# Debug defaults ON; set DEBUG=0 in the environment for production.
DEBUG = bool(int(os.environ.get("DEBUG", "1")))

DATABASES = {
    "default": dj_database_url.config(default="postgres://localhost/djangocon2016")
}

ALLOWED_HOSTS = [
    os.environ.get("GONDOR_INSTANCE_DOMAIN"),
    "2016.djangocon.us",
    "www.djangocon.us",
    # Fixed: this entry was the literal string "localhost'," (misplaced
    # quote), so requests from localhost never matched.
    "localhost",
]
# NOTE(review): if GONDOR_INSTANCE_DOMAIN is unset, ALLOWED_HOSTS contains
# None, which Django rejects when matching the Host header — confirm the
# variable is always set in deployed environments.

# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# Django settings for www project. import json import platform from unipath import FSPath as Path ADMINS = ( ('Kael', '*****@*****.**'), ) MANAGERS = ADMINS BASE = Path(__file__).absolute().ancestor(1) # Presume that only the production server will run on hoth # This isn't the best longterm solution. PRODUCTION = ("endor" in platform.node()) ALLOWED_HOSTS = [ 'kaelspencer.com', '127.0.0.1', 'localhost' ] if PRODUCTION: print "Production: true" else: print "Production: false" SECRETS = json.load(open('secrets.json')) SECRET_KEY = str(SECRETS['secret_key'])
from unipath import FSPath as Path PROJECT_DIR = Path(__file__).absolute().ancestor(2) # # My settings # _creds = PROJECT_DIR.child('trac-creds.txt').read_file().strip() TRAC_RPC_URL = "https://%[email protected]/login/rpc" % _creds TRAC_URL = "https://code.djangoproject.com/" # # Django settings follow... # DEBUG = False TEMPLATE_DEBUG = DEBUG ADMINS = ( # ('Your Name', '*****@*****.**'), ) MANAGERS = ADMINS TIME_ZONE = 'America/Chicago' LANGUAGE_CODE = 'en-us' SITE_ID = 1 USE_I18N = True USE_L10N = True
import os from unipath import FSPath as Path PROJECT_DIR = Path(__file__).absolute().ancestor(2) AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID') AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY') AWS_STORAGE_BUCKET_NAME = 'telostats' DEBUG = False TEMPLATE_DEBUG = DEBUG ADMINS = ( ('Idan Gazit', '*****@*****.**'), ('Yuval Adam', '*****@*****.**'), ) MANAGERS = ADMINS INTERNAL_IPS = ('127.0.0.1', ) SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db' TIME_ZONE = 'Etc/UTC' USE_TZ = True LANGUAGE_CODE = 'en-us' SITE_ID = 1 USE_I18N = False USE_L10N = True MEDIA_ROOT = PROJECT_DIR.child('media') MEDIA_URL = 'http://' + AWS_STORAGE_BUCKET_NAME + '.s3.amazonaws.com/media'
import os

import dj_database_url
from unipath import FSPath as Path


def env_or_default(NAME, default):
    """Return the environment variable ``NAME``, or ``default`` if unset."""
    return os.environ.get(NAME, default)


PROJECT_ROOT = Path(__file__).ancestor(3)
PACKAGE_ROOT = PROJECT_ROOT.child('djangocon')
BASE_DIR = PACKAGE_ROOT

# Debug defaults ON; set DEBUG=0 in the environment for production.
DEBUG = bool(int(os.environ.get("DEBUG", "1")))

DATABASES = {
    "default": dj_database_url.config(default="postgres://localhost/djangocon2016")
}

ALLOWED_HOSTS = [
    os.environ.get("GONDOR_INSTANCE_DOMAIN"),
    "2016.djangocon.us",
    "www.djangocon.us",
    # Fixed: this entry was the literal string "localhost'," (misplaced
    # quote), so requests from localhost never matched.
    "localhost",
]
# NOTE(review): if GONDOR_INSTANCE_DOMAIN is unset, ALLOWED_HOSTS contains
# None, which Django rejects when matching the Host header — confirm the
# variable is always set in deployed environments.

# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
import platform from unipath import FSPath as Path BASE = Path(__file__).parent DEBUG = TEMPLATE_DEBUG = platform.node() != 'jacobian.org' MANAGERS = ADMINS = [] TIME_ZONE = 'America/Chicago' LANGUAGE_CODE = 'en-us' USE_I18N = False USE_L10N = False DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': BASE.child('nn.db'), } } MIDDLEWARE_CLASSES = [] SITE_ID = 1 SECRET_KEY = 'LOCAL' if DEBUG else open('/home/web/sekrit.txt').read().strip() ROOT_URLCONF = 'djangome.urls' INSTALLED_APPS = ['djangome', 'gunicorn'] TEMPLATE_DIRS = [BASE.child('templates')] REDIS = { 'host': 'localhost', 'port': 6379,
def _fget(self):
    # Resolve the configured attribute (falling back to the closure's
    # default) as a path, then re-root each of its components beneath
    # this object's base path.
    configured = Path(getattr(self, config_name, default))
    return self.path.child(*configured.components())
from unipath import FSPath as Path PROJECT_DIR = Path(__file__).absolute().ancestor(2) ###################################### # Main ###################################### DEBUG = True ROOT_URLCONF = 'urls' SITE_ID = 1 ###################################### # Apps ###################################### INSTALLED_APPS = ( 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.sites', 'django.contrib.messages', 'django.contrib.staticfiles', 'south', 'board', ) MIDDLEWARE_CLASSES = ( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware',
from unipath import FSPath as Path # monkey-patch python-openid to work with nycga.net #from utils import monkey_patch_openid; monkey_patch_openid() PROJECT_DIR = Path(__file__).absolute().ancestor(2) DEBUG = False TEMPLATE_DEBUG = DEBUG ADMINS = ( ('Ori Livneh', '*****@*****.**'), ) MANAGERS = ADMINS # needed by django-debug-toolbar INTERNAL_IPS = ('127.0.0.1',) # l10n / i18n TIME_ZONE = 'America/New_York' LANGUAGE_CODE = 'en-us' SITE_ID = 1 USE_I18N = True USE_L10N = True MEDIA_ROOT = PROJECT_DIR.child('media') MEDIA_URL = '/media/' STATIC_ROOT = PROJECT_DIR.child('static_root') STATIC_URL = '/static/'
import sys from unipath import FSPath as Path PROJECT_ROOT = Path(__file__).absolute().ancestor(2) sys.path.insert(0, PROJECT_ROOT.child("apps")) DEBUG = True TEMPLATE_DEBUG = DEBUG ADMINS = ( # ('Your Name', '*****@*****.**'), ) MANAGERS = ADMINS DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'. 'NAME': 'dev.db', # Or path to database file if using sqlite3. # The following settings are not used with sqlite3: 'USER': '', 'PASSWORD': '', 'HOST': '', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP. 'PORT': '', # Set to empty string for default. } } # Hosts/domain names that are valid for this site; required if DEBUG is False
from unipath import FSPath as Path import os PROJECT_DIR = Path(__file__).absolute().ancestor(1) """Operation Settings""" DEBUG = os.environ.get('DEBUG', False) TEMPLATE_DEBUG = DEBUG SECRET_KEY = os.environ['SECRET_KEY'] ADMINS = ( ('SoCraTes Webmaster', '*****@*****.**'), ) MANAGERS = ADMINS # Honor the 'X-Forwarded-Proto' header for request.is_secure() SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') # Allow all host headers ALLOWED_HOSTS = ['*'] LOGGING = { 'version': 1, 'disable_existing_loggers': True, 'formatters': { 'verbose': { 'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s' }, },
# -*- coding: utf-8 -*- import sys from unipath import FSPath as Path PROJECT_ROOT = Path(__file__).absolute().ancestor(2) sys.path.insert(0, PROJECT_ROOT.child("apps")) DEBUG = True TEMPLATE_DEBUG = DEBUG ADMINS = ( # ("Your Name", "*****@*****.**"), ) MANAGERS = ADMINS TIME_ZONE = None LANGUAGE_CODE = "en-us" SITE_ID = 1 USE_I18N = True USE_L10N = True STATIC_ROOT = PROJECT_ROOT.child("static") STATIC_URL = "/static/" STATICFILES_DIRS = ( # Put strings here, like "/home/html/static" or "C:/www/django/static". # Always use forward slashes, even on Windows. # Don't forget to use absolute paths, not relative paths. )
import os
import ConfigParser

from unipath import FSPath

PROJECT_DIR = FSPath(__file__).absolute().ancestor(3)
# os.path.join is the idiomatic way to build paths (os.sep.join blindly
# inserts separators and mishandles components that already end in one).
default_config_dir = os.path.join(PROJECT_DIR, 'configuration')

#
# Environment Settings
#
SETTINGS_FILE_NAME = os.path.join(default_config_dir, 'environment.ini')
if not os.path.isfile(SETTINGS_FILE_NAME):
    # IOError is more precise than a bare Exception for a missing file and
    # is still caught by any caller handling Exception.
    raise IOError("Environment settings file '%s' not found." % SETTINGS_FILE_NAME)

config = ConfigParser.RawConfigParser()
config.read(SETTINGS_FILE_NAME)

# System and classifier options read from environment.ini.
PORT = int(config.get('system', 'PORT'))
IMAGE_FOLDER_PATH = config.get('classifier', 'IMAGE_FOLDER_PATH')
CLASSIFIER_TYPE = config.get('classifier', 'CLASSIFIER_TYPE')
CLASSIFIER_TRAIN_PATH = config.get('classifier', 'CLASSIFIER_TRAIN_PATH')
# Django settings for oakff_site project. import os from unipath import FSPath as Path PROJECT_DIR = Path(__file__).absolute().ancestor(1) DEBUG = True TEMPLATE_DEBUG = DEBUG ADMINS = ( # ('Your Name', '*****@*****.**'), ) MANAGERS = ADMINS DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'. 'NAME': 'dummy.db', # Or path to database file if using sqlite3. 'USER': '', # Not used with sqlite3. 'PASSWORD': '', # Not used with sqlite3. 'HOST': '', # Set to empty string for localhost. Not used with sqlite3. 'PORT': '', # Set to empty string for default. Not used with sqlite3. } } # Local time zone for this installation. Choices can be found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # although not all choices may be available on all operating systems. # On Unix systems, a value of None will cause Django to use the same # timezone as the operating system.
def run_benchmarks(control, experiment, benchmark_dir, benchmarks, trials,
                   vcs=None, record_dir=None, profile_dir=None,
                   continue_on_error=False):
    """
    Run the benchmark suite against two Widgy checkouts and compare them.

    ``control`` and ``experiment`` name either directories (when ``vcs`` is
    None) or branches (when ``vcs`` is given, e.g. "git").  Each benchmark
    found in ``benchmark_dir`` is run ``trials`` times in both environments
    and the results are compared with perf.CompareBenchmarkData.  If
    ``benchmarks`` is non-empty, only those named benchmarks run.  Results
    are optionally recorded as JSON into ``record_dir`` and profiles into
    ``profile_dir``.  RuntimeErrors abort unless ``continue_on_error``.

    Raises ValueError if ``record_dir`` is given but does not exist.
    """
    if benchmarks:
        print "Running benchmarks: %s" % " ".join(benchmarks)
    else:
        print "Running all benchmarks"

    if record_dir:
        record_dir = Path(record_dir).expand().absolute()
        if not record_dir.exists():
            raise ValueError('Recording directory "%s" does not exist' % record_dir)
        print "Recording data to '%s'" % record_dir

    control_label = get_widgy_version(control, vcs=vcs)
    experiment_label = get_widgy_version(experiment, vcs=vcs)
    branch_info = "%s branch " % vcs if vcs else ""
    print "Control: Widgy %s (in %s%s)" % (control_label, branch_info, control)
    print "Experiment: Widgy %s (in %s%s)" % (experiment_label, branch_info, experiment)
    print

    # Calculate the subshell envs that we'll use to execute the
    # benchmarks in.
    if vcs:
        # Same working tree for both runs; we switch branches between them.
        control_env = {
            'PYTHONPATH': '%s:%s' % (Path.cwd().absolute(), Path(benchmark_dir)),
        }
        experiment_env = control_env.copy()
    else:
        # Two separate checkouts, one per environment.
        control_env = {
            'PYTHONPATH': '%s:%s' % (Path(control).absolute(), Path(benchmark_dir))
        }
        experiment_env = {
            'PYTHONPATH': '%s:%s' % (Path(experiment).absolute(), Path(benchmark_dir))
        }

    for benchmark in discover_benchmarks(benchmark_dir):
        if not benchmarks or benchmark.name in benchmarks:
            print "Running '%s' benchmark ..." % benchmark.name
            # Each benchmark package supplies its own Django settings module.
            settings_mod = '%s.settings' % benchmark.name
            control_env['DJANGO_SETTINGS_MODULE'] = settings_mod
            experiment_env['DJANGO_SETTINGS_MODULE'] = settings_mod
            if profile_dir is not None:
                # "con-"/"exp-" prefixes keep the two profiles apart.
                control_env['DJANGOBENCH_PROFILE_FILE'] = Path(
                    profile_dir, "con-%s" % benchmark.name)
                experiment_env['DJANGOBENCH_PROFILE_FILE'] = Path(
                    profile_dir, "exp-%s" % benchmark.name)
            try:
                # When using a VCS, switch to the right branch before each run.
                if vcs:
                    switch_to_branch(vcs, control)
                control_data = run_benchmark(benchmark, trials, control_env)
                if vcs:
                    switch_to_branch(vcs, experiment)
                experiment_data = run_benchmark(benchmark, trials, experiment_env)
            except SkipBenchmark, reason:
                # Benchmark asked to be skipped in this configuration.
                print "Skipped: %s\n" % reason
                continue
            except RuntimeError, error:
                if continue_on_error:
                    print "Failed: %s\n" % error
                    continue
                raise

            options = argparse.Namespace(
                track_memory=False,
                diff_instrumentation=False,
                benchmark_name=benchmark.name,
                disable_timelines=True,
                control_label=control_label,
                experiment_label=experiment_label,
            )
            result = perf.CompareBenchmarkData(control_data, experiment_data, options)
            if record_dir:
                record_benchmark_results(
                    dest=record_dir.child('%s.json' % benchmark.name),
                    name=benchmark.name,
                    result=result,
                    control=control_label,
                    experiment=experiment_label,
                    control_data=control_data,
                    experiment_data=experiment_data,
                )
            print format_benchmark_result(result, len(control_data.runtimes))
            print
# Django settings for open-shot project. import os from unipath import FSPath as Path import djcelery from django.core.urlresolvers import reverse PROJECT_DIR = Path(__file__).absolute().ancestor(3) DEBUG = False TEMPLATE_DEBUG = DEBUG ADMINS = () MANAGERS = ADMINS INTERNAL_IPS = ('127.0.0.1', ) # TODO: test if the next line is good for us # SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db' TIME_ZONE = 'Asia/Jerusalem' # Language code for this installation. All choices can be found here: # http://www.i18nguy.com/unicode/language-identifiers.html gettext = lambda s: s LANGUAGES = ( ('he', gettext('Hebrew')), ('en', gettext('English')), ('ar', gettext('Arabic')), ('ru', gettext('Russian')), ) LANGUAGE_CODE = LANGUAGES[0][0]
from unipath import FSPath as Path PROJECT_DIR = Path(__file__).absolute().ancestor(2) DEBUG = False TEMPLATE_DEBUG = DEBUG ADMINS = ( ('Lior Sion', '*****@*****.**'), ) MANAGERS = ADMINS AUTH_PROFILE_MODULE='userprofile.UserProfile' TIME_ZONE = 'America/Chicago' LANGUAGE_CODE = 'en-us' SITE_ID = 1 USE_I18N = True USE_L10N = True SEND_BROKEN_LINK_EMAILS = True ACCOUNT_ACTIVATION_DAYS = 7 AUTO_GENERATE_AVATAR_SIZES = 80 AVATAR_STORAGE_DIR = "avatars" AUTHENTICATION_BACKENDS = ('django.contrib.auth.backends.ModelBackend', 'social_auth.backends.twitter.TwitterBackend', 'social_auth.backends.facebook.FacebookBackend',
# Settings common to www.djangoproject.com and docs.djangoproject.com import json import os import platform from unipath import FSPath as Path ### Utilities # The full path to the repository root. BASE = Path(__file__).absolute().ancestor(2) # Far too clever trick to know if we're running on the deployment server. PRODUCTION = ('DJANGOPROJECT_DEBUG' not in os.environ) # It's a secret to everybody with open(BASE.child('secrets.json')) as handle: SECRETS = json.load(handle) ### Django settings ADMINS = ( ('Adrian Holovaty', '*****@*****.**'), ('Jacob Kaplan-Moss', '*****@*****.**'), ) CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', 'LOCATION': SECRETS.get('memcached_host', '127.0.0.1:11211'),
# Settings common to www.djangoproject.com and docs.djangoproject.com import json import os import platform from unipath import FSPath as Path ### Utilities # The full path to the repository root. BASE = Path(__file__).absolute().ancestor(2) # Far too clever trick to know if we're running on the deployment server. PRODUCTION = ('DJANGOPROJECT_DEBUG' not in os.environ) and ("djangoproject" in platform.node()) # It's a secret to everybody with open(BASE.parent.child('secrets.json')) as handle: SECRETS = json.load(handle) ### Django settings ADMINS = ( ('Adrian Holovaty', '*****@*****.**'), ('Jacob Kaplan-Moss', '*****@*****.**'), ) CACHES = { 'default': {
def __init__(self, cwd):
    """Remember the current directory and make sure *cwd* exists."""
    # Capture where we are now so it can be restored later.
    self.prev_cwd = FSPath.cwd()
    target = Path(cwd)
    self.cwd = target
    # Create the target directory (and any missing parents) on first use.
    if not target.exists():
        target.mkdir(parents=True)