def command(self):
    logging.config.fileConfig(self.path_to_ini_file)
    # get SqlAlchemy session
    self._init_session()

    from pylons import config

    index_location = config["index_dir"]
    load_rcextensions(config["here"])

    repo_location = self.options.repo_location if self.options.repo_location else RepoModel().repos_path
    repo_list = map(strip, self.options.repo_list.split(",")) if self.options.repo_list else None
    repo_update_list = (
        map(strip, self.options.repo_update_list.split(",")) if self.options.repo_update_list else None
    )

    # ======================================================================
    # WHOOSH DAEMON
    # ======================================================================
    from rhodecode.lib.pidlock import LockHeld, DaemonLock
    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon

    try:
        l = DaemonLock(file_=os.path.join(dn(dn(index_location)), "make_index.lock"))
        WhooshIndexingDaemon(
            index_location=index_location,
            repo_location=repo_location,
            repo_list=repo_list,
            repo_update_list=repo_update_list,
        ).run(full_index=self.options.full_index)
        l.release()
    except LockHeld:
        sys.exit(1)
def command(self):
    # get SqlAlchemy session
    self._init_session()

    from pylons import config
    index_location = config['index_dir']
    load_rcextensions(config['here'])

    repo_location = self.options.repo_location \
        if self.options.repo_location else RepoModel().repos_path
    repo_list = map(strip, self.options.repo_list.split(',')) \
        if self.options.repo_list else None
    repo_update_list = map(strip, self.options.repo_update_list.split(',')) \
        if self.options.repo_update_list else None

    #======================================================================
    # WHOOSH DAEMON
    #======================================================================
    from kallithea.lib.pidlock import LockHeld, DaemonLock
    from kallithea.lib.indexers.daemon import WhooshIndexingDaemon
    try:
        l = DaemonLock(file_=os.path.join(dn(dn(index_location)),
                                          'make_index.lock'))
        WhooshIndexingDaemon(index_location=index_location,
                             repo_location=repo_location,
                             repo_list=repo_list,
                             repo_update_list=repo_update_list) \
            .run(full_index=self.options.full_index)
        l.release()
    except LockHeld:
        sys.exit(1)
def command(self):
    logging.config.fileConfig(self.path_to_ini_file)

    from pylons import config
    add_cache(config)
    engine = engine_from_config(config, 'sqlalchemy.db1.')
    init_model(engine)

    index_location = config['index_dir']
    repo_location = self.options.repo_location \
        if self.options.repo_location else RepoModel().repos_path
    repo_list = map(strip, self.options.repo_list.split(',')) \
        if self.options.repo_list else None
    repo_update_list = map(strip, self.options.repo_update_list.split(',')) \
        if self.options.repo_update_list else None
    load_rcextensions(config['here'])

    #======================================================================
    # WHOOSH DAEMON
    #======================================================================
    from rhodecode.lib.pidlock import LockHeld, DaemonLock
    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
    try:
        l = DaemonLock(file_=jn(dn(dn(index_location)),
                                'make_index.lock'))
        WhooshIndexingDaemon(index_location=index_location,
                             repo_location=repo_location,
                             repo_list=repo_list,
                             repo_update_list=repo_update_list)\
            .run(full_index=self.options.full_index)
        l.release()
    except LockHeld:
        sys.exit(1)
def svn_app_version(appname=None, fail_silently=bool(not settings.DEBUG)):
    """
    foo.app    {% svn_app_version "foo.app" %}
    project    {% svn_app_version %}
    """
    cname = 'svn_app_version'
    if appname:
        cname += '.' + appname
    version = cache.get(cname)
    if not version:
        if not appname:
            ## RED_FLAG: hard coded relative root!
            version = get_svn_revision(dn(dn(dn(abspath(__file__)))))
        elif appname == 'django':
            version = get_svn_revision()
        elif appname not in settings.INSTALLED_APPS:
            version = 'SVN-None'
        else:
            try:
                module = get_app(appname)
            except:
                if not fail_silently:
                    raise
                version = 'SVN-Error'
            else:
                version = get_svn_revision(dn(abspath(module.__file__)))
        cache.set(cname, version, 60 * 60 * 24 * 30)
    return version
def create_test_env(repos_test_path, config):
    """
    Makes a fresh database and install test repository into tmp dir
    """
    from kallithea.lib.db_manage import DbManage
    from kallithea.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH

    # PART ONE create db
    dbconf = config['sqlalchemy.db1.url']
    log.debug('making test db %s', dbconf)

    # create test dir if it doesn't exist
    if not os.path.isdir(repos_test_path):
        log.debug('Creating testdir %s', repos_test_path)
        os.makedirs(repos_test_path)

    dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
                        tests=True)
    dbmanage.create_tables(override=True)
    # for tests dynamically set new root paths based on generated content
    dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
    dbmanage.create_default_user()
    dbmanage.admin_prompt()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()
    Session().commit()

    # PART TWO make test repo
    log.debug('making test vcs repositories')

    idx_path = config['app_conf']['index_dir']
    data_path = config['app_conf']['cache_dir']

    # clean index and data
    if idx_path and os.path.exists(idx_path):
        log.debug('remove %s', idx_path)
        shutil.rmtree(idx_path)

    if data_path and os.path.exists(data_path):
        log.debug('remove %s', data_path)
        shutil.rmtree(data_path)

    # CREATE DEFAULT TEST REPOS
    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', 'fixtures', "vcs_test_hg.tar.gz"))
    tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
    tar.close()

    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', 'fixtures', "vcs_test_git.tar.gz"))
    tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
    tar.close()

    # LOAD VCS test stuff
    from kallithea.tests.vcs import setup_package
    setup_package()
def __wrapper(func, *fargs, **fkwargs):
    lockkey = __get_lockkey(func, *fargs, **fkwargs)
    lockkey_path = dn(dn(dn(os.path.abspath(__file__))))

    log.info('running task with lockkey %s', lockkey)
    try:
        l = DaemonLock(jn(lockkey_path, lockkey))
        ret = func(*fargs, **fkwargs)
        l.release()
        return ret
    except LockHeld:
        log.info('LockHeld')
        return 'Task with key %s already running' % lockkey
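# A minimal usage sketch for a lock wrapper like __wrapper above (not from
# the original module): the third-party `decorator` package can turn a
# caller with the signature (func, *args, **kwargs) into a reusable
# decorator. The task name below is hypothetical.
from decorator import decorator

locked_task = decorator(__wrapper)

@locked_task
def send_stats_email(repo_name):
    pass  # at most one instance runs at a time for a given lockkey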
def create_test_env(repos_test_path, config):
    """Makes a fresh database and install test repository into tmp dir
    """
    from rhodecode.lib.db_manage import DbManage
    from rhodecode.tests import HG_REPO, GIT_REPO, NEW_HG_REPO, NEW_GIT_REPO, \
        HG_FORK, GIT_FORK, TESTS_TMP_PATH
    import tarfile
    import shutil
    from os.path import abspath

    # PART ONE create db
    dbconf = config['sqlalchemy.db1.url']
    log.debug('making test db %s', dbconf)

    # create test dir if it doesn't exist
    if not os.path.isdir(repos_test_path):
        log.debug('Creating testdir %s' % repos_test_path)
        os.makedirs(repos_test_path)

    dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
                        tests=True)
    dbmanage.create_tables(override=True)
    dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
    dbmanage.create_default_user()
    dbmanage.admin_prompt()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()

    # PART TWO make test repo
    log.debug('making test vcs repositories')

    idx_path = config['app_conf']['index_dir']
    data_path = config['app_conf']['cache_dir']

    # clean index and data
    if idx_path and os.path.exists(idx_path):
        log.debug('remove %s' % idx_path)
        shutil.rmtree(idx_path)

    if data_path and os.path.exists(data_path):
        log.debug('remove %s' % data_path)
        shutil.rmtree(data_path)

    # CREATE DEFAULT HG REPOSITORY
    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
    tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
    tar.close()
def __kdepath(name):
    """ return the path of the kde category directory.
    If multiple portages are used, return the correct one. """
    if name:
        rootDirFound = False
        name = normcase(name)
        for dirname in rootDirectories():
            dirname = normcase(dirname)
            if name.startswith(dirname):
                name = j(dirname, relpath(name, dirname).split(sep)[0])
                rootDirFound = True
                break
        # this is hopefully a fallback solution, if not - no idea
        if not rootDirFound:
            name = dn(dn(name))
    return name
def create_test_index(repo_location, config, full_index):
    """
    Makes default test index

    :param config: test config
    :param full_index:
    """
    from kallithea.lib.indexers.daemon import WhooshIndexingDaemon
    from kallithea.lib.pidlock import DaemonLock, LockHeld

    index_location = os.path.join(config['app_conf']['index_dir'])
    if not os.path.exists(index_location):
        os.makedirs(index_location)

    try:
        l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
        WhooshIndexingDaemon(index_location=index_location,
                             repo_location=repo_location) \
            .run(full_index=full_index)
        l.release()
    except LockHeld:
        pass
def upgrade(self):
    """
    Upgrades given database schema to given revision following
    all needed steps, to perform the upgrade
    """
    from kallithea.lib.dbmigrate.migrate.versioning import api
    from kallithea.lib.dbmigrate.migrate.exceptions import \
        DatabaseNotControlledError

    if 'sqlite' in self.dburi:
        print (
            '********************** WARNING **********************\n'
            'Make sure your version of sqlite is at least 3.7.X.  \n'
            'Earlier versions are known to fail on some migrations\n'
            '*****************************************************\n')

    upgrade = ask_ok('You are about to perform database upgrade, make '
                     'sure You backed up your database before. '
                     'Continue ? [y/n]')
    if not upgrade:
        print 'No upgrade performed'
        sys.exit(0)

    repository_path = jn(dn(dn(dn(os.path.realpath(__file__)))),
                         'kallithea/lib/dbmigrate')
    db_uri = self.dburi

    try:
        curr_version = api.db_version(db_uri, repository_path)
        msg = ('Found current database under version '
               'control with version %s' % curr_version)
    except (RuntimeError, DatabaseNotControlledError):
        curr_version = 1
        msg = ('Current database is not under version control. Setting '
               'as version %s' % curr_version)
        api.version_control(db_uri, repository_path, curr_version)

    notify(msg)

    if curr_version == __dbversion__:
        print 'This database is already at the newest version'
        sys.exit(0)

    # clear cache keys
    log.info("Clearing cache keys now...")
    CacheInvalidation.clear_cache()

    upgrade_steps = range(curr_version + 1, __dbversion__ + 1)
    notify('attempting to do database upgrade from '
           'version %s to version %s' % (curr_version, __dbversion__))

    # CALL THE PROPER ORDER OF STEPS TO PERFORM FULL UPGRADE
    _step = None
    for step in upgrade_steps:
        notify('performing upgrade step %s' % step)
        time.sleep(0.5)
        api.upgrade(db_uri, repository_path, step)
        notify('schema upgrade for step %s completed' % (step,))
        _step = step

    notify('upgrade to version %s successful' % _step)
from os.path import abspath as ap, dirname as dn

from sqlalchemy import Column, ForeignKey, Integer, String, Date, Float, Text, DateTime
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine

Base = declarative_base()

DB_PATH = '{0}/d2modeling.db'.format(dn(dn(ap(__file__))))
DB_NAME = 'sqlite:///{0}'.format(DB_PATH)
engine = create_engine(DB_NAME)
Base.metadata.bind = engine


class Team(Base):
    __tablename__ = 'team'

    name = Column("name", String(250), primary_key=True)
    elo = Column("elo", Float, default=1200)


class Match(Base):
    __tablename__ = 'match'

    id = Column("id", Integer, primary_key=True)
    dire_score = Column("dire_score", Integer)
    radiant_score = Column("radiant_score", Integer)
    time = Column("time", Float)
    date = Column("date", Date, index=True)
    winner = Column("winner", String(250))
    dire_name = Column("dire_name", String(250), ForeignKey('team.name'), index=True)
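# Illustrative usage of the models above (a sketch, not from the original
# project): create the schema and insert a row through a standard SQLAlchemy
# session. The team name is made up.
from sqlalchemy.orm import sessionmaker

if __name__ == '__main__':
    Base.metadata.create_all(engine)         # creates the 'team' and 'match' tables
    DBSession = sessionmaker(bind=engine)
    session = DBSession()
    session.add(Team(name='Radiant Five'))   # elo falls back to its default of 1200
    session.commit()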
\usepackage{amsfonts}
\usepackage{amssymb}
\usepackage{txfonts}"""

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_use_modindex = True

# Add an epilog to every read file so as to ensure proper
# substitutions throughout our documentation
rst_epilog = ""

from os.path import dirname as dn, join as pj
f = open(pj(dn(__file__), "global_substitutions.rst"))
rst_epilog += f.read() + "\n"
f.close()

# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
    "python": ('http://docs.python.org/', None),
    "numpy": ("http://docs.scipy.org/doc/numpy/", None),
    "oacore": ('http://openalea.gforge.inria.fr/doc/openalea/core/doc/_build/html', None),
    "oaimage": ('http://openalea.gforge.inria.fr/beta_doc/openalea/image/doc/_build/html', None),
    "oadeploy": ('http://openalea.gforge.inria.fr/doc/openalea/deploy/doc/_build/html', None),
    "oadeploygui": ('http://openalea.gforge.inria.fr/doc/openalea/deploygui/doc/_build/html', None),
    "oamisc": ('http://openalea.gforge.inria.fr/doc/openalea/misc/doc/_build/html', None),
    "oascheduler": ('http://openalea.gforge.inria.fr/doc/openalea/scheduler/doc/_build/html', None),
import os
from os.path import dirname as dn

PROJECT_PATH = os.path.abspath(dn(dn(dn(__file__))))
BOOTSCRIPT_PATH = "%s/scripts" % PROJECT_PATH
COMMAND_SET_PATH = "%s/commandsets" % PROJECT_PATH
UGO_PATH = "%s/%s" % (os.getenv('HOME'), '.ugo')
UGO_PROFILE = ".profile"
CONFIG_FILE = "%s/.session.ini" % UGO_PATH
DEFAULT_COMMAND_SET = "ugo"
    :license: GPLv3, see LICENSE.md for more details.
"""

import cookielib
import urllib
import urllib2
import time
import os
import sys
import tempfile
from os.path import join as jn
from os.path import dirname as dn

__here__ = os.path.abspath(__file__)
__root__ = dn(dn(dn(__here__)))
sys.path.append(__root__)

from kallithea.lib import vcs
from kallithea.lib.compat import OrderedSet
from kallithea.lib.vcs.exceptions import RepositoryError

PASES = 3
HOST = 'http://127.0.0.1'
PORT = 5000
BASE_URI = '%s:%s/' % (HOST, PORT)

if len(sys.argv) == 2:
    BASE_URI = sys.argv[1]

if not BASE_URI.endswith('/'):
import os
from os.path import dirname as dn

import dj_database_url

from ..settings import *
from ..project import PROJECT

BASE_DIR = dn(dn(dn(dn(os.path.abspath(__file__)))))
VAR_DIR = os.path.join(dn(BASE_DIR), 'var')

# Application definition

INSTALLED_APPS = [
    'main',
    'cq',
    'xauth',
    'jsdata',
    'rest_framework',
    'django_extensions',
    'webpack_loader',
    'storages',
    'django.contrib.postgres'
] + INSTALLED_APPS

INSTALLED_APPS = INSTALLED_APPS + [
    'channels'
]
import os
from os.path import dirname as dn, join as jn

import pyconf

package_dir = dn(__file__)
container_dir = dn(package_dir)
default_config_path = jn(container_dir, "config.py")


def load_config(default_path=default_config_path):
    # fall back to the given default path, not the module-level constant,
    # so the parameter actually takes effect
    rv = pyconf.load(os.environ.get("HNILSSON_CONF", default_path))

    def setup_logging():
        import logging
        logging.basicConfig(**rv.log_opts)

    rv.setup_logging = setup_logging
    return rv
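# Hypothetical call site for load_config above, assuming a pyconf-style
# config object that exposes `log_opts` for logging.basicConfig:
config = load_config()
config.setup_logging()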
#
# KrySA documentation build configuration file, created by sphinx-quickstart
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import sys
import os.path as op
from os.path import dirname as dn
sys.path.insert(0, op.join(dn(dn(dn(op.abspath(__file__)))), 'krysa'))
sys.path.insert(0, dn(dn(op.abspath(__file__))))
try:
    import kivy
    import docs_modules
except ImportError:
    print('No Kivy installed...\nBuilding simple docs...')

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.4.4'
from subprocess import Popen, PIPE
from paste.deploy import appconfig
from pylons import config
from sqlalchemy import engine_from_config

from rhodecode.lib.utils import add_cache
from rhodecode.model import init_model
from rhodecode.model import meta
from rhodecode.model.db import User, Repository
from rhodecode.lib.auth import get_crypt_password

from rhodecode.tests import TESTS_TMP_PATH, NEW_HG_REPO, HG_REPO
from rhodecode.config.environment import load_environment

rel_path = dn(dn(dn(os.path.abspath(__file__))))
conf = appconfig('config:development.ini', relative_to=rel_path)
load_environment(conf.global_conf, conf.local_conf)

add_cache(conf)

USER = '******'
PASS = '******'
HOST = '127.0.0.1:5000'
DEBUG = True
log = logging.getLogger(__name__)


class Command(object):

    def __init__(self, cwd):
def pypy_find_stdlib(s):
    from os.path import abspath, join, dirname as dn
    thisfile = abspath(__file__)
    root = dn(dn(dn(thisfile)))
    return [join(root, 'lib-python', '2.7'),
            join(root, 'lib_pypy')]
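# The nested dirname calls used throughout these snippets follow one idiom:
# each dn(...) strips a single path component. A self-contained illustration
# with the standard library (the path is made up):
from os.path import dirname as dn

path = '/opt/pypy/lib_pypy/readline.py'
assert dn(path) == '/opt/pypy/lib_pypy'
assert dn(dn(path)) == '/opt/pypy'
assert dn(dn(dn(path))) == '/opt'
# pathlib equivalent: pathlib.PurePosixPath(path).parents[2] == PurePosixPath('/opt')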
def get_commits_stats(repo_name, ts_min_y, ts_max_y):
    try:
        log = get_commits_stats.get_logger()
    except:
        log = logging.getLogger(__name__)

    lockkey = __get_lockkey("get_commits_stats", repo_name, ts_min_y, ts_max_y)
    lockkey_path = dn(dn(dn(dn(os.path.abspath(__file__)))))

    log.info("running task with lockkey %s", lockkey)
    try:
        lock = l = DaemonLock(jn(lockkey_path, lockkey))

        # for js data compatibility cleans the key for person from '
        akc = lambda k: person(k).replace('"', "")

        co_day_auth_aggr = {}
        commits_by_day_aggregate = {}
        repos_path = get_repos_path()
        repo = get_repo(safe_str(os.path.join(repos_path, repo_name)))
        repo_size = len(repo.revisions)
        # return if repo has no revisions
        if repo_size < 1:
            lock.release()
            return True

        skip_date_limit = True
        parse_limit = int(config["app_conf"].get("commit_parse_limit"))
        last_rev = 0
        last_cs = None
        timegetter = itemgetter("time")

        sa = get_session()

        dbrepo = sa.query(Repository).filter(Repository.repo_name == repo_name).scalar()
        cur_stats = sa.query(Statistics).filter(Statistics.repository == dbrepo).scalar()

        if cur_stats is not None:
            last_rev = cur_stats.stat_on_revision

        if last_rev == repo.get_changeset().revision and repo_size > 1:
            # pass silently without any work if we're not on first revision or
            # current state of parsing revision(from db marker) is the
            # last revision
            lock.release()
            return True

        if cur_stats:
            commits_by_day_aggregate = OrderedDict(
                json.loads(cur_stats.commit_activity_combined))
            co_day_auth_aggr = json.loads(cur_stats.commit_activity)

        log.debug("starting parsing %s", parse_limit)
        lmktime = mktime

        last_rev = last_rev + 1 if last_rev > 0 else last_rev
        for cs in repo[last_rev:last_rev + parse_limit]:
            last_cs = cs  # remember last parsed changeset
            k = lmktime([cs.date.timetuple()[0], cs.date.timetuple()[1],
                         cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0])

            if akc(cs.author) in co_day_auth_aggr:
                try:
                    l = [timegetter(x) for x in
                         co_day_auth_aggr[akc(cs.author)]["data"]]
                    time_pos = l.index(k)
                except ValueError:
                    time_pos = False

                if time_pos >= 0 and time_pos is not False:
                    datadict = co_day_auth_aggr[akc(cs.author)]["data"][time_pos]
                    datadict["commits"] += 1
                    datadict["added"] += len(cs.added)
                    datadict["changed"] += len(cs.changed)
                    datadict["removed"] += len(cs.removed)
                else:
                    if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
                        datadict = {"time": k,
                                    "commits": 1,
                                    "added": len(cs.added),
                                    "changed": len(cs.changed),
                                    "removed": len(cs.removed)}
                        co_day_auth_aggr[akc(cs.author)]["data"].append(datadict)
            else:
                if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
                    co_day_auth_aggr[akc(cs.author)] = {
                        "label": akc(cs.author),
                        "data": [{"time": k,
                                  "commits": 1,
                                  "added": len(cs.added),
                                  "changed": len(cs.changed),
                                  "removed": len(cs.removed)}],
                        "schema": ["commits"],
                    }

            # gather all data by day
            if k in commits_by_day_aggregate:
                commits_by_day_aggregate[k] += 1
            else:
                commits_by_day_aggregate[k] = 1

        overview_data = sorted(commits_by_day_aggregate.items(),
                               key=itemgetter(0))

        if not co_day_auth_aggr:
            co_day_auth_aggr[akc(repo.contact)] = {
                "label": akc(repo.contact),
                "data": [0, 1],
                "schema": ["commits"],
            }

        stats = cur_stats if cur_stats else Statistics()
        stats.commit_activity = json.dumps(co_day_auth_aggr)
        stats.commit_activity_combined = json.dumps(overview_data)

        log.debug("last revision %s", last_rev)
        leftovers = len(repo.revisions[last_rev:])
        log.debug("revisions to parse %s", leftovers)

        if last_rev == 0 or leftovers < parse_limit:
            log.debug("getting code trending stats")
            stats.languages = json.dumps(__get_codes_stats(repo_name))

        try:
            stats.repository = dbrepo
            stats.stat_on_revision = last_cs.revision if last_cs else 0
            sa.add(stats)
            sa.commit()
        except:
            log.error(traceback.format_exc())
            sa.rollback()
            lock.release()
            return False

        # final release
        lock.release()

        # execute another task if celery is enabled
        if len(repo.revisions) > 1 and CELERY_ON:
            run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y)
        return True
    except LockHeld:
        log.info("LockHeld")
        return "Task with key %s already running" % lockkey
def load_json_fixture(self, file_name):
    fixtures_path = _j(dn(dn(os.path.realpath(__file__))), 'fixtures')
    base_path = _j(_j(fixtures_path, 'search'), file_name)
    with open(base_path, 'r') as f:
        return json.loads(f.read())
#pylint: disable=too-many-locals,no-self-use
#pylint: disable=too-many-public-methods,too-many-statements
"""
General unit tests.

Run 'python3 -m unittest -v' from the ebml directory.
"""

import unittest
from io import BytesIO
import os
import sys
import random
from os.path import dirname as dn, abspath as ap
sys.path.append(dn(dn(dn(ap(__file__)))))

TEST_DATA_DIR = os.path.dirname(__file__)
TEST_FILE = os.path.join(TEST_DATA_DIR, 'test.mkv')

__all__ = ['EbmlTest', 'UtilityTest', 'HeaderTest', 'TagsTest',
           'ParsedTest', 'FilesTest', 'UNK_ID', 'TEST_FILE_DATA']

import logging
LOG = logging.getLogger(__name__)
LOG.setLevel(logging.INFO)

UNK_ID = 0x01223344

with open(TEST_FILE, 'rb') as mkv_file:
    TEST_FILE_DATA = mkv_file.read()
# -*- coding: utf-8 -*-
import sys

# TODO: haaaack
from os.path import abspath, dirname as dn
sys.path.append(dn(dn(dn(abspath(__file__)))))

from strata.core import Variable
import os
from os import path
from os.path import dirname as dn
import copy
from couchdb import Server
from couchdb.http import PreconditionFailed
from datetime import datetime, timedelta
from hashlib import sha1
import random
import struct
from radarpost.mailbox import create_mailbox as _create_mailbox
from radarpost.config import load_config

TEST_INI_KEY = 'RADAR_TEST_CONFIG'
DEFAULT_RADAR_TEST_CONFIG = path.join(dn(dn(dn(__file__))), 'test.ini')
TEST_DATA_DIR = path.join(dn(__file__), 'data')
TEST_MAILBOX_ID = 'rp_test_mailbox'


def get_config_filename():
    filename = os.environ.get('RADAR_TEST_CONFIG', None)
    if filename is None:
        filename = DEFAULT_RADAR_TEST_CONFIG
    return filename


def load_test_config():
    return load_config(get_config_filename())


def get_data(filename):
    return open(path.join(TEST_DATA_DIR, filename)).read()
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ""

# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ""

# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
from os.path import dirname as dn, join as path_join
STATIC_ROOT = path_join(dn(dn(dn(dn(__file__)))), "static")

# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = "/static/"

# Additional locations of static files
STATICFILES_DIRS = (
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)

# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
import os
import sys
import re
import shutil
import logging
import datetime

from os.path import dirname as dn, join as jn

from rhodecode.model import init_model
from rhodecode.lib.utils2 import engine_from_config
from rhodecode.model.db import RhodeCodeUi

# to get the rhodecode import
sys.path.append(dn(dn(dn(os.path.realpath(__file__)))))

from rhodecode.lib.utils import BasePasterCommand, Command, ask_ok,\
    REMOVED_REPO_PAT, add_cache

log = logging.getLogger(__name__)


class CleanupCommand(BasePasterCommand):

    max_args = 1
    min_args = 1

    usage = "CONFIG_FILE"
    summary = "Cleanup deleted repos"
    group_name = "RhodeCode"
def upgrade(self):
    """
    Upgrades given database schema to given revision following
    all needed steps, to perform the upgrade
    """
    from rhodecode.lib.dbmigrate.migrate.versioning import api
    from rhodecode.lib.dbmigrate.migrate.exceptions import \
        DatabaseNotControlledError

    if 'sqlite' in self.dburi:
        print (
            '********************** WARNING **********************\n'
            'Make sure your version of sqlite is at least 3.7.X.  \n'
            'Earlier versions are known to fail on some migrations\n'
            '*****************************************************\n'
        )
    upgrade = ask_ok('You are about to perform database upgrade, make '
                     'sure You backed up your database before. '
                     'Continue ? [y/n]')
    if not upgrade:
        sys.exit('Nothing done')

    repository_path = jn(dn(dn(dn(os.path.realpath(__file__)))),
                         'rhodecode/lib/dbmigrate')
    db_uri = self.dburi

    try:
        curr_version = api.db_version(db_uri, repository_path)
        msg = ('Found current database under version'
               ' control with version %s' % curr_version)
    except (RuntimeError, DatabaseNotControlledError):
        curr_version = 1
        msg = ('Current database is not under version control. Setting'
               ' as version %s' % curr_version)
        api.version_control(db_uri, repository_path, curr_version)

    notify(msg)

    if curr_version == __dbversion__:
        sys.exit('This database is already at the newest version')

    #======================================================================
    # UPGRADE STEPS
    #======================================================================
    class UpgradeSteps(object):
        """
        Those steps follow schema versions so, for example, schema
        with seq 002 == step_2 and so on.
        """

        def __init__(self, klass):
            self.klass = klass

        def step_0(self):
            # step 0 is the schema upgrade, and then follow proper upgrades
            notify('attempting to do database upgrade to version %s' \
                   % __dbversion__)
            api.upgrade(db_uri, repository_path, __dbversion__)
            notify('Schema upgrade completed')

        def step_1(self):
            pass

        def step_2(self):
            notify('Patching repo paths for newer version of RhodeCode')
            self.klass.fix_repo_paths()

            notify('Patching default user of RhodeCode')
            self.klass.fix_default_user()

            log.info('Changing ui settings')
            self.klass.create_ui_settings()

        def step_3(self):
            notify('Adding additional settings into RhodeCode db')
            self.klass.fix_settings()
            notify('Adding ldap defaults')
            self.klass.create_ldap_options(skip_existing=True)

        def step_4(self):
            notify('create permissions and fix groups')
            self.klass.create_permissions()
            self.klass.fixup_groups()

        def step_5(self):
            pass

        def step_6(self):
            notify('re-checking permissions')
            self.klass.create_permissions()

            notify('installing new UI options')
            sett4 = RhodeCodeSetting('show_public_icon', True)
            Session().add(sett4)
            sett5 = RhodeCodeSetting('show_private_icon', True)
            Session().add(sett5)
            sett6 = RhodeCodeSetting('stylify_metatags', False)
            Session().add(sett6)

            notify('fixing old PULL hook')
            _pull = RhodeCodeUi.get_by_key('preoutgoing.pull_logger')
            if _pull:
                _pull.ui_key = RhodeCodeUi.HOOK_PULL
                Session().add(_pull)

            notify('fixing old PUSH hook')
            _push = RhodeCodeUi.get_by_key('pretxnchangegroup.push_logger')
            if _push:
                _push.ui_key = RhodeCodeUi.HOOK_PUSH
                Session().add(_push)

            notify('installing new pre-push hook')
            hooks4 = RhodeCodeUi()
            hooks4.ui_section = 'hooks'
            hooks4.ui_key = RhodeCodeUi.HOOK_PRE_PUSH
            hooks4.ui_value = 'python:rhodecode.lib.hooks.pre_push'
            Session().add(hooks4)

            notify('installing new pre-pull hook')
            hooks6 = RhodeCodeUi()
            hooks6.ui_section = 'hooks'
            hooks6.ui_key = RhodeCodeUi.HOOK_PRE_PULL
            hooks6.ui_value = 'python:rhodecode.lib.hooks.pre_pull'
            Session().add(hooks6)

            notify('installing hgsubversion option')
            # enable hgsubversion disabled by default
            hgsubversion = RhodeCodeUi()
            hgsubversion.ui_section = 'extensions'
            hgsubversion.ui_key = 'hgsubversion'
            hgsubversion.ui_value = ''
            hgsubversion.ui_active = False
            Session().add(hgsubversion)

            notify('installing hg git option')
            # enable hggit disabled by default
            hggit = RhodeCodeUi()
            hggit.ui_section = 'extensions'
            hggit.ui_key = 'hggit'
            hggit.ui_value = ''
            hggit.ui_active = False
            Session().add(hggit)

            notify('re-check default permissions')
            default_user = User.get_by_username(User.DEFAULT_USER)
            perm = Permission.get_by_key('hg.fork.repository')
            reg_perm = UserToPerm()
            reg_perm.user = default_user
            reg_perm.permission = perm
            Session().add(reg_perm)

        def step_7(self):
            perm_fixes = self.klass.reset_permissions(User.DEFAULT_USER)
            Session().commit()
            if perm_fixes:
                notify('There was an inconsistent state of permissions '
                       'detected for default user. Permissions are now '
                       'reset to the default value for default user. '
                       'Please validate and check default permissions '
                       'in admin panel')

        def step_8(self):
            self.klass.populate_default_permissions()
            self.klass.create_default_options(skip_existing=True)
            Session().commit()

        def step_9(self):
            perm_fixes = self.klass.reset_permissions(User.DEFAULT_USER)
            Session().commit()
            if perm_fixes:
                notify('There was an inconsistent state of permissions '
                       'detected for default user. Permissions are now '
                       'reset to the default value for default user. '
                       'Please validate and check default permissions '
                       'in admin panel')

    upgrade_steps = [0] + range(curr_version + 1, __dbversion__ + 1)

    # CALL THE PROPER ORDER OF STEPS TO PERFORM FULL UPGRADE
    _step = None
    for step in upgrade_steps:
        notify('performing upgrade step %s' % step)
        getattr(UpgradeSteps(self), 'step_%s' % step)()
        self.sa.commit()
        _step = step

    notify('upgrade to version %s successful' % _step)
def upgrade(self):
    """Upgrades given database schema to given revision following
    all needed steps, to perform the upgrade
    """
    from rhodecode.lib.dbmigrate.migrate.versioning import api
    from rhodecode.lib.dbmigrate.migrate.exceptions import \
        DatabaseNotControlledError

    if 'sqlite' in self.dburi:
        print (
            '********************** WARNING **********************\n'
            'Make sure your version of sqlite is at least 3.7.X.  \n'
            'Earlier versions are known to fail on some migrations\n'
            '*****************************************************\n'
        )
    upgrade = ask_ok('You are about to perform database upgrade, make '
                     'sure You backed up your database before. '
                     'Continue ? [y/n]')
    if not upgrade:
        sys.exit('Nothing done')

    repository_path = jn(dn(dn(dn(os.path.realpath(__file__)))),
                         'rhodecode/lib/dbmigrate')
    db_uri = self.dburi

    try:
        curr_version = api.db_version(db_uri, repository_path)
        msg = ('Found current database under version'
               ' control with version %s' % curr_version)
    except (RuntimeError, DatabaseNotControlledError):
        curr_version = 1
        msg = ('Current database is not under version control. Setting'
               ' as version %s' % curr_version)
        api.version_control(db_uri, repository_path, curr_version)

    print (msg)

    if curr_version == __dbversion__:
        sys.exit('This database is already at the newest version')

    #======================================================================
    # UPGRADE STEPS
    #======================================================================
    class UpgradeSteps(object):
        """
        Those steps follow schema versions so, for example, schema
        with seq 002 == step_2 and so on.
        """

        def __init__(self, klass):
            self.klass = klass

        def step_0(self):
            # step 0 is the schema upgrade, and then follow proper upgrades
            print ('attempting to do database upgrade to version %s' \
                   % __dbversion__)
            api.upgrade(db_uri, repository_path, __dbversion__)
            print ('Schema upgrade completed')

        def step_1(self):
            pass

        def step_2(self):
            print ('Patching repo paths for newer version of RhodeCode')
            self.klass.fix_repo_paths()

            print ('Patching default user of RhodeCode')
            self.klass.fix_default_user()

            log.info('Changing ui settings')
            self.klass.create_ui_settings()

        def step_3(self):
            print ('Adding additional settings into RhodeCode db')
            self.klass.fix_settings()
            print ('Adding ldap defaults')
            self.klass.create_ldap_options(skip_existing=True)

    upgrade_steps = [0] + range(curr_version + 1, __dbversion__ + 1)

    # CALL THE PROPER ORDER OF STEPS TO PERFORM FULL UPGRADE
    for step in upgrade_steps:
        print ('performing upgrade step %s' % step)
        getattr(UpgradeSteps(self), 'step_%s' % step)()
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

from __future__ import with_statement

import os
import sys
import logging

from os.path import dirname as dn, join as jn
from rhodecode.model.scm import ScmModel

# to get the rhodecode import
rc_path = dn(dn(dn(os.path.realpath(__file__))))
sys.path.append(rc_path)

from rhodecode.lib.utils import BasePasterCommand, repo2db_mapper
from rhodecode.model.db import Repository
from rhodecode.model.repo import RepoModel
from rhodecode.model.meta import Session

log = logging.getLogger(__name__)


class Command(BasePasterCommand):

    max_args = 1
    min_args = 1
def _build_dir():
    from os.path import dirname as dn, join, realpath
    mono_repo_dir = dn(dn(dn(realpath(__file__))))
    return join(mono_repo_dir, 'pho-doc', 'notecards', 'entities')
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

import os
import sys
import logging
import traceback

from shutil import rmtree
from time import mktime

from os.path import dirname as dn
from os.path import join as jn

# to get the rhodecode import
project_path = dn(dn(dn(dn(os.path.realpath(__file__)))))
sys.path.append(project_path)

from rhodecode.config.conf import INDEX_EXTENSIONS
from rhodecode.model.scm import ScmModel
from rhodecode.lib.utils2 import safe_unicode
from rhodecode.lib.indexers import SCHEMA, IDX_NAME

from rhodecode.lib.vcs.exceptions import ChangesetError, RepositoryError, \
    NodeDoesNotExistError

from whoosh.index import create_in, open_dir

log = logging.getLogger('whoosh_indexer')
def hub_mod_name_and_mod_dir():
    pcs = 'kiss_rdb', 'storage_adapters_'
    from os.path import join as path_join, dirname as dn
    return '.'.join(pcs), path_join(dn(dn(__file__)), *pcs)