def __init__(self, path, env=None, config=None):
        # NOTE(review): this def appears to be a stray duplicate of
        # Project.__init__ below — the def line sits at module level while
        # the body is indented like a method, presumably a merge/paste
        # artifact. Verify whether it can be deleted.
        #
        # Initialize a project rooted at `path` (must be an existing
        # directory). `config` falls back to env.config, then to a fresh
        # Config(). If `env` is missing, set_env() must be called later.
        self.path = os.path.normpath(os.path.abspath(path))
        if not os.path.isdir(self.path):
            raise Exception('No project can be found at %r' % self.path)
        self.env = env
        self.config = config
        if not config and env:
            self.config = env.config
        if not self.config:
            self.config = Config()
        self.htdocs_path = join(path, 'htdocs')
        self.daemon = None
        self._db_backend = None
        self.db_path = join(path, 'db')
        if not os.path.isdir(self.db_path):
            os.makedirs(self.db_path)
        self._deployer = None

        # Dedicated admin log, written under <project>/logs/admin.log.
        self.admin_log = logging.getLogger('synthesepy.admin')
        self.admin_log.setLevel(logging.DEBUG)
        self.admin_log_path = join(self.path, 'logs', 'admin.log')

        class KeptClosedFileHandler(logging.FileHandler):
            '''Extension of logging.FileHandler which tries to keep the log
            file closed, so that multiple processes can write to it.
            Concurrent access might have unpredictable results though'''
            def emit(self, record):
                logging.FileHandler.emit(self, record)
                self.close()

        # FIXME: older Pythons have an issue when overriding FileHandler
        if sys.version_info < (2, 6, 6):
            # Pre-create the (empty) log file so FileHandler can open it.
            if not os.path.isfile(self.admin_log_path):
                utils.maybe_makedirs(os.path.dirname(self.admin_log_path))
                open(self.admin_log_path, 'wb').write('')
            admin_handler = logging.FileHandler(self.admin_log_path)
        else:
            # delay=True postpones opening the file until the first emit.
            admin_handler = KeptClosedFileHandler(
                self.admin_log_path, delay=True)

        admin_formatter = logging.Formatter(
            '%(asctime)s  %(levelname)-12s  %(message)s')
        admin_handler.setFormatter(admin_formatter)
        self.admin_log.addHandler(admin_handler)

        self._read_config()

        # env might not be available yet. set_env should be called once ready
        # to complete the project initialization.
        if self.env:
            self.set_env(self.env)
class Project(object):
    def __init__(self, path, env=None, config=None):
        """Initialize a project rooted at the given directory.

        path -- path to the project directory (must exist).
        env -- optional environment object; when absent, set_env() must be
            called later to complete initialization.
        config -- optional Config object; falls back to env.config, then to
            a fresh Config().
        Raises Exception if path is not an existing directory.
        """
        self.path = os.path.normpath(os.path.abspath(path))
        if not os.path.isdir(self.path):
            raise Exception('No project can be found at %r' % self.path)
        self.env = env
        self.config = config
        if not config and env:
            self.config = env.config
        if not self.config:
            self.config = Config()
        self.htdocs_path = join(path, 'htdocs')
        self.daemon = None
        self._db_backend = None
        self.db_path = join(path, 'db')
        if not os.path.isdir(self.db_path):
            os.makedirs(self.db_path)
        self._deployer = None

        # Dedicated admin log, written under <project>/logs/admin.log.
        self.admin_log = logging.getLogger('synthesepy.admin')
        self.admin_log.setLevel(logging.DEBUG)
        self.admin_log_path = join(self.path, 'logs', 'admin.log')

        class KeptClosedFileHandler(logging.FileHandler):
            '''Extension of logging.FileHandler which tries to keep the log
            file closed, so that multiple processes can write to it.
            Concurrent access might have unpredictable results though'''
            def emit(self, record):
                logging.FileHandler.emit(self, record)
                self.close()

        # FIXME: older Pythons have an issue when overriding FileHandler
        if sys.version_info < (2, 6, 6):
            if not os.path.isfile(self.admin_log_path):
                utils.maybe_makedirs(os.path.dirname(self.admin_log_path))
                # Pre-create the empty log file, closing the handle
                # explicitly (it was previously leaked).
                f = open(self.admin_log_path, 'wb')
                f.write('')
                f.close()
            admin_handler = logging.FileHandler(self.admin_log_path)
        else:
            # delay=True postpones opening the file until the first emit.
            admin_handler = KeptClosedFileHandler(
                self.admin_log_path, delay=True)

        admin_formatter = logging.Formatter(
            '%(asctime)s  %(levelname)-12s  %(message)s')
        admin_handler.setFormatter(admin_formatter)
        self.admin_log.addHandler(admin_handler)

        self._read_config()

        # env might not be available yet. set_env should be called once ready
        # to complete the project initialization.
        if self.env:
            self.set_env(self.env)

    def set_env(self, env):
        """Complete project initialization once an environment is available.

        Loads sites and packages, creates the daemon and imports manager,
        applies the configured env_config overlays, derives remote_db_path
        if unset, and imports the optional manager/main.py hook module.
        """
        self.env = env
        self.packages_loader = PackagesLoader(self)
        self._load_sites()
        self._load_packages()
        self.daemon = daemon.Daemon(self.env, self)
        self.imports_manager = imports.ImportsManager(self)

        # Apply each named env config overlay on top of the base config.
        for env_config_name in self.config.env_config_names.split(','):
            if not env_config_name:
                continue
            if env_config_name not in self.config.env_configs:
                raise Exception('Invalid env_config_name %r. Possible ones are '
                    '%r' % (env_config_name, self.config.env_configs.keys()))
            self.config.update_from_dict(
                self.config.env_configs[env_config_name])

        if not self.config.remote_db_path:
            # Guess the remote path separator: POSIX-style paths start with
            # '/', otherwise assume a Windows-style path.
            sep = '/' if self.config.remote_project_path.startswith('/') else '\\'
            self.config.remote_db_path = sep.join([
                self.config.remote_project_path, 'db', 'config.db3'])

        # Optional per-project manager module providing hooks (see
        # project_command and rundaemon).
        manager_path = join(self.path, 'manager')
        self.manager_module = None
        if (not os.path.isdir(manager_path) or
            not os.path.isfile(join(manager_path, 'main.py'))):
            return

        sys.path.append(manager_path)
        self.manager_module = __import__('main')
        log.debug('Manager module %r', self.manager_module)

    def get_site(self, site_name):
        """Return the site whose name matches site_name, or None."""
        matching = (site for site in self.sites if site.name == site_name)
        return next(matching, None)

    def _read_config(self):
        """Read config.py plus local/host-specific overrides, set defaults."""
        # Later files override earlier ones: base, _local, _local_<host>.
        suffixes = ['', '_local', '_local_' + socket.gethostname()]
        for suffix in suffixes:
            config_path = join(self.path, 'config%s.py' % suffix)
            log.debug('Trying to read config file: %r', config_path)
            if not os.path.isfile(config_path):
                continue
            log.debug('Reading config file: %r', config_path)
            self.config.update_from_file(config_path)

        # Set a path to the sqlite db if not set explicitly.

        conn_info = db_backends.ConnectionInfo(self.config.conn_string)
        if conn_info.backend == 'sqlite' and 'path' not in conn_info:
            conn_info['path'] = join(self.path, 'db', 'config.db3')
            self.config.conn_string = conn_info.conn_string

        # Set defaults

        self.config.project_path = self.path
        if not self.config.project_name:
            self.config.project_name = os.path.basename(self.path)
        if not self.config.log_file:
            self.config.log_file = join(self.path, 'logs', 'synthese.log')
        log_dir = os.path.dirname(self.config.log_file)
        if not os.path.isdir(log_dir):
            os.makedirs(log_dir)

        log.debug('Config: %s', self.config)

    def _load_sites(self):
        """Discover sites/*, register static paths and pick the default site."""
        self.sites = []
        site_dirs = sorted(glob.glob(join(self.path, 'sites', '*')))
        for site_dir in site_dirs:
            if os.path.isdir(site_dir):
                self.sites.append(Site(self, site_dir))
        log.debug('Found sites: %s', self.sites)

        # Every package of every site (plus the shared admin package, when
        # present) contributes a (base_path, files_path) static mapping.
        self.config.static_paths = []
        admin_site = self.get_site('admin')
        admin_package = admin_site.get_package('admin') if admin_site else None
        for site in self.sites:
            for package in site.packages + [admin_package]:
                if not package:
                    continue
                self.config.static_paths.append(
                    (site.base_path, package.files_path))

        if self.config.default_site:
            site_ids = [site.id for site in self.sites
                if site.name == self.config.default_site]
            if len(site_ids) != 1:
                raise Exception('Can\'t find site {0!r} (found: {1!r})'.format(
                    self.config.default_site, site_ids))
            self.config.site_id = site_ids[0]
        if self.config.site_id <= 0:
            # Fall back to the first non-admin site, if any.
            non_admin_sites = [site for site in self.sites
                if site.name != 'admin']
            if non_admin_sites:
                self.config.site_id = non_admin_sites[0].id

    def _load_packages(self):
        """Load project-level packages from <project>/packages, if present."""
        self.packages = []
        packages_dir = join(self.path, 'packages')
        if os.path.isdir(packages_dir):
            self.packages = self.packages_loader.load_packages([], packages_dir)

    def _get_sites_and_packages(self):
        """Yield (site, package) pairs; project-level packages get site=None."""
        for pkg in self.packages:
            yield (None, pkg)
        for site in self.sites:
            for pkg in site.packages:
                yield (site, pkg)

    def _run_testdata_importer(self):
        """Prepare the environment for the testdata importer.

        The importer invocation itself is disabled (see FIXME below), so
        this currently only prepares the launch environment.
        """
        importer_path = self.env.testdata_importer_path
        log.info('Running testdata importer from %r', importer_path)
        self.env.prepare_for_launch()
        env = os.environ.copy()
        env['SYNTHESE_TESTDATA_CONNSTRING'] = (
            self.config.conn_string + ',triggerCheck=0')
        # FIXME: The testdata importer binary does not compile anymore, so
        # its invocation is skipped. (This comment previously used a literal
        # tab for indentation.)
        #utils.call(importer_path, env=env)

    @property
    def db_backend(self):
        """Lazily-created database backend for this project's conn_string."""
        if not self._db_backend:
            self._db_backend = db_backends.create_backend(
                self.env, self.config.conn_string)
        return self._db_backend

    def clean(self):
        """Drop this project's database."""
        self.db_backend.drop_db()

    @command()
    def load_data(self, local=False, overwrite=True):
        """Load data into the database."""
        log.info('loading_data into project (local:%s)', local)
        # TODO: don't import fixtures from a package more than once.
        for target_site, pkg in self._get_sites_and_packages():
            log.debug('Loading site:%s package:%s', target_site, pkg)
            pkg.load_data(target_site, local, overwrite)

    @command()
    def load_local_data(self, overwrite):
        """
        Load data into the database (data loaded only once meant to be edited)
        """
        # Delegates to load_data with local fixtures enabled.
        self.load_data(local=True, overwrite=overwrite)

    @command()
    def reset(self):
        """
        Delete database and load inital data.
        """
        log.info('Resetting project')
        # Drop, recreate, then load both shared and local fixtures.
        self.clean()
        self.db_backend.init_db()
        self.load_data()
        self.load_local_data(True)

    def send_restart_mail(self, fail_kind, restart_count, last_start_s):
        if not self.config.send_mail_on_restart:
            return

        log.info('Sending restart mail')
        hostname = socket.gethostname()
        LINE_COUNT = 500
        try:
            last_log = utils.tail(open(self.config.log_file, 'rb'), LINE_COUNT)
        except IOError:
            last_log = "[Not available]"

        subject = ('Synthese {fail_kind} on {hostname} (project: {project}, '
            'restarts: {restart_count})'.format(
                fail_kind=fail_kind,
                hostname=hostname,
                project=self.config.project_name,
                restart_count=restart_count))
        body = '''
Detected Synthese {fail_kind} on {hostname}. It is going to restart.
Total restart count: {restart_count}. Seconds since last start: {uptime_s}.

Last {line_count} lines of log:
{last_log}

Have a nice day,
The synthese.py wrapper script.
'''.format(
            fail_kind=fail_kind,
            hostname=hostname,
            restart_count=restart_count,
            line_count=LINE_COUNT,
            last_log=last_log,
            uptime_s=int(time.time() - last_start_s))

        utils.send_mail(self.env.config, self.config.mail_admins, subject, body)

    @command()
    def rundaemon(self, block=True):
        """Run Synthese daemon.

        With block=True, supervises the daemon: optionally restarts it on
        crash or hang (per config), mails the admins on each restart, and
        stops it on exit. With block=False, just starts it and returns.
        """

        migration.maybe_migrate_schema(self)

        self.daemon.start()
        if not block:
            return
        log.info('Daemon running, press ctrl-c to stop')
        if (self.manager_module and
            hasattr(self.manager_module, 'on_daemon_started')):
            self.manager_module.on_daemon_started(self)

        # Flag read by the hang detector thread; cleared in the finally
        # clause to let the (daemonic) thread exit its loop.
        hang_detector_running = True
        hang_detected_event = threading.Event()

        def hang_detector():
            # Polls /hang_check; after RESTART_HANG_COUNT consecutive
            # failures, signals the main loop via hang_detected_event.
            HANG_CHECK_INTERVAL_S = 60
            RESTART_HANG_COUNT = 2
            hangs_count = 0
            while hang_detector_running:
                if not self.daemon.ready:
                    log.debug('Hang detector: daemon not ready')
                    time.sleep(HANG_CHECK_INTERVAL_S)
                    hangs_count = 0
                    continue
                log.debug('Checking for hangs')
                if not utils.can_connect(self.config.port, False, '/hang_check'):
                    hangs_count += 1
                    log.warn('Detected hang. Hangs count is %s', hangs_count)
                else:
                    hangs_count = 0
                if hangs_count >= RESTART_HANG_COUNT:
                    hangs_count = 0
                    log.warn('Hang detected, setting hang detected event.')
                    hang_detected_event.set()
                time.sleep(HANG_CHECK_INTERVAL_S)

        restart_if_crashed = (self.config.restart_if_crashed_or_hung or
            self.config.restart_if_crashed)
        restart_if_hung = (self.config.restart_if_crashed_or_hung or
            self.config.restart_if_hung)

        if restart_if_hung:
            hang_detector_thread = threading.Thread(target=hang_detector)
            hang_detector_thread.daemon = True
            hang_detector_thread.start()

        restart_count = 0
        try:
            while True:
                restart = False
                start_time = time.time()
                # Wait until the daemon dies or a hang is signalled.
                while (self.daemon.is_running() and
                    not hang_detected_event.is_set()):
                    time.sleep(2)
                if hang_detected_event.is_set():
                    log.info('Hang detected')
                    hang_detected_event.clear()
                    fail_kind = 'hang'
                    self.daemon.stop()
                    restart = True
                else:
                    log.info('Daemon terminated')
                    # daemon.stopped is only set by an orderly stop; anything
                    # else is treated as a crash.
                    crashed = not self.daemon.stopped
                    fail_kind = 'crash'
                    if crashed:
                        log.warn('Stop is unexpected, crash?')
                        if restart_if_crashed:
                            restart = True

                if not restart:
                    # SystemExit propagates through the finally clause below.
                    # (The original had an unreachable `break` after this and
                    # a dead `restart_reason` local; both removed.)
                    sys.exit(1)

                log.info('Restarting daemon')
                self.send_restart_mail(fail_kind, restart_count, start_time)
                self.daemon.start(kill_proxy=False)
                restart_count += 1
        finally:
            log.info('Stopping daemon')
            hang_detector_running = False
            self.daemon.stop()

    @command()
    def stopdaemon(self):
        """Stop Synthese daemon"""
        self.daemon.stop()
        # TODO: should use the HTTP method to stop the daemon once it works.
        # Kill anything still listening on the daemon port (and on the proxy
        # port when a wsgi proxy is configured).
        ports = [self.config.port]
        if self.config.wsgi_proxy:
            ports.append(self.config.wsgi_proxy_port)
        for listening_port in ports:
            utils.kill_listening_processes(listening_port)

    @command()
    def runproxy(self):
        """Run HTTP Proxy to serve static files"""
        # Blocks until interrupted.
        proxy.serve_forever(self.env, self)

    # This is mostly for debugging. The webapp is also available with "runproxy"
    # (however it doesn't auto-reload on change).
    @command()
    def runwebapp(self):
        """Run Web Frontend"""
        # import here to avoid import cycles.
        import synthesepy.web

        web_app = synthesepy.web.get_application(project=self)

        def root_app(environ, start_response):
            # Minimal placeholder WSGI app mounted at the root path.
            output = 'Dummy root app'
            headers = [('Content-type', 'text/plain'),
                       ('Content-Length', str(len(output)))]
            start_response('200 OK', headers)
            return [output]

        # Serve the real webapp under /w, the placeholder everywhere else.
        app = wsgi.DispatcherMiddleware(root_app, {'/w': web_app})
        run_simple('0.0.0.0', 5000, app, use_reloader=True)

    @command()
    def project_command(self, args):
        # Forward args to the optional manager module's project_command hook.
        if not self.manager_module:
            raise Exception('No manager module')
        self.manager_module.project_command(self, args)

    @command()
    def db_view(self):
        """Open database in a GUI tool (if applicable)"""
        if self.db_backend.name != 'sqlite':
            raise NotImplementedError("Not implemented for this backend")
        utils.call(
            [self.config.spatialite_gui_path,
                self.db_backend.conn_info['path']],
            bg=True)

    @command()
    def db_view_gis(self):
        """Open database in a GIS GUI tool (if applicable)"""
        if self.db_backend.name != 'sqlite':
            raise NotImplementedError("Not implemented for this backend")
        utils.call(
            [self.config.spatialite_gis_path,
                self.db_backend.conn_info['path']],
            bg=True)

    @command()
    def db_shell(self, sql=None):
        """Open a SQL interpreter on the database or execute the given SQL"""
        # With sql=None the backend opens an interactive shell.
        self.db_backend.shell(sql)

    @command()
    def db_dump(self, db_backend=None, prefix=''):
        """Dump database to text file.

        db_backend -- backend to dump from (defaults to the project's own).
        prefix -- optional filename prefix for the gzipped dump.
        Writes a numbered, gzipped dump plus an uncompressed copy, and
        returns the uncompressed file name.
        """

        if not db_backend:
            db_backend = self.db_backend
        output = db_backend.dump()

        # Find the highest sequence number among existing dumps so the new
        # one gets the next id.
        max_id = 0
        for d in os.listdir(self.db_path):
            if 'sql' not in d:
                continue
            try:
                max_id = max(max_id, int(d.split('-')[1]))
            except (IndexError, ValueError):
                # Name doesn't follow the "config-NNN-date" pattern
                # (was a silent bare except before).
                pass

        target = join(
            self.db_path, '{prefix}config-{id:03}-{date}.sql.gz'.format(
                prefix=prefix, id=max_id + 1,
                date=datetime.datetime.now().strftime('%Y%m%d-%H%M')))

        # Close the handles explicitly (they were leaked before).
        gz_file = gzip.open(target, 'wb')
        try:
            gz_file.write(output)
        finally:
            gz_file.close()
        log.info('Db dumped to %r', target)

        uncompressed_fname = 'config_{project_name}.sql'.format(
                project_name=self.config.project_name)
        uncompressed_target = join(self.db_path, uncompressed_fname)
        plain_file = open(uncompressed_target, 'wb')
        try:
            plain_file.write(output)
        finally:
            plain_file.close()
        return uncompressed_fname

    @command()
    def db_open_dump(self):
        """Open the latest database dump in a text editor"""
        dump_name = 'config_{project_name}.sql'.format(
            project_name=self.config.project_name)
        uncompressed_target = join(self.db_path, dump_name)

        # Only launch the editor when its configured path actually exists.
        if os.path.isfile(self.config.editor_path):
            utils.call([self.config.editor_path, uncompressed_target], bg=True)

    @command()
    def db_restore(self, db_dump):
        """Restore a database from a text file dump.

        db_dump -- substring uniquely identifying one dump file in db/;
        pass nothing (or '-') to list the available dumps instead.
        """
        all_dumps = sorted(d for d in os.listdir(self.db_path) if 'sql' in d)

        if not db_dump or db_dump == '-':
            log.fatal('Name of dump (--dump) should be provided. '
                'Possible dumps:')
            for d in all_dumps:
                # Parenthesized form works on both Python 2 and 3.
                print(d)
            return
        dumps = [d for d in all_dumps if db_dump in d]
        if len(dumps) != 1:
            raise Exception('Not only one dump matches %r (possible dumps: %r)' %
                (db_dump, all_dumps))

        sql_file = join(self.db_path, dumps[0])
        log.info('Restoring %s', sql_file)

        # Close the dump file explicitly (the handle was leaked before).
        opener = gzip.open if sql_file.endswith('.gz') else open
        dump_file = opener(sql_file, 'rb')
        try:
            sql = dump_file.read()
        finally:
            dump_file.close()

        log.info('Restoring %s bytes of sql', len(sql))
        self.db_backend.restore(sql)

    @command()
    def db_sync_to_files(self):
        # Thin wrapper around the db_sync module.
        db_sync.sync_to_files(self)

    @command()
    def db_sync_from_files(self, host, use_http):
        # Thin wrapper around the db_sync module.
        db_sync.sync_from_files(self, host, use_http)

    @command()
    def db_sync(self, host=None):
        # Thin wrapper around the db_sync module.
        db_sync.sync(self, host)

    # Commands for syncing or managing a remote project.

    @command()
    def db_remote_dump(self):
        """Dump database from remote server.

        Returns the name of the uncompressed local dump file (see db_dump).
        Raises Exception when no remote server is configured or the backend
        is unsupported.
        """

        if not self.config.remote_server:
            raise Exception('No remote server defined in configuration')

        @contextlib.contextmanager
        def remote_transaction_sqlite(conn_info):
            # rsync the remote sqlite file locally and point a copy of the
            # connection info at it.
            remote_db_local_path = join(self.db_path, 'remote_config.db3')

            log.info('Fetching db to %r', remote_db_local_path)
            utils.rsync(self.config, '{remote_server}:' + self.config.remote_db_path,
                utils.to_cygwin_path(remote_db_local_path))

            remote_conn_info = self.db_backend.conn_info.copy()
            remote_conn_info['path'] = remote_db_local_path

            yield remote_conn_info

        @contextlib.contextmanager
        def remote_transaction_mysql(conn_info):
            # Forward the remote MySQL port over an ssh tunnel.
            MYSQL_FORWARDED_PORT = 33000

            utils.kill_listening_processes(MYSQL_FORWARDED_PORT)

            if utils.can_connect(MYSQL_FORWARDED_PORT):
                raise Exception('MySQL tunnel port (%s) is not '
                    'available' % MYSQL_FORWARDED_PORT)

            ssh_command_line = utils.ssh_command_line(
                self.config,
                extra_opts='-N -L {forwarded_port}:localhost:3306'.format(
                    forwarded_port=MYSQL_FORWARDED_PORT))
            log.debug('ssh command line for tunnel: %s', ssh_command_line)
            p = subprocess.Popen(ssh_command_line, shell=True)
            log.info('Waiting a bit for the tunnel to establish...')
            time.sleep(3)

            remote_conn_info = db_backends.ConnectionInfo(
                self.config.remote_conn_string)
            remote_conn_info.data['port'] = MYSQL_FORWARDED_PORT
            remote_conn_info.data['host'] = '127.0.0.1'

            try:
                yield remote_conn_info
            finally:
                # Bug fix: always tear down the ssh tunnel; previously an
                # exception in the with-body leaked the tunnel process.
                p.kill()

        if self.db_backend.name == 'sqlite':
            remote_transaction = remote_transaction_sqlite
        elif self.db_backend.name == 'mysql':
            remote_transaction = remote_transaction_mysql
        else:
            # Bug fix: the backend name was passed as a second Exception
            # argument instead of being %-formatted into the message.
            raise Exception('Unsupported backend: %r' % self.db_backend.name)

        with remote_transaction(self.db_backend.conn_info.copy()) as remote_conn_info:
            remote_conn_string = remote_conn_info.conn_string
            log.info('Remote connection string: %r', remote_conn_string)
            remote_backend = db_backends.create_backend(self.env, remote_conn_string)
            return self.db_dump(remote_backend, self.config.remote_dump_prefix)

    @command()
    def db_remote_restore(self):
        # Fetch a dump from the remote server, then restore it locally.
        self.db_restore(self.db_remote_dump())

    @command()
    def ssh(self):
        """Open a ssh shell on the remote server"""
        utils.call(utils.ssh_command_line(self.config))

    @command()
    def imports(self, subcommand, template_id, import_id, dummy, no_mail, args):
        """Imports management"""
        if subcommand == 'list_templates':
            # List every template together with its existing imports.
            import_templates = self.imports_manager.get_import_templates()
            for import_template in import_templates:
                log.info('Import template: id=%s label=%r', import_template.id,
                    import_template.label)
                for import_ in import_template.get_imports():
                    log.info('Import: %s, path=%s', import_.id, import_.path)
        elif subcommand == 'list_imports':
            import_template = self.imports_manager.get_import_template(
                template_id)
            for import_ in import_template.get_imports():
                log.info('Import: %s, path=%s', import_.id, import_.path)
        elif subcommand == 'create':
            import_template = self.imports_manager.get_import_template(
                template_id)
            # Optional JSON-encoded creation parameters.
            create_args = {}
            if args:
                create_args = json.loads(args)
            import_ = import_template.create_import(create_args)
            log.info('Created import with id: %s', import_.id)
        elif subcommand == 'execute':
            import_ = self.imports_manager.get_import(template_id, import_id)
            import_.execute(None, dummy, no_mail)
        else:
            # Bug fix: the subcommand was passed as a second Exception
            # argument instead of being %-formatted into the message.
            raise Exception('Unknown import subcommand: %s' % subcommand)

    # System install/uninstall

    def _get_tools(self):
        """Return the external tools managed for this project."""
        # Supervisor, Apache and WSGI are always present; the proxies are
        # only added when enabled in the configuration.
        tools = [
            external_tools.SyntheseSupervisor(self),
            Apache(self),
            external_tools.WSGI(self),
        ]
        if self.config.use_udf_proxy:
            tools.append(external_tools.UDFProxySupervisor(self))
        if self.config.use_s3_proxy:
            tools.append(external_tools.S3ProxySupervisor(self))
        return tools

    def _process_dot_in_file(self, content, vars, template_dir):
        """Render a tempita template string with the given variables."""
        def get_template(name, from_template):
            # Resolve {{inherit}}/{{include}} names relative to template_dir.
            template_path = join(template_dir, name)
            return from_template.__class__.from_filename(
                template_path, namespace=from_template.namespace,
                get_template=from_template.get_template)

        template = tempita.Template(content, get_template=get_template)
        return template.substitute(vars)

    def _replace_dot_in_files(self):
        """Generate files from their *.in templates across the project tree.

        Walks the project, skipping data/VCS directories, renders every
        "*.in" template next to itself (without the suffix) and copies the
        source file's mode onto the generated file.
        """
        for path, dirlist, filelist in os.walk(self.path):
            for exclude in ['db', 'logs', 'imports', 'deploy', '.git', '.hg', '.svn']:
                if exclude in dirlist:
                    dirlist.remove(exclude)
            for name in filelist:
                if not name.endswith('.in'):
                    continue
                source = join(path, name)
                target = source[:-len('.in')]

                log.debug('Generating file: %r -> %r', source, target)

                # Read and render first, then write: previously the target
                # was truncated before rendering, so a template error left a
                # corrupt target. Also closes the source handle, which was
                # leaked before.
                with open(source) as source_file:
                    content = source_file.read()
                vars = {
                    'generated_warning': 'WARNING: THIS FILE IS GENERATED, '
                        'DO NOT EDIT!!',
                    'project': self,
                    'p': self,
                    'config': self.config,
                    'c': self.config,
                }
                content = self._process_dot_in_file(content, vars, path)
                with open(target, 'wb') as f:
                    f.write(content)
                try:
                    shutil.copymode(source, target)
                except OSError as e:
                    # This might happen if the user is not the owner.
                    # ("except OSError, e" modernized to the 2.6+/3 form.)
                    log.warn('Error while updating mode: %s', e)