def __init__(self, ref, fields, read_only=False, asynchronous_mode=True):
    """Open a catalog.

    ref -- either an already-open Database/WritableDatabase instance,
        or a filesystem path (absolute or relative) to the database.
    fields -- dict describing the catalog fields.
    read_only -- open the database read-only (only used when ref is a
        path).
    asynchronous_mode -- when True and writable, start a transaction
        immediately so writes are batched until commit.
    """
    # Load the database: accept an open handle or a path.
    # Use the idiomatic tuple form of isinstance (consistent with the
    # other Catalog constructors in this file).
    if isinstance(ref, (Database, WritableDatabase)):
        self._db = ref
    else:
        path = lfs.get_absolute_path(ref)
        if read_only:
            self._db = Database(path)
        else:
            self._db = WritableDatabase(path, DB_OPEN)
    db = self._db
    self._asynchronous = asynchronous_mode
    self._fields = fields
    # Asynchronous mode: batch writes inside one open transaction
    if not read_only and asynchronous_mode:
        db.begin_transaction(False)
    # Load the xfields from the database; these are filled in by
    # _load_all_internal()
    self._metadata = {}
    self._key_field = None
    self._value_nb = 0
    self._prefix_nb = 0
    self._load_all_internal()
def make_database(path, size_min, size_max, fields=None, backend=None):
    """Create a new empty database if the given path does not exist or
    is a folder.

    fields defaults to the globally registered fields; backend selects
    which storage backend to initialize (None picks the default entry
    of the registry).
    """
    abs_path = lfs.get_absolute_path(path)
    # Fall back to the registered field set when none is given
    effective_fields = get_register_fields() if fields is None else fields
    # Initialize the selected storage backend on disk
    backends_registry[backend].init_backend(abs_path, effective_fields)
    # Hand back a read/write database over the freshly created store
    return RWDatabase(abs_path, size_min, size_max)
def make_catalog(uri, fields):
    """Creates a new and empty catalog in the given uri.

    fields must be a dict. It contains some informations about the
    fields in the database. It must contain at least the abspath key
    field.

    For example:

      fields = {'abspath': String(stored=True, indexed=True),
                'name': Unicode(indexed=True), ...}
    """
    # Create the underlying writable database, then wrap it
    absolute_path = lfs.get_absolute_path(uri)
    writable_db = WritableDatabase(absolute_path, DB_CREATE)
    return Catalog(writable_db, fields)
def __init__(self, ref, fields, read_only=False, asynchronous_mode=True, root=None): self.read_only = read_only # Load the database if isinstance(ref, (Database, WritableDatabase)): path = None self._db = ref else: path = lfs.get_absolute_path(ref) if read_only: self._db = Database(path) else: self._db = WritableDatabase(path, DB_OPEN) db = self._db self._asynchronous = asynchronous_mode self._fields = fields self.root = root # FIXME: There's a bug in xapian: # Wa cannot get stored values if DB not flushed #self.commit_each_transaction = root is None self.commit_each_transaction = True # Asynchronous mode if not read_only and asynchronous_mode: db.begin_transaction(self.commit_each_transaction) # Set XAPIAN_FLUSH_THRESHOLD os.environ["XAPIAN_FLUSH_THRESHOLD"] = "2000" # Load the xfields from the database self._metadata = {} self._value_nb = 0 self._prefix_nb = 0 self.transaction_abspaths = [] self._load_all_internal() if not read_only: self._init_all_metadata() # Catalog log if path: catalog_log = '{}/catalog.log'.format(path) self.logger = CatalogLogger(catalog_log) register_logger(self.logger, 'itools.catalog')
def make_git_database(path, size_min, size_max, fields=None):
    """Create a new empty Git database if the given path does not exist
    or is a folder.

    If the given path is a folder with content, the Git archive will be
    initialized and the content of the folder will be added to it in a
    first commit.
    """
    abs_path = lfs.get_absolute_path(path)
    # Initialize the Git worktree that backs the database
    open_worktree('%s/database' % abs_path, init=True)
    # Build the catalog, defaulting to the registered field set
    if fields is None:
        fields = get_register_fields()
    new_catalog = make_catalog('%s/catalog' % abs_path, fields)
    # Wire the catalog into a read/write database and return it
    rw_database = RWDatabase(abs_path, size_min, size_max)
    rw_database.catalog = new_catalog
    return rw_database
def make_catalog(uri, fields):
    """Creates a new and empty catalog in the given uri. If uri=None
    the catalog is made "in memory".

    fields must be a dict. It contains some informations about the
    fields in the database. By example:

      fields = {'id': Integer(is_key_field=True, is_stored=True,
                              is_indexed=True), ...}
    """
    # In-memory catalog: no path, synchronous writes
    if uri is None:
        return Catalog(inmemory_open(), fields, asynchronous_mode=False)
    # On-disk catalog in the local filesystem
    absolute_path = lfs.get_absolute_path(uri)
    writable_db = WritableDatabase(absolute_path, DB_CREATE)
    return Catalog(writable_db, fields)
def __init__(self, ref, fields, read_only=False, asynchronous_mode=True):
    """Open a catalog.

    ref is either an open Database/WritableDatabase handle or a path
    to the database; fields is a dict describing the catalog fields.
    When writable and asynchronous_mode is True, a transaction is
    opened right away so writes are batched.
    """
    # Accept an open handle, otherwise open the database from a path
    if isinstance(ref, (Database, WritableDatabase)):
        self._db = ref
    else:
        path = lfs.get_absolute_path(ref)
        self._db = Database(path) if read_only else WritableDatabase(path, DB_OPEN)
    self._asynchronous = asynchronous_mode
    self._fields = fields
    # Writable + asynchronous: batch writes inside one transaction
    if not read_only and asynchronous_mode:
        self._db.begin_transaction(False)
    # Field bookkeeping, populated by _load_all_internal()
    self._metadata = {}
    self._value_nb = 0
    self._prefix_nb = 0
    self._load_all_internal()
def __init__(self, ref, fields, read_only=False, asynchronous_mode=True): self.read_only = read_only # Load the database if isinstance(ref, (Database, WritableDatabase)): path = None self._db = ref else: path = lfs.get_absolute_path(ref) if read_only: self._db = Database(path) else: self._db = WritableDatabase(path, DB_OPEN) db = self._db self._asynchronous = asynchronous_mode self._fields = fields # FIXME: There's a bug in xapian: # Wa cannot get stored values if DB not flushed self.commit_each_transaction = True # Asynchronous mode if not read_only and asynchronous_mode: db.begin_transaction(self.commit_each_transaction) # Set XAPIAN_FLUSH_THRESHOLD os.environ["XAPIAN_FLUSH_THRESHOLD"] = "2000" # Load the xfields from the database self._metadata = {} self._value_nb = 0 self._prefix_nb = 0 self._load_all_internal() if not read_only: self._init_all_metadata() # Catalog log if path: catalog_log = '{}/catalog.log'.format(path) self.logger = CatalogLogger(catalog_log) register_logger(self.logger, 'itools.catalog')
def __init__(self, target, read_only=False, cache_size=None, profile_space=False):
    """Initialize the server rooted at the *target* instance folder.

    target -- path to the instance folder (config, logs, spool, database).
    read_only -- open the database read-only; may also be forced by the
        'database-readonly' config value.
    cache_size -- database cache size as "min:max" or a single number;
        defaults to the 'database-size' config value.
    profile_space -- when True, load guppy to profile memory usage.

    Raises ValueError if the configured log-level is not a known level.
    """
    target = lfs.get_absolute_path(target)
    self.target = target
    self.read_only = read_only
    # Set timestamp (coarse: half-second resolution)
    self.timestamp = str(int(time() / 2))
    # Load the config
    config = get_config(target)
    self.config = config
    load_modules(config)
    self.modules = config.get_value('modules')
    # Contact Email
    self.smtp_from = config.get_value('smtp-from')
    # Full-text indexing
    self.index_text = config.get_value('index-text', type=Boolean, default=True)
    # Accept cors
    self.accept_cors = config.get_value(
        'accept-cors', type=Boolean, default=False)
    # Profile Memory (import has the side effect of enabling guppy's
    # resource monitor)
    if profile_space is True:
        import guppy.heapy.RM
    # The database: cache size is "min:max" or a single number
    if cache_size is None:
        cache_size = config.get_value('database-size')
    if ':' in cache_size:
        size_min, size_max = cache_size.split(':')
    else:
        size_min = size_max = cache_size
    size_min, size_max = int(size_min), int(size_max)
    read_only = read_only or config.get_value('database-readonly')
    database = get_database(target, size_min, size_max, read_only)
    self.database = database
    # Find out the root class
    root = get_root(database)
    # Load environment file (looked up next to the root class module)
    root_file_path = inspect.getfile(root.__class__)
    environement_path = str(get_reference(root_file_path).resolve('environment.json'))
    if vfs.exists(environement_path):
        with open(environement_path, 'r') as f:
            data = f.read()
            self.environment = json.loads(data)
    # Init fake context
    context = get_fake_context(database, root.context_cls)
    context.server = self
    # Initialize
    access_log = '%s/log/access' % target
    super(Server, self).__init__(root, access_log=access_log)
    # Email service: make sure the failed-mail folder exists
    self.spool = lfs.resolve2(self.target, 'spool')
    spool_failed = '%s/failed' % self.spool
    if not lfs.exists(spool_failed):
        lfs.make_folder(spool_failed)
    # Configuration variables
    get_value = config.get_value
    self.smtp_host = get_value('smtp-host')
    self.smtp_login = get_value('smtp-login', default='').strip()
    self.smtp_password = get_value('smtp-password', default='').strip()
    # Email is sent asynchronously
    self.flush_spool()
    # Logging: validate the configured level before use.
    # Fixed: use the parenthesized raise form (the old
    # "raise ValueError, msg" syntax is Python-2-only) and the
    # 'configuraion' typo in the message.
    log_file = '%s/log/events' % target
    log_level = config.get_value('log-level')
    if log_level not in log_levels:
        msg = 'configuration error, unexpected "%s" value for log-level'
        raise ValueError(msg % log_level)
    log_level = log_levels[log_level]
    logger = Logger(log_file, log_level, rotate=timedelta(weeks=3))
    register_logger(logger, None)
    logger = WebLogger(log_file, log_level)
    register_logger(logger, 'itools.web')
    # Session timeout
    self.session_timeout = get_value('session-timeout')
    # Register routes
    self.register_dispatch_routes()