def __init__(self, path=None, size_min=4800, size_max=5200, catalog=None, backend='lfs'):
    """Initialize the database wrapper.

    path -- filesystem path of the database, or None for an in-memory /
        detached use (NOTE(review): when path is None AND catalog is None,
        `self.catalog` is never set — confirm callers always pass one of them).
    size_min, size_max -- bounds for the handler LRU cache.
    catalog -- an already-built catalog to reuse; if None it is loaded
        from `path` (when a path is given).
    backend -- key into `backends_registry` selecting the storage backend.
    """
    # Init path
    self.path = path
    # Init backend: resolve the backend class from the registry
    self.backend_cls = backends_registry[backend]
    # The "git add" arguments: pending adds/changes/removals for the next commit
    self.added = set()
    self.changed = set()
    self.removed = set()
    self.has_changed = False
    # init backend (instantiated on the same path)
    self.backend = self.backend_cls(self.path)
    # A mapping from key to handler; automatic=False means the cache is
    # pruned explicitly, not on every insertion
    self.cache = LRUCache(size_min, size_max, automatic=False)
    # TODO FIXME Catalog should be moved into backend
    # 7. Get the catalog: prefer the one given by the caller
    if catalog:
        self.catalog = catalog
    else:
        if self.path:
            self.catalog = self.get_catalog()
    # Log (NOTE(review): if path is None this writes to 'None/database.log' —
    # presumably path is always set in production; verify)
    catalog_log = '{}/database.log'.format(self.path)
    self.logger = Logger(catalog_log)
    register_logger(self.logger, 'itools.database')
def __init__(self, root, address=None, port=None, access_log=None, event_log=None, pid_file=None, profile=None):
    """Build the web server: default to listening on every interface
    (empty address) and port 8080, wire the event log, then delegate
    the rest of the setup to the base class.
    """
    # Normalize the listen address/port defaults
    address = "" if address is None else address
    port = 8080 if port is None else port
    # Route web events to the given log file
    register_logger(WebLogger(log_file=event_log), "itools.web")
    super(WebServer, self).__init__(address, port, access_log, pid_file, profile)
    # The application's root
    self.root = root
def __init__(self, root, access_log=None, event_log=None):
    """Build the web server and register its access and event loggers."""
    super(WebServer, self).__init__()
    # The application's root
    self.root = root
    # Access log: rotated every three weeks
    access_logger = AccessLogger(access_log, rotate=timedelta(weeks=3))
    register_logger(access_logger, 'itools.web_access')
    # Events log
    event_logger = WebLogger(event_log)
    register_logger(event_logger, 'itools.web')
    # Stats about uploads currently in flight
    self.upload_stats = {}
def __init__(self, ref, fields, read_only=False, asynchronous_mode=True, root=None):
    """Open (or wrap) a xapian database.

    ref -- either an already-open Database/WritableDatabase instance,
        or a path to open one from.
    fields -- the field definitions used for indexing.
    read_only -- open read-only and skip metadata initialization.
    asynchronous_mode -- when writable, start a transaction up front.
    root -- the application root (kept for later use).
    """
    self.read_only = read_only
    # Load the database: reuse the instance if one was passed in,
    # otherwise open it from the given path
    if isinstance(ref, (Database, WritableDatabase)):
        path = None
        self._db = ref
    elif read_only:
        path = lfs.get_absolute_path(ref)
        self._db = Database(path)
    else:
        path = lfs.get_absolute_path(ref)
        self._db = WritableDatabase(path, DB_OPEN)
    self._asynchronous = asynchronous_mode
    self._fields = fields
    self.root = root
    # FIXME: There's a bug in xapian:
    # Wa cannot get stored values if DB not flushed
    #self.commit_each_transaction = root is None
    self.commit_each_transaction = True
    # Asynchronous mode: open the transaction immediately
    if not read_only and asynchronous_mode:
        self._db.begin_transaction(self.commit_each_transaction)
    # Set XAPIAN_FLUSH_THRESHOLD
    os.environ["XAPIAN_FLUSH_THRESHOLD"] = "2000"
    # Load the xfields from the database
    self._metadata = {}
    self._value_nb = 0
    self._prefix_nb = 0
    self.transaction_abspaths = []
    self._load_all_internal()
    if not read_only:
        self._init_all_metadata()
    # Catalog log (only when we opened from a filesystem path)
    if path:
        log_path = '{}/catalog.log'.format(path)
        self.logger = CatalogLogger(log_path)
        register_logger(self.logger, 'itools.catalog')
def __init__(self, root, address=None, port=None, access_log=None, event_log=None, pid_file=None, profile=None):
    """Build the web server, filling in the listen defaults (all
    interfaces, port 8080), registering the event log, and handing
    the remaining setup to the base class.
    """
    # Apply defaults only when the caller left them unset
    address = address if address is not None else ''
    port = port if port is not None else 8080
    # Web events go to the event log
    register_logger(WebLogger(log_file=event_log), 'itools.web')
    super(WebServer, self).__init__(address, port, access_log, pid_file, profile)
    # The application's root
    self.root = root
def __init__(self, ref, fields, read_only=False, asynchronous_mode=True):
    """Open (or wrap) a xapian database.

    ref -- an open Database/WritableDatabase instance, or a path.
    fields -- the field definitions used for indexing.
    read_only -- open read-only and skip metadata initialization.
    asynchronous_mode -- when writable, start a transaction up front.
    """
    self.read_only = read_only
    # Load the database: an instance is used as-is, a path is opened
    if isinstance(ref, (Database, WritableDatabase)):
        path = None
        self._db = ref
    else:
        path = lfs.get_absolute_path(ref)
        if read_only:
            self._db = Database(path)
        else:
            self._db = WritableDatabase(path, DB_OPEN)
    db = self._db
    self._asynchronous = asynchronous_mode
    self._fields = fields
    # FIXME: There's a bug in xapian:
    # Wa cannot get stored values if DB not flushed
    self.commit_each_transaction = True
    # Asynchronous mode: begin the transaction right away
    if not read_only and asynchronous_mode:
        db.begin_transaction(self.commit_each_transaction)
    # Set XAPIAN_FLUSH_THRESHOLD
    os.environ["XAPIAN_FLUSH_THRESHOLD"] = "2000"
    # Load the xfields from the database
    self._metadata = {}
    self._value_nb = 0
    self._prefix_nb = 0
    self._load_all_internal()
    if not read_only:
        self._init_all_metadata()
    # Catalog log (only meaningful when a path was given)
    if path:
        catalog_log = '{}/catalog.log'.format(path)
        self.logger = CatalogLogger(catalog_log)
        register_logger(self.logger, 'itools.catalog')
def __init__(self, target, read_only=False, cache_size=None, profile_space=False):
    """Initialize the server from the instance found at `target`.

    target -- path of the server instance (config, logs, spool, database).
    read_only -- open the database read-only (also forced by the
        'database-readonly' config value).
    cache_size -- database cache size as 'min:max' or a single number;
        defaults to the 'database-size' config value.
    profile_space -- when True, enable guppy memory profiling.

    Raises ValueError if the configured log-level is unknown.
    """
    target = lfs.get_absolute_path(target)
    self.target = target
    self.read_only = read_only
    # Set timestamp
    self.timestamp = str(int(time() / 2))
    # Load the config
    config = get_config(target)
    self.config = config
    load_modules(config)
    self.modules = config.get_value('modules')
    # Contact Email
    self.smtp_from = config.get_value('smtp-from')
    # Full-text indexing
    self.index_text = config.get_value('index-text', type=Boolean, default=True)
    # Accept cors
    self.accept_cors = config.get_value(
        'accept-cors', type=Boolean, default=False)
    # Profile Memory (guppy is a third-party package, only imported on demand)
    if profile_space is True:
        import guppy.heapy.RM
    # The database: cache size is 'min:max' or a single value used for both
    if cache_size is None:
        cache_size = config.get_value('database-size')
    if ':' in cache_size:
        size_min, size_max = cache_size.split(':')
    else:
        size_min = size_max = cache_size
    size_min, size_max = int(size_min), int(size_max)
    read_only = read_only or config.get_value('database-readonly')
    database = get_database(target, size_min, size_max, read_only)
    self.database = database
    # Find out the root class
    root = get_root(database)
    # Load environment file (sits next to the root class's module)
    root_file_path = inspect.getfile(root.__class__)
    environment_path = str(get_reference(root_file_path).resolve('environment.json'))
    if vfs.exists(environment_path):
        with open(environment_path, 'r') as f:
            data = f.read()
            self.environment = json.loads(data)
    # Init fake context
    context = get_fake_context(database, root.context_cls)
    context.server = self
    # Initialize the base web server
    access_log = '%s/log/access' % target
    super(Server, self).__init__(root, access_log=access_log)
    # Email service: make sure the failed-spool folder exists
    self.spool = lfs.resolve2(self.target, 'spool')
    spool_failed = '%s/failed' % self.spool
    if not lfs.exists(spool_failed):
        lfs.make_folder(spool_failed)
    # Configuration variables
    get_value = config.get_value
    self.smtp_host = get_value('smtp-host')
    self.smtp_login = get_value('smtp-login', default='').strip()
    self.smtp_password = get_value('smtp-password', default='').strip()
    # Email is sent asynchronously
    self.flush_spool()
    # Logging
    log_file = '%s/log/events' % target
    log_level = config.get_value('log-level')
    if log_level not in log_levels:
        # FIX: corrected message typo ("configuraion") and replaced the
        # Python-2-only `raise ValueError, msg` statement with the call
        # form, which is valid in both Python 2 and 3
        msg = 'configuration error, unexpected "%s" value for log-level'
        raise ValueError(msg % log_level)
    log_level = log_levels[log_level]
    logger = Logger(log_file, log_level, rotate=timedelta(weeks=3))
    register_logger(logger, None)
    logger = WebLogger(log_file, log_level)
    register_logger(logger, 'itools.web')
    # Session timeout
    self.session_timeout = get_value('session-timeout')
    # Register routes
    self.register_dispatch_routes()
# Command line usage = 'usine.py [options] <module> <item> <action>...' parser = OptionParser(usage, description='foo', formatter=HelpFormatter()) parser.add_option('--offline', action='store_true', help='In this mode the source code will not be synchronized from the ' 'mirror, and remote actions will be disabled.') parser.add_option('-b', '--branch', default='master', help='The branch to use (default: master), this option only applies ' ' to some actions.') options, args = parser.parse_args() # Init logger log_file_path = expanduser('~/.usine/usine.log') logger = UsineLogger(log_file_path) register_logger(logger, None) # Configuration error = config.load() if error: print error exit(1) config.options = options # Case 0: Nothing, print help if not args: print 'Usage:', usage print print 'Modules:' print for name in sorted(modules):