def __init__(self, context, path, pd_path=None, name=None, loglevel=logging.INFO, **kwargs):
    """Set up a lite scanner for *context* rooted at *path*.

    The backing persistent-dict file lives under *pd_path* when given,
    otherwise under the (expanded) *path* itself.  *name* defaults to
    the context string and is only used to label the logger.
    """
    self.context = context
    self.path = os.path.expanduser(path)
    name = name or context
    self.config = config.Config.instance()
    lazy_write = utils.get_interval(self.config, "LAZY WRITE", (context))
    self.pd_filename = f".cb.{context}-lite.json.bz2"
    # place the state file next to the data unless an override was given
    base_dir = pd_path if pd_path else self.path
    pd_file = f"{base_dir}/{self.pd_filename}"
    super().__init__(pd_file, lazy_write=lazy_write)
    self.logger = logging.getLogger(logger_str(__class__) + " " + name)
    self.logger.setLevel(loglevel)
    self.ignored_suffixes = {}
    self.stat = stats.Statistic(buckets=(0, 5, 10, 30))
    self.report_timer = elapsed.ElapsedTimer()
def __init__(self, context):
    """Build the source-side server state for *context*.

    Reads copy count, source path, and timing intervals from config,
    then wires up the scanner, the persistent client/lock map, and
    the in-memory bookkeeping structures.
    """
    super().__init__()
    self.context = context
    self.logger = logging.getLogger(
        utils.logger_str(__class__) + " " + context)
    self.config = config.Config.instance()
    self.copies = int(self.config.get(self.context, "copies", 2))
    self.path = config.path_for(self.config.get(self.context, "source"))
    self.scanner = scanner.Scanner(self.context, self.path)
    lazy_write = utils.str_to_duration(
        self.config.get(context, "LAZY WRITE", 5))
    # TODO: support expiration
    self.rescan = utils.str_to_duration(
        self.config.get(self.context, "rescan"))
    # persistent view of clients; entries expire after a rescan interval
    self.clients = persistent_dict.PersistentDict(
        f"/tmp/cb.s{context}.json.bz2",
        lazy_write=lazy_write, cls=lock.Lock, expiry=self.rescan)
    self.drains = elapsed.ExpiringDict(300)  # NOT persistent!
    self.locks = locker.Locker(5)
    # TODO: timers should relate to a configurable cycle time
    self.bailout = False
    self.stats = {'claims': 0, 'drops': 0}
    self.handling = False
def __init__(self, context):
    """Build the clientlet for *context*.

    Resolves the backup path, discovers source contexts, and creates a
    persistent claims cache whose entries expire after half a rescan
    interval.
    """
    super().__init__()
    self.context = context
    self.config = config.Config.instance()
    self.logger = logging.getLogger(logger_str(__class__) + " " + context)
    self.logger.info(f"Creating clientlet {self.context}")
    self.path = config.path_for(self.config.get(self.context, "backup"))
    assert os.path.exists(self.path), f"{self.path} does not exist!"
    # ALL source contexts (we care a lot)
    self.sources = {}
    self.scanners = {}
    self.random_source_list = []
    self.build_sources()
    lazy_write = utils.str_to_duration(
        self.config.get(context, "LAZY WRITE", 5))
    # TODO: my cache of claims should expire in rescan/2
    self.rescan = self.get_interval("rescan") // 2
    # BUG FIX: the configured "LAZY WRITE" value computed above was
    # previously discarded — PersistentDict was called with a
    # hard-coded lazy_write=5.  Pass the configured value instead.
    self.claims = PersistentDict(f"/tmp/cb.c{context}.json.bz2",
                                 lazy_write=lazy_write,
                                 expiry=self.rescan)
    self.drops = 0  # count the number of times I drop a file
    self.stats = stats.Stats()
    self.update_allocation()
    self.bailing = False
    self.datagrams = {}
def __init__(self, context, path, **kwargs):
    """Set up a checksumming scanner for *context* rooted at *path*.

    Optional kwargs: ``name`` (logger label, defaults to the context)
    and ``checksums`` (defaults to True).  All kwargs are forwarded to
    the persistent-dict base class.
    """
    self.context = context
    self.path = os.path.expanduser(path)
    name = kwargs.get("name", context)
    self.checksums = kwargs.get('checksums', True)
    self.config = config.Config.instance()
    self.pd_filename = f".cb.{context}.json.bz2"
    lazy_write = utils.str_to_duration(
        self.config.get(context, "LAZY WRITE", 5))
    super().__init__(f"{self.path}/{self.pd_filename}",
                     lazy_write=lazy_write, **kwargs)
    self.logger = logging.getLogger(logger_str(__class__) + " " + name)
    self.ignored_suffixes = {}
    self.report_timer = elapsed.ElapsedTimer()
    self.stat = stats.Statistic(buckets=(0, 5, 10, 30))
def __init__(self, hostname):
    """Discover this host's contexts and build a servlet for each."""
    self.hostname = hostname
    self.config = config.Config.instance()
    self.logger = logging.getLogger(utils.logger_str(__class__))
    self.contexts = self.get_contexts()
    self.servlets = {}
    self.build_servlets()
def __init__(self, hostname):
    """Find this host's backup contexts and build a clientlet for each."""
    self.hostname = hostname
    self.config = config.Config.instance()
    self.logger = logging.getLogger(logger_str(__class__))
    # ONLY my / relevant backup contexts
    self.backup_contexts = self.config.get_contexts_for_key_and_target(
        "backup", hostname)
    self.clientlets = {}
    self.build_clientlets()
def __init__(self, context):
    """Set up source-side state tracking for *context*.

    Persists file states under ``.ghetto_cluster/`` inside the
    source path.
    """
    super().__init__(context)
    self.source = self.config.get_source_for_context(context)
    self.path = config.path_for(self.source)
    states_file = f"{self.path}/.ghetto_cluster/" \
                  f"source.{context}.json"
    # NOTE(review): the lazy-write interval is passed positionally;
    # confirm it matches PersistentDict's second parameter.
    self.states = persistent_dict.PersistentDict(
        states_file, self.config.getOption("LAZY_WRITE", 5))
    self.logger = logging.getLogger(logger_str(__class__))
def __init__(self, filename, lazy_timer=0, **kwargs):
    """Load a persistent dict backed by *filename*.

    *lazy_timer* throttles how often writes hit disk; the optional
    ``metadata`` kwarg overrides the key used for metadata storage
    (default ``"__metadata__"``).
    """
    self.masterFilename = filename
    self.transactionName = None
    self.data = {}
    self.logger = logging.getLogger(logger_str(__class__))
    self.lazy_timer = lazy_timer
    self.dirty = False
    self.read()
    self.clear_dirtybits()
    self.timer = elapsed.ElapsedTimer()
    self.metadata_key = kwargs.get("metadata", "__metadata__")
def __init__(self, context, dest, source):
    """Set up replication of *source* to *dest* for *context*.

    Normalizes *source* to a trailing-slash form and persists state
    under ``.ghetto_cluster/`` inside the destination path.
    """
    super().__init__(context)
    self.dest = dest
    # rsync-style: ensure the source ends with exactly one "/"
    self.source = source if source.endswith("/") else source + "/"
    hostname = config.host_for(dest)
    self.path = config.path_for(dest)
    self.states_filename = f"{self.path}/.ghetto_cluster/" \
                           f"{hostname}.{context}.json"
    self.states = persistent_dict.PersistentDict(
        self.states_filename, self.config.getOption("LAZY_WRITE", 5))
    self.testing = False
    self.verbose = self.config.getOption("verbose", "False") == "True"
    self.logger = logging.getLogger(logger_str(__class__))
def __init__(self, filename, loglevel=logging.INFO, *args, **kwargs):
    """Load a thread-safe persistent dict backed by *filename*.

    Recognized kwargs: ``lazy_write`` (seconds between disk flushes,
    default 0) and ``cls`` (value-wrapper class, default None).  All
    args/kwargs are retained for later use.
    """
    self.logger = logging.getLogger(logger_str(__class__))
    self.logger.setLevel(loglevel)
    self.masterFilename = filename
    self.args = args
    self.kwargs = kwargs
    self.lazy_timer = kwargs.get('lazy_write', 0)
    self.data = {}
    self.cls = kwargs.get('cls', None)
    self.lock = threading.RLock()
    self.read()
    self.clear_dirtybits()
    self.timer = elapsed.ElapsedTimer()
def __init__(self, context):
    """Build the socket-based clientlet for *context*.

    Resolves the backup path, discovers source contexts, and prepares
    per-source bookkeeping.
    """
    super().__init__()
    self.context = context
    self.config = config.Config.instance()
    self.logger = logging.getLogger(logger_str(__class__) + " " + context)
    self.logger.info(f"Creating clientlet {self.context}")
    self.path = config.path_for(self.config.get(self.context, "backup"))
    assert os.path.exists(self.path), f"{self.path} does not exist!"
    # ALL source contexts (we care a lot)
    self.sources = {}
    self.scanners = {}
    self.random_source_list = []
    self.build_sources()
    self.drops = 0  # count the number of times I drop a file
    self.update_allocation()
    self.bailing = False
    self.sockets = {}
def __init__(self, context):
    """Build the source-side server state for *context*.

    Client state here is held only in memory — a restart assumes
    nothing about connected clients.
    """
    super().__init__()
    self.context = context
    self.logger = logging.getLogger(
        utils.logger_str(__class__) + " " + context)
    self.config = config.Config.instance()
    self.copies = int(self.config.get(self.context, "copies", 2))
    self.path = config.path_for(self.config.get(self.context, "source"))
    self.scanner = scanner.Scanner(self.context, self.path)
    # TODO: rename this "clients"
    self.files = dict()  # NOT persistent!  On startup assume nothing
    self.drains = elapsed.ExpiringDict(300)  # NOT persistent!
    self.locks = locker.Locker(5)
    # TODO: timers should relate to a configurable cycle time
    self.bailout = False
    self.stats = {'claims': 0, 'drops': 0}
    self.handling = False
def __init__(self):
    """Initialize an empty config holder; data is loaded later."""
    self.data = {}
    self.logger = logging.getLogger(utils.logger_str(__class__))
    self.logger.setLevel(logging.INFO)
    self.master = None
def __init__(self, servlets, port=8888):
    """Serve the given *servlets* mapping on *port* (default 8888).

    :param servlets: mapping of servlets this server dispatches to
    :param port: TCP port to listen on
    """
    super().__init__()
    # BUG FIX: the *servlets* argument was previously discarded and
    # self.servlets reset to {}; store the caller's mapping instead.
    self.servlets = servlets
    self.port = port
    self.logger = logging.getLogger(utils.logger_str(__class__))
def __init__(self, configfile, hostname=None):
    """Bootstrap the application from *configfile* for *hostname*.

    The "testing" option is read as the string "True"/"False" and
    converted to a bool (defaults to testing mode).
    """
    self.hostname = hostname
    self.config = config.Config.instance()
    self.config.init(configfile, hostname)
    self.testing = (self.config.getOption("testing", "True") == "True")
    self.logger = logging.getLogger(logger_str(__class__))