class Classifer(object):
    """Round-robin dispatcher mapping keys to rows of queued callables.

    Rows live in an ordered DictWithLists; rotate_data() services the
    oldest row and cycles it to the back of the ordering.
    """
    # NOTE(review): class name looks like a typo for "Classifier", but it is
    # the public name — renaming would break callers.

    def __init__(self):
        self.channels = DictWithLists()

    def add_data(self, keyable, func):
        # hmm, this should rotate every 10 seconds or so, but moving over
        # the old data is hard (can't write out-of-order)
        #key = sha.sha(id(o)).hexdigest()[0]
        # this is technically round-robin
        self.channels.push_to_row(keyable, func)

    def rem_data(self, key):
        # Best-effort: a key with no row is silently ignored.
        try:
            self.channels.poprow(key).clear()
        except KeyError:
            pass

    def rotate_data(self):
        # Detach the top-most (oldest) row from the ordered dict...
        key = self.channels.iterkeys().next()
        row = self.channels.poprow(key)
        item = row.popleft()
        # ...and re-insert it, which moves the whole row to the bottom.
        self.channels.setrow(key, row)
        return item

    def __len__(self):
        return len(self.channels)
def __init__(self, doneflag, add_task, external_add_task, max_files_open,
             num_disk_threads):
    """Set up the handle pool and apply the open-file limit.

    doneflag, external_add_task and num_disk_threads are accepted for
    interface compatibility but are not stored by this variant.
    """
    self.add_task = add_task
    # idle-but-open handles and checked-out handles, both keyed by filename
    self.active_file_to_handles = DictWithSets()
    self.open_file_to_handles = DictWithLists()
    # filename -> owning torrent (used elsewhere to route close() failures)
    self.file_to_torrent = {}
    # acquire requests parked until a handle frees up
    self.waiting_ops = []
    # last: enforcing the limit may close pooled handles
    self.set_max_files_open(max_files_open)
class FilePool(object): def __init__(self, doneflag, add_task, external_add_task, max_files_open, num_disk_threads): self.doneflag = doneflag self.external_add_task = external_add_task self.file_to_torrent = {} self.free_handle_condition = threading.Condition() self.active_file_to_handles = DictWithSets() self.open_file_to_handles = DictWithLists() self.set_max_files_open(max_files_open) self.diskq = Queue.Queue() for i in xrange(num_disk_threads): t = threading.Thread(target=self._disk_thread, name="disk_thread-%s" % (i+1)) t.start() self.doneflag.addCallback(self.finalize) def finalize(self, r=None): # re-queue self so all threads die. we end up with one extra event on # the queue, but who cares. self._create_op(self.finalize) def close_all(self): failures = {} self.free_handle_condition.acquire() while self.get_open_file_count() > 0: while len(self.open_file_to_handles) > 0: filename, handle = self.open_file_to_handles.popitem() try: handle.close() except Exception, e: failures[self.file_to_torrent[filename]] = e self.free_handle_condition.notify() if self.get_open_file_count() > 0: self.free_handle_condition.wait(1) self.free_handle_condition.release() for torrent, e in failures.iteritems(): torrent.got_exception(e)
def __init__(self, doneflag, add_task, external_add_task, max_files_open,
             num_disk_threads):
    """Initialize the pool and spawn num_disk_threads disk worker threads.

    add_task is accepted for interface compatibility but not stored here.
    """
    self.doneflag = doneflag
    self.external_add_task = external_add_task
    # filename -> owning torrent, used to route close() failures
    self.file_to_torrent = {}
    self.free_handle_condition = threading.Condition()
    # checked-out handles / idle-but-open handles, keyed by filename
    self.active_file_to_handles = DictWithSets()
    self.open_file_to_handles = DictWithLists()
    self.set_max_files_open(max_files_open)
    self.diskq = Queue.Queue()
    for idx in xrange(num_disk_threads):
        thread_name = "disk_thread-%s" % (idx + 1)
        threading.Thread(target=self._disk_thread, name=thread_name).start()
    # finalize() runs at shutdown so the workers can drain and exit
    self.doneflag.addCallback(self.finalize)
class FilePool(object):
    """Pool of open file handles shared by running torrents (Deferred variant).

    Caps the number of simultaneously open files at max_files_open;
    acquire_handle() requests beyond the cap are parked in waiting_ops
    and serviced as handles are released.
    """

    def __init__(self, doneflag, add_task, external_add_task, max_files_open,
                 num_disk_threads):
        self.add_task = add_task
        # filename -> owning torrent, used to route close() failures
        self.file_to_torrent = {}
        # acquire requests parked until a handle frees up
        self.waiting_ops = []
        # checked-out handles / idle-but-open handles, keyed by filename
        self.active_file_to_handles = DictWithSets()
        self.open_file_to_handles = DictWithLists()
        self.set_max_files_open(max_files_open)

    def close_all(self):
        """Close every pooled handle; fires the returned Deferred when done."""
        df = Deferred()
        self._close_all(df)
        return df

    def _close_all(self, df):
        errors = {}
        # Close everything that is idle right now.
        while len(self.open_file_to_handles) > 0:
            filename, handle = self.open_file_to_handles.popitem()
            try:
                handle.close()
            except:
                errors[self.file_to_torrent[filename]] = sys.exc_info()
        for torrent, e in errors.iteritems():
            torrent.got_exception(e)
        if self.get_open_file_count() > 0:
            # it would be nice to wait on the deferred for the outstanding ops
            self.add_task(0.5, self._close_all, df)
        else:
            df.callback(True)

    def close_files(self, file_set):
        """Close pooled handles for the given files; returns a Deferred."""
        df = Deferred()
        self._close_files(df, file_set)
        return df

    def _close_files(self, df, file_set):
        failure = None
        done = False
        # Snapshot the keys: we mutate the dict while closing rows.
        filenames = self.open_file_to_handles.keys()
        for filename in filenames:
            if filename not in file_set:
                continue
            row = self.open_file_to_handles.poprow(filename)
            for handle in row:
                try:
                    handle.close()
                except:
                    failure = sys.exc_info()
        # Done only once no requested file still has a checked-out handle.
        done = True
        for filename in file_set.iterkeys():
            if filename in self.active_file_to_handles:
                done = False
                break
        if failure is not None:
            # NOTE(review): if not done, the deferred can be errbacked here
            # and fired again on the rescheduled pass — confirm Deferred
            # tolerates this.
            df.errback(failure)
        if not done:
            # it would be nice to wait on the deferred for the outstanding ops
            self.add_task(0.5, self._close_files, df, file_set)
        else:
            df.callback(True)

    def set_max_files_open(self, max_files_open):
        # A non-positive limit means "effectively unlimited".
        if max_files_open <= 0:
            max_files_open = 1e100
        self.max_files_open = max_files_open
        self.close_all()

    def add_files(self, files, torrent):
        """Register files as owned by torrent; all-or-nothing on conflict."""
        # Validate everything first so a conflict registers nothing.
        for filename in files:
            if filename in self.file_to_torrent:
                raise BTFailure(_("File %s belongs to another running torrent")
                                % filename)
        for filename in files:
            self.file_to_torrent[filename] = torrent

    def remove_files(self, files):
        for filename in files:
            del self.file_to_torrent[filename]

    def _ensure_exists(self, filename, length=0):
        # Create the file (and its parent directory) on first touch.
        if not os.path.exists(filename):
            parent = os.path.split(filename)[0]
            if parent != '' and not os.path.exists(parent):
                os.makedirs(parent)
            fh = file(filename, 'wb')
            make_file_sparse(filename, fh, length)
            fh.close()

    def get_open_file_count(self):
        # Idle plus checked-out handles.
        return (self.open_file_to_handles.total_length() +
                self.active_file_to_handles.total_length())

    def free_handle_notify(self):
        # Service the oldest parked acquire, if any.
        if self.waiting_ops:
            queued = self.waiting_ops.pop(0)
            self._produce_handle(*queued)

    def acquire_handle(self, filename, for_write, length=0):
        """Return a Deferred firing with a handle, or None if unregistered."""
        df = Deferred()
        # abort disk ops on unregistered files
        if filename not in self.file_to_torrent:
            df.callback(None)
            return df
        if self.active_file_to_handles.total_length() == self.max_files_open:
            # At the cap: park until release_handle() frees one.
            self.waiting_ops.append((df, filename, for_write, length))
        else:
            self._produce_handle(df, filename, for_write, length)
        return df

    def _produce_handle(self, df, filename, for_write, length):
        if filename in self.open_file_to_handles:
            # Reuse an idle handle, upgrading read-only to writable if needed.
            handle = self.open_file_to_handles.pop_from_row(filename)
            if for_write and not is_open_for_write(handle.mode):
                handle.close()
                handle = open_sparse_file(filename, 'rb+', length=length)
            #elif not for_write and is_open_for_write(handle.mode):
            #    handle.close()
            #    handle = file(filename, 'rb', 0)
        else:
            # At the cap: evict an arbitrary idle handle to make room.
            if self.get_open_file_count() == self.max_files_open:
                victim_name, victim_handle = \
                    self.open_file_to_handles.popitem()
                victim_handle.close()
            self._ensure_exists(filename, length)
            if for_write:
                handle = open_sparse_file(filename, 'rb+', length=length)
            else:
                handle = open_sparse_file(filename, 'rb', length=length)
        self.active_file_to_handles.push_to_row(filename, handle)
        df.callback(handle)

    def release_handle(self, filename, handle):
        # NOTE(review): "remove_fom_row" looks misspelled — presumably it
        # matches the DictWithSets API; verify before renaming.
        self.active_file_to_handles.remove_fom_row(filename, handle)
        self.open_file_to_handles.push_to_row(filename, handle)
        self.free_handle_notify()
def __init__(self):
    """Start with an empty ordered mapping of key -> queued items."""
    self.channels = DictWithLists()