def clean_junk_locks(self):
    """Walk the lock directory and delete stale lock artifacts.

    Two kinds of entries are cleaned:

    * Position lock files (name contains a ``.``): if the file records
      ``DOMAIN_NAME`` plus a PID whose process no longer exists, the file
      is removed quietly.
    * Wait-list files (no ``.`` in the name): each line holding
      ``DOMAIN_NAME`` plus a dead PID is blanked (the blank line keeps the
      position numbering stable); if afterwards no live entries remain the
      file is deleted, otherwise it is rewritten without the dead entries.
      The rewrite only happens when the file's mtime did not change while
      we were inspecting it, to avoid clobbering a concurrent update.

    Liveness is probed with ``getpgid(pid)``: it raises ``OSError`` with
    ``errno.ESRCH`` when no such process exists.
    """
    for path, dirnames, filenames in walk_on_path(self.path):
        filenames = filenames or []
        # Also pick up visible files one level down, relative to `path`.
        for dirname in dirnames:
            folder_path = join_paths(path, dirname)
            for filename in get_dir_filenames(folder_path):
                if not filename.startswith('.'):
                    filenames.append(join_paths(dirname, filename))

        for filename in filenames:
            filename = to_string(filename)
            if filename.startswith('.'):
                # Hidden/temporary entries are never lock files.
                continue

            file_path = join_paths(path, filename)
            if '.' in filename:
                # Delete inactive positions locks
                binary = get_file_binary(file_path, mode='r')
                if binary:
                    info = binary.split()
                    if len(info) >= 2 and info[0] == DOMAIN_NAME and maybe_integer(info[1]):
                        try:
                            getpgid(int(info[1]))
                        except OSError as error:
                            # FIX: compare errno with `==`, not `is`.
                            # `is` only worked by CPython small-int caching.
                            if error.errno == errno.ESRCH:
                                remove_file_quietly(
                                    file_path,
                                    retries=self.retries,
                                    retry_errno=self.retry_errno)
            else:
                # Clean locks wait list
                # Get last modified time, to check if file has been
                # updated while we process it.
                modified_time = file_modified_time(file_path)
                if modified_time:
                    binary = get_file_binary(file_path, mode='r')
                    if binary:
                        # Find alive locks
                        keep_codes = binary.splitlines()
                        for i, line in enumerate(keep_codes):
                            info = line.split()
                            if len(info) >= 2 and info[0] == DOMAIN_NAME and maybe_integer(info[1]):
                                try:
                                    getpgid(int(info[1]))
                                except OSError as error:
                                    # FIX: `==` instead of `is` (see above).
                                    if error.errno == errno.ESRCH:
                                        # Add empty line to keep position number
                                        keep_codes[i] = ''

                        # Only touch the file if it was not updated while
                        # we were scanning it.
                        last_modified_time = file_modified_time(file_path)
                        if last_modified_time and modified_time == last_modified_time:
                            if not any(keep_codes):
                                # FIX: pass the same retry settings used by
                                # every other removal in this class.
                                remove_file_quietly(
                                    file_path,
                                    retries=self.retries,
                                    retry_errno=self.retry_errno)
                            else:
                                with open(file_path, 'w') as f:
                                    f.write(NEW_LINE.join(keep_codes))
def create_file_path(self, file_date=None):
    """Build a fresh, unused storage path for a new block file.

    The layout is ``<storage_path>/<YYYYMM/DD>/<N>/<hash>`` where ``N`` is
    a numeric subfolder that rolls over once it holds
    ``self.max_blocks_per_folder`` entries.

    :param file_date: optional date (defaults to today) used for the
        ``YYYYMM/DD`` prefix.
    :return: tuple ``(full_path, path)`` — absolute path and the path
        relative to ``self.storage_path``.
    """
    file_date = maybe_date(file_date or TODAY_DATE())
    base_folder_path = file_date.strftime('%Y%m/%d')
    full_base_folder_path = join_paths(self.storage_path, base_folder_path)

    # Highest-numbered existing subfolder for this date; 0 when none exist.
    numbered = [int(name)
                for name in get_dir_filenames(full_base_folder_path)
                if name.isdigit()]
    last_folder = max(numbered) if numbered else 0

    folder_path = join_paths(base_folder_path, last_folder)
    full_folder_path = join_paths(self.storage_path, folder_path)
    # Roll over to the next numbered subfolder once the current one is full.
    if len(get_dir_filenames(full_folder_path)) >= self.max_blocks_per_folder:
        folder_path = join_paths(base_folder_path, last_folder + 1)

    # Draw random names until one does not collide with an existing file.
    while True:
        filename = make_unique_hash(length=80)
        path = join_paths(folder_path, filename)
        full_path = join_paths(self.storage_path, path)
        if not isfile(full_path):
            return full_path, path
def mailer_queue_send(self):
    """Flush the mail queue.

    Keeps running the queue processor until neither the ``new`` nor the
    ``cur`` maildir subfolder contains a visible (non-dot) entry. Does
    nothing when no ``queue_path`` is configured.
    """
    queue_path = self.settings.get('queue_path')
    if not queue_path:
        return

    subdir_new = join_paths(queue_path, 'new')
    subdir_cur = join_paths(queue_path, 'cur')

    def pending(folder):
        # True when the folder holds at least one visible entry.
        return any(not name.startswith('.')
                   for name in get_dir_filenames(folder))

    # `or` short-circuits: `cur` is only scanned when `new` is empty,
    # matching the original check order.
    while pending(subdir_new) or pending(subdir_cur):
        qp = self.api_session_manager.queue_processor(
            self.api_session_manager.mailer.smtp_mailer,
            self.settings['queue_path'])
        qp.send_messages()
def unlock(self, name):
    """Release the lock registered under *name*.

    Position lock files live in ``<path>/<first hex char>/`` and are named
    ``<sha256(name)>.<position>``. The lowest-numbered position file that
    can be deleted releases the lock and returns ``True``. When no
    position file could be removed, the base lock file is deleted quietly
    and that call's result is returned.
    """
    name_256 = make_sha256(name)
    pattern_name = name_256 + '.'
    folder_path = join_paths(self.path, name_256[0])

    # Lookup for locked positions
    positions = [(int(entry.split('.', 1)[1]), entry)
                 for entry in get_dir_filenames(folder_path)
                 if entry.startswith(pattern_name)]

    # Try positions in ascending order; stop at the first removal.
    for _, entry in sorted(positions):
        removed = remove_file(
            join_paths(folder_path, entry),
            retries=self.retries,
            retry_errno=self.retry_errno)
        if removed:
            return True

    # If no position found, delete base lock
    return remove_file_quietly(
        self.get_file_path(name),
        retries=self.retries,
        retry_errno=self.retry_errno)