def get_token_file_path(self, token_256):
    token_256 = to_string(token_256)
    return normpath(
        join_paths(
            self.settings['token.path'],
            token_256[0],
            token_256))
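# Hypothetical sketch (not part of ines): the one-character sharding scheme
# used by get_token_file_path, written with only the standard library.
# The first character of the hex token picks a subdirectory, so tokens spread
# across 16 buckets instead of piling up in a single flat folder.
import os


def sharded_token_path(base_dir, token_hex):
    return os.path.normpath(os.path.join(base_dir, token_hex[0], token_hex))


# Usage sketch: a token beginning with 'a' lands under <base_dir>/a/<token>
# sharded_token_path('/var/tokens', 'a3f9...')  # values are illustrative only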
def __init__(self, *args, **kwargs):
    super(BaseMailerSessionManager, self).__init__(*args, **kwargs)

    pyramid_mailer = _import_module('pyramid_mailer')
    self.mailer = pyramid_mailer.mailer_factory_from_settings(self.settings, prefix='')

    pyramid_mailer_message = _import_module('pyramid_mailer.message')
    self.message_cls = pyramid_mailer_message.Message
    self.attachment_cls = pyramid_mailer_message.Attachment

    if self.settings.get('queue_path'):
        # Create the Maildir-style queue folders used by repoze.sendmail
        make_dir(self.settings['queue_path'])
        make_dir(join_paths(self.settings['queue_path'], 'cur'))
        make_dir(join_paths(self.settings['queue_path'], 'tmp'))
        make_dir(join_paths(self.settings['queue_path'], 'new'))

        sendmail_queue = _import_module('repoze.sendmail.queue')
        self.queue_processor = sendmail_queue.QueueProcessor

        self.transaction = _import_module('transaction')
        self.__dict__.setdefault('__middlewares__', []).append(RepozeTMMiddleware)
def mailer_queue_send(self):
    queue_path = self.settings.get('queue_path')
    if queue_path:
        subdir_new = join_paths(queue_path, 'new')
        subdir_cur = join_paths(queue_path, 'cur')

        while True:
            # Look for pending (non-hidden) messages in "new", then in "cur"
            for f in get_dir_filenames(subdir_new):
                if not f.startswith('.'):
                    break
            else:
                for f in get_dir_filenames(subdir_cur):
                    if not f.startswith('.'):
                        break
                else:
                    break  # Nothing left to send: break while

            # Flush the queue, then loop again to re-check the folders
            qp = self.api_session_manager.queue_processor(
                self.api_session_manager.mailer.smtp_mailer,
                self.settings['queue_path'])
            qp.send_messages()
def __init__(self, *args, **kwargs):
    super(BaseJobsManager, self).__init__(*args, **kwargs)

    self.save_reports = asbool(self.settings.get('save_reports', True))
    self.server_domain_name = self.settings.get('server_domain_name')
    self.active = bool(
        not self.server_domain_name
        or self.server_domain_name == DOMAIN_NAME)

    self.domain_names = set(self.settings.get('domain_names', ''))
    self.domain_names.add(DOMAIN_NAME)

    try:
        self.transaction = _import_module('transaction')
    except ImportError:
        self.transaction = None

    if self.active:
        temporary_dir = gettempdir()
        domain_start_filename = 'jobs domain %s started' % DOMAIN_NAME
        domain_start_file_path = join_paths(temporary_dir, domain_start_filename)

        lock_key = 'jobs monitor start check'
        self.config.cache.lock(lock_key, timeout=10)
        try:
            start_thread = not isfile(domain_start_file_path)
            if not start_thread:
                try:
                    with open(domain_start_file_path, 'r') as f:
                        process_id = int(f.read())
                except (IOError, ValueError):
                    start_thread = True
                else:
                    try:
                        getpgid(process_id)
                    except OSError as error:
                        if error.errno == errno.ESRCH:
                            # Recorded process no longer exists
                            start_thread = True
                        else:
                            raise

            if start_thread:
                with open(domain_start_file_path, 'w') as f:
                    f.write(str(PROCESS_ID))
        finally:
            self.config.cache.unlock(lock_key)

        # Start only one thread for each domain
        if start_thread:
            start_system_thread('jobs_monitor', self.run_monitor)
            print_('Running jobs monitor on PID %s' % PROCESS_ID)
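# Hypothetical sketch (not ines code): the PID-liveness check used above,
# isolated. os.getpgid raises OSError with errno ESRCH when no process with
# that PID exists, which is how a stale marker file is detected.
import errno
import os


def pid_is_alive(process_id):
    try:
        os.getpgid(process_id)
    except OSError as error:
        if error.errno == errno.ESRCH:
            return False
        raise
    return True


# Usage sketch: our own PID is always alive
# assert pid_is_alive(os.getpid())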
def delete_file_paths(self, *ids):
    if not ids:
        return False
    ids = maybe_set(ids)

    # Get the blocks referenced by the files being deleted
    blocks_ids = set(
        f.file_id_block
        for f in (
            self.session
            .query(FileBlock.file_id_block)
            .filter(FileBlock.file_id_path.in_(ids))
            .all()))

    # Keep only the blocks that no surviving file path still references
    delete_block_ids = blocks_ids.difference(
        f.file_id_block
        for f in (
            self.session
            .query(FileBlock.file_id_block)
            .filter(FileBlock.file_id_block.in_(blocks_ids))
            .filter(FileBlock.file_id_path.notin_(ids))
            .all()))

    delete_paths = None
    if delete_block_ids:
        # Get the storage paths to delete
        delete_paths = set(
            b.path
            for b in (
                self.session
                .query(BlockPath.path)
                .filter(BlockPath.id.in_(delete_block_ids))
                .all()))

    # Delete block relations
    self.direct_delete(FileBlock, FileBlock.file_id_path.in_(ids))
    # Delete file paths from the DB
    self.direct_delete(FilePath, FilePath.id.in_(ids))

    if delete_block_ids:
        # Delete block paths from the DB
        self.direct_delete(BlockPath, BlockPath.id.in_(delete_block_ids))

        # Delete block paths from storage
        for path in delete_paths:
            remove_file_quietly(join_paths(self.storage_path, path))

    return True
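# Hypothetical sketch (not ines code): the set arithmetic behind
# delete_file_paths. A block may be shared by several file paths, so it is
# only removed when no surviving path still references it.
def orphaned_blocks(blocks_of_deleted_paths, blocks_still_referenced):
    # Blocks used by the deleted paths, minus blocks any other path still uses
    return set(blocks_of_deleted_paths) - set(blocks_still_referenced)


# Usage sketch: block 2 is shared with a surviving file, so only 1 and 3 go away
# orphaned_blocks({1, 2, 3}, {2})  ->  {1, 3}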
def __init__(self, *args, **kwargs):
    super(BasePolicySessionManager, self).__init__(*args, **kwargs)

    if 'token.session_reference_path' not in self.settings:
        self.settings['token.session_reference_path'] = join_paths(
            self.settings['token.path'], 'reference')

    # Authorization session settings
    authorization = self.settings.get('authorization_session')
    if authorization:
        self.authorization_session = get_object_on_path(authorization)
    else:
        self.authorization_session = AuthenticatedSession
def read(self, size=-1):
    if size == 0:
        return b''

    try:
        open_block = self.blocks[self.block_position]
    except IndexError:
        # No more blocks: end of file
        return b''

    if isinstance(open_block, string_types):
        # Blocks are stored lazily as relative paths; open on first access
        open_block = self.blocks[self.block_position] = get_open_file(
            join_paths(self.storage_path, open_block))

    binary = open_block.read(size)
    if size > 0:
        size -= len(binary)
        if size <= 0:
            return binary

    # Current block exhausted; continue reading from the next one
    self.block_position += 1
    binary += self.read(size)
    return binary
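# Hypothetical sketch (not ines code): the multi-block read strategy above,
# reduced to plain file objects. Each block is read until exhausted, then the
# reader moves on to the next block, so callers see one continuous stream.
def read_across_blocks(open_blocks, size=-1):
    chunks = []
    remaining = size
    for block in open_blocks:
        data = block.read(remaining if remaining > 0 else -1)
        chunks.append(data)
        if remaining > 0:
            remaining -= len(data)
            if remaining <= 0:
                break
    return b''.join(chunks)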
def __init__(
        self, path, expire=None, retry_errno=None, retries=3,
        **lock_settings):
    self.expire = maybe_integer(expire)
    self.path = make_dir(path)
    self.retries = maybe_integer(retries) or 3
    self.retry_errno = maybe_set(retry_errno)
    self.retry_errno.update(DEFAULT_RETRY_ERRNO)

    # Lock settings: keyword arguments prefixed with "lock_" are stripped
    # of the prefix and forwarded to LockMe
    settings = {}
    for key, value in list(lock_settings.items()):
        if key.startswith('lock_'):
            settings[key.split('lock_', 1)[1]] = value

    lock_path = settings.pop('path', None) or join_paths(self.path, 'locks')
    self.lockme = LockMe(lock_path, **settings)
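# Hypothetical sketch (not ines code): how the "lock_" keyword-argument
# namespace above is split out and forwarded to a nested component.
def split_prefixed_settings(prefix, **kwargs):
    return {
        key[len(prefix):]: value
        for key, value in kwargs.items()
        if key.startswith(prefix)}


# Usage sketch:
# split_prefixed_settings('lock_', lock_timeout=5, expire=10)
#   -> {'timeout': 5}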
def create_file_path(self, file_date=None):
    file_date = maybe_date(file_date or TODAY_DATE())
    base_folder_path = file_date.strftime('%Y%m/%d')

    # Find the last numbered folder for this date
    last_folder = 0
    full_base_folder_path = join_paths(self.storage_path, base_folder_path)
    folders = sorted(
        int(i)
        for i in get_dir_filenames(full_base_folder_path)
        if i.isdigit())
    if folders:
        last_folder = folders[-1]

    folder_path = join_paths(base_folder_path, last_folder)
    full_folder_path = join_paths(self.storage_path, folder_path)
    if len(get_dir_filenames(full_folder_path)) >= self.max_blocks_per_folder:
        # Current folder is full; move on to the next numbered folder
        folder_path = join_paths(base_folder_path, last_folder + 1)

    while True:
        filename = make_unique_hash(length=80)
        path = join_paths(folder_path, filename)
        full_path = join_paths(self.storage_path, path)
        if not isfile(full_path):
            return full_path, path
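# Hypothetical sketch (not ines code): the date-based folder layout used by
# create_file_path. Blocks for a given day live under YYYYMM/DD/<n>, where
# <n> is bumped whenever the current numbered folder reaches its file limit.
import datetime
import os


def block_folder(base_dir, file_date, folder_number):
    return os.path.join(base_dir, file_date.strftime('%Y%m/%d'), str(folder_number))


# Usage sketch:
# block_folder('/srv/storage', datetime.date(2024, 3, 7), 2)
#   -> '/srv/storage/202403/07/2'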
def create_temporary_file(mode='wb'):
    temporary_path = join_paths(FILES_TEMPORARY_DIR, make_unique_hash(64))
    open_file = get_open_file(temporary_path, mode=mode)
    return temporary_path, open_file
from ines.path import join_paths
from ines.url import get_url_file
from ines.url import open_json_url
from ines.utils import file_unique_code
from ines.utils import get_dir_filenames
from ines.utils import get_open_file
from ines.utils import make_dir
from ines.utils import make_unique_hash
from ines.utils import put_binary_on_file
from ines.utils import remove_file_quietly
from ines.utils import string_unique_code


TODAY_DATE = datetime.date.today

FilesDeclarative = sql_declarative_base('ines.storage')
FILES_TEMPORARY_DIR = join_paths(gettempdir(), 'ines-tmp-files')
EMPTY_STRING = u('')


class BaseStorageSessionManager(BaseSQLSessionManager):
    __api_name__ = 'storage'
    __database_name__ = 'ines.storage'

    def __init__(self, *args, **kwargs):
        super(BaseStorageSessionManager, self).__init__(*args, **kwargs)
        make_dir(self.settings['folder_path'])

        if issubclass(self.session, BaseStorageWithImageSession):
            self.image_cls = _import_module('PIL.Image')
            self.resize_quality = self.image_cls.ANTIALIAS
def get_reference_path(self, name):
    first_name = name.split(' ', 1)[0]
    first_name_256 = make_sha256(first_name)
    return join_paths(self.reference_path, first_name_256[0], first_name_256)
def __init__(self, *args, **kwargs):
    super(SaveMeWithReference, self).__init__(*args, **kwargs)
    self.reference_path = make_dir(join_paths(self.path, 'references'))
def get_file_path(self, name):
    name_256 = make_sha256(name)
    return join_paths(self.path, name_256[0], name_256)
def get_reference_file_path(self, session_key_256):
    session_key_256 = to_string(session_key_256)
    return normpath(
        join_paths(
            self.settings['token.session_reference_path'],
            session_key_256))