def delete_after(filename):
    """ Context manager for closing and deleting a temporary file after usage """
    # Hand control back to the caller; cleanup runs when the generator resumes.
    yield None
    # Best-effort removal: a vanished or locked file is logged, never raised.
    try:
        os.unlink(filename)
    except OSError:
        msg = 'Unable to remove \'{}\''.format(filename)
        LOG.error(msg)
def SearchableText(obj):
    """ Wrapper code for catching and logging error inside the indexer
        because plone.indexer is stupid and swallows errors without logging.
    """
    try:
        result = _SearchableText(obj)
    except Exception as e:
        # Log with full traceback before re-raising, since plone.indexer
        # would otherwise swallow the error silently.
        LOG.error('SearchableText indexer error ({})'.format(e), exc_info=True)
        raise
    return result
def tearDownZope(self, app):
    """ Remove the test directory from the WebDAV server (best effort)
        and uninstall the product.
    """
    credentials = dict(
        username=WEBDAV_USERNAME,
        password=WEBDAV_PASSWORD)
    handle = get_fs_wrapper(WEBDAV_URL, credentials=credentials)
    if handle.exists(self.testing_directory):
        # Removal failures must not abort teardown -- log and continue.
        try:
            handle.removedir(
                self.testing_directory, recursive=True, force=True)
        except Exception as e:
            LOG.error('tearDownZope() failed ({})'.format(e))
    z2.uninstallProduct(app, 'xmldirector.plonecore')
def tearDownZope(self, app):
    """ Remove the test directory from the connector backend (best effort)
        and uninstall the product.
    """
    credentials = dict(
        username=CONNECTOR_USERNAME,
        password=CONNECTOR_PASSWORD)
    handle = get_fs_wrapper(CONNECTOR_URL, credentials=credentials)
    if handle.exists(self.testing_directory):
        # Removal failures must not abort teardown -- log and continue.
        try:
            handle.removedir(
                self.testing_directory, recursive=True, force=True)
        except Exception as e:
            LOG.error('tearDownZope() failed ({})'.format(e))
    z2.uninstallProduct(app, 'xmldirector.plonecore')
def render(self):
    """ Render the view, guarding against disabled API access.

    Raises ValueError when the context is a connector whose
    ``api_enabled`` flag is off; any error from _render() is logged
    (request body plus traceback) and re-raised.
    """
    if IConnector.providedBy(
            self.context) and not self.context.api_enabled:
        # BUG FIX: absolute_url is a method -- the original formatted the
        # bound method object into the message instead of the URL string.
        raise ValueError('API access disabled for {}'.format(
            self.context.absolute_url()))
    try:
        return self._render()
    except Exception as e:
        LOG.error(self.request.text())
        LOG.error(e, exc_info=True)
        # Bare raise preserves the original traceback ('raise e' resets it).
        raise
def webdav_handle(self, subpath=None):
    """ Return WebDAV handle to root of configured connector object
        including configured webdav_subpath.

        subpath -- optional extra path appended (URL-quoted) below the
        configured root.
        Raises zExceptions.NotFound when the remote resource does not
        exist, RuntimeError (with a .url attribute) on connection errors.
    """
    registry = getUtility(IRegistry)
    settings = registry.forInterface(IWebdavSettings)
    adapted = IConnector(self)
    # Per-connector URL wins over the registry-wide default.
    url = adapted.webdav_url or settings.webdav_url
    if adapted.webdav_subpath:
        url += '/{}'.format(adapted.webdav_subpath)
    if subpath:
        url += '/{}'.format(urllib.quote(subpath))
    # system-wide credentials
    username = settings.webdav_username
    password = settings.webdav_password or ''
    # local credentials override the system credentials
    # (only when the connector defines its own webdav_url)
    if adapted.webdav_url:
        username = adapted.webdav_username or ''
        password = adapted.webdav_password or ''
    try:
        return get_fs_wrapper(url, credentials=dict(username=username,
                                                    password=password))
    except fs.errors.ResourceNotFoundError:
        LOG.error(u'Error accessing {}::{}::{}'.format(
            self.absolute_url(), url,
            self.REQUEST.get('HTTP_USER_AGENT')), exc_info=True)
        raise zExceptions.NotFound(url)
    except fs.errors.ResourceInvalidError:
        # The URL points at a file, not a directory: open the parent
        # directory instead and mark the wrapper as a "leaf" so callers
        # can access the single file via __leaf_filename__.
        parts = url.rsplit('/', 1)
        wrapper = get_fs_wrapper(parts[0], credentials=dict(username=username,
                                                            password=password))
        wrapper.__leaf__ = True
        wrapper.__leaf_filename__ = parts[1]
        return wrapper
    except fs.errors.RemoteConnectionError as e:
        # Connection failures are converted to a RuntimeError carrying
        # the failing URL; deliberately not logged here (see disabled line).
        # LOG.error(u'Error accessing {}::{}::{}'.format(self.absolute_url(), url, self.REQUEST.get('HTTP_USER_AGENT')), exc_info=True)
        exc = RuntimeError(url)
        exc.url = url
        raise exc
    except Exception as e:
        # Anything else: log with traceback, tag the exception with the
        # URL for upstream error pages, and re-raise.
        LOG.error(u'Error accessing {}::{}::{}'.format(
            self.absolute_url(), url,
            self.REQUEST.get('HTTP_USER_AGENT')), exc_info=True)
        e.url = url
        raise e
def entries(self):
    """ Return current lock entries for display.

    If the request carries a 'release' parameter, the referenced lock
    resource is removed first.  Returns dict(error=<message or None>,
    rows=<sequence of lock records>).
    """
    release_uri = self.request.get('release')
    if release_uri:
        handle = getUtility(IConnectorHandle).get_handle()
        # BUG FIX: the original used release_uri.lstrip('/db'), which
        # strips any leading run of the CHARACTERS '/', 'd', 'b' -- so
        # '/db/data/x' became 'ata/x'.  Remove the '/db' prefix and the
        # following slash explicitly instead.
        if release_uri.startswith('/db'):
            release_uri = release_uri[len('/db'):]
        release_uri = release_uri.lstrip('/')
        if handle.exists(release_uri):
            handle.remove(release_uri)
    api_view = API(context=None, request=self.request)
    try:
        results = api_view.generic_query(
            'all-locks', deserialize_json=True)
    except APIError as e:
        msg = u'Unable to retrieve locks ({})'.format(e)
        LOG.error(msg, exc_info=False)
        return dict(error=msg, rows=())
    if results:
        return dict(error=None, rows=results['lock'])
    return dict(error=None, rows=())
def entries(self):
    """ Return current lock entries for display.

    If the request carries a 'release' parameter, the referenced lock
    resource is removed first.  Returns dict(error=<message or None>,
    rows=<sequence of lock records>).
    """
    release_uri = self.request.get('release')
    if release_uri:
        handle = getUtility(IWebdavHandle).webdav_handle()
        # BUG FIX: the original used release_uri.lstrip('/db'), which
        # strips any leading run of the CHARACTERS '/', 'd', 'b' -- so
        # '/db/data/x' became 'ata/x'.  Remove the '/db' prefix and the
        # following slash explicitly instead.
        if release_uri.startswith('/db'):
            release_uri = release_uri[len('/db'):]
        release_uri = release_uri.lstrip('/')
        if handle.exists(release_uri):
            handle.remove(release_uri)
    api_view = API(context=None, request=self.request)
    try:
        results = api_view.generic_query('all-locks', deserialize_json=True)
    except APIError as e:
        msg = u'Unable to retrieve locks ({})'.format(e)
        LOG.error(msg, exc_info=False)
        return dict(error=msg, rows=())
    if results:
        return dict(error=None, rows=results['lock'])
    return dict(error=None, rows=())
def get_fs_wrapper(url, credentials=None, context=None):
    """ Return a filesystem wrapper for the given URL.

    Dispatches on the URL scheme: file, http(s) (WebDAV), s3, sftp,
    ftp and dropbox.  ``credentials`` is a dict with 'username' and
    'password' keys (required for the remote schemes); ``context`` is
    only used for the dropbox scheme (annotation storage of OAuth
    tokens).  Wrappers pointing at a single file rather than a
    directory are tagged with __leaf__/__leaf_filename__.
    Raises ValueError for unsupported schemes.
    """
    if not url.endswith('/'):
        url += '/'
    f = furl(url)
    original_url = url
    if f.scheme == 'file':
        # hack for OSFP, fix this
        # url[7:] drops the 'file://' prefix (7 characters).
        path = urllib.unquote(url[7:])
        wrapper = OSFSWrapper(path, encoding='utf-8')
    elif f.scheme.startswith(('http', 'https')):
        # NOTE(review): the 'https' entry is redundant -- 'https' already
        # starts with 'http'.
        try:
            wrapper = DAVFSWrapper(original_url, credentials)
        except fs.errors.ResourceNotFoundError:
            LOG.info('Failed to get DAVFSWrapper for {}'.format(original_url),
                     exc_info=True)
            raise NotFound(original_url)
        except Exception as e:
            LOG.error('Failed to get DAVFSWrapper for {}'.format(original_url),
                      exc_info=True)
            raise e
    elif f.scheme == 's3':
        if have_boto:
            wrapper = S3FSWrapper(bucket=f.host,
                                  prefix=str(f.path),
                                  aws_access_key=credentials['username'],
                                  aws_secret_key=credentials['password'])
        else:
            raise ImportError(
                'boto module is not installed (required for S3 access)')
    elif f.scheme == 'sftp':
        f_path = urllib.unquote(str(f.path))
        if have_paramiko:
            wrapper = SFTPFSWrapper(connection=(f.host, f.port or 22),
                                    root_path=f_path,
                                    username=(credentials['username'] or None),
                                    password=(credentials['password'] or None))
            # If the path denotes a single file, reopen the wrapper on
            # the parent directory and mark it as a leaf.
            if wrapper.isfile('.') and wrapper.isdir('.'):
                parts = filter(None, f_path.split('/'))
                wrapper = SFTPFSWrapper(connection=(f.host, f.port or 22),
                                        root_path='/'.join(parts[:-1]),
                                        username=(credentials['username'] or None),
                                        password=(credentials['password'] or None))
                wrapper.__leaf__ = True
                wrapper.__leaf_filename__ = parts[-1]
        else:
            raise ImportError(
                'paramiko module is not installed (required for SFTP access)')
    elif f.scheme == 'ftp':
        wrapper = FTPFSWrapper(host=f.host,
                               port=f.port,
                               user=credentials['username'],
                               passwd=credentials['password'])
    elif f.scheme == 'dropbox':
        registry = getUtility(IRegistry)
        settings = registry.forInterface(IDropboxSettings)
        # OAuth tokens are stored as annotations on the connector object.
        annotation = IAnnotations(context)
        token_key = annotation.get(dropbox_authentication.DROPBOX_TOKEN_KEY)
        token_secret = annotation.get(
            dropbox_authentication.DROPBOX_TOKEN_SECRET)
        if not token_key or not token_secret:
            # Not yet authorized: point the user at the authorization view.
            context = zope.globalrequest.getRequest().PUBLISHED.context
            authorization_url = '{}/authorize-dropbox'.format(
                context.absolute_url())
            raise RuntimeError(
                'Connector does not seem to be '
                'authorized with Dropbox (use {})'.format(authorization_url))
        wrapper = DropboxFSWrapper(
            settings.dropbox_app_key,
            settings.dropbox_app_secret,
            'dropbox',
            annotation[dropbox_authentication.DROPBOX_TOKEN_KEY],
            annotation[dropbox_authentication.DROPBOX_TOKEN_SECRET],
            root_path=urllib.unquote(str(f.path)))
        if wrapper.isfile('.'):
            wrapper.__leaf__ = True
            wrapper.__leaf_filename__ = '.'
    else:
        raise ValueError('Unsupported URL schema {}'.format(original_url))
    wrapper.url = url
    return wrapper
def parse_folder(self, family, directory, version_suffix=None):
    """ Parse a given folder for XML schema files (.xsd) or DTD
        files (.dtd); RelaxNG (.rng) and Schematron (.sch) files are
        registered as well.

    Each validator is stored in self.registry under the key
    '<family>::<registered name>'; ``version_suffix`` is inserted into
    the registered name before the extension.  Raises IOError when the
    directory cannot be opened and ValueError on duplicate keys.
    """
    if directory.startswith('/'):
        directory = 'file://' + directory
    try:
        handle = fs.opener.fsopendir(directory)
    except Exception as e:
        raise IOError(
            u'Directory "{}" does not exist ({})'.format(directory, e))
    for name in handle.listdir():
        fullname = os.path.join(directory, name)
        LOG.debug(u'Parsing "{}"'.format(fullname))
        # FIX: split once -- the original called os.path.splitext twice
        # and left an unused 'base' local behind.
        basename, ext = os.path.splitext(name)
        registered_name = name
        if version_suffix:
            registered_name = '{}-{}{}'.format(basename, version_suffix, ext)
        key = '{}::{}'.format(family, registered_name)
        ts = time.time()
        if ext == '.dtd':
            with handle.open(name, 'rb') as fp:
                validator = lxml.etree.DTD(fp)
            validator_type = 'DTD'
        elif ext == '.xsd':
            with handle.open(name, 'rb') as fp:
                try:
                    schema_doc = lxml.etree.XML(fp.read())
                    validator = lxml.etree.XMLSchema(schema_doc)
                except Exception as e:
                    # Skip unparseable schemas instead of aborting the scan.
                    LOG.error(u'Unable to parse XML Schema ({})'.format(
                        e), exc_info=True)
                    continue
            validator_type = 'XSD'
        elif ext == '.rng':
            with handle.open(name, 'rb') as fp:
                relaxng_doc = lxml.etree.XML(fp.read())
                validator = lxml.etree.RelaxNG(relaxng_doc)
            validator_type = 'RELAXNG'
        elif ext == '.sch':
            with handle.open(name, 'rb') as fp:
                relaxng_doc = lxml.etree.XML(fp.read())
                validator = lxml.isoschematron.Schematron(relaxng_doc)
            validator_type = 'SCHEMATRON'
        else:
            # Unknown extension -- not a validator file.
            continue
        if key in self.registry:
            raise ValueError('{} already registered'.format(key))
        duration = time.time() - ts
        self.registry[key] = dict(
            family=family,
            name=registered_name,
            validation=validator,
            path=fullname,
            info=handle.getinfo(name),
            duration=duration,
            type=validator_type,
            registered=datetime.datetime.utcnow())
        if duration > 3:
            LOG.warn(
                'Slow loading/parsing of ({}, {}), duration: {:0.3f} seconds'.format(key, fullname, duration))
        LOG.info('Registered ({}, {}), duration: {:0.3f} seconds'.format(
            key, fullname, duration))
def parse_folder(self, family, directory, version_suffix=None):
    """ Parse a given folder for XML schema files (.xsd) or DTD
        files (.dtd); RelaxNG (.rng) and Schematron (.sch) files are
        registered as well.

    Each validator is stored in self.registry under the key
    '<family>::<registered name>'; ``version_suffix`` is inserted into
    the registered name before the extension.  Raises IOError when the
    directory cannot be opened and ValueError on duplicate keys.
    """
    if directory.startswith('/'):
        directory = 'file://' + directory
    try:
        handle = fs.opener.fsopendir(directory)
    except Exception as e:
        raise IOError(
            u'Directory "{}" does not exist ({})'.format(directory, e))
    for name in handle.listdir():
        fullname = os.path.join(directory, name)
        LOG.info(u'Parsing "{}"'.format(fullname))
        # FIX: split once -- the original called os.path.splitext twice
        # and left an unused 'base' local behind.
        basename, ext = os.path.splitext(name)
        registered_name = name
        if version_suffix:
            registered_name = '{}-{}{}'.format(basename, version_suffix, ext)
        key = '{}::{}'.format(family, registered_name)
        ts = time.time()
        if ext == '.dtd':
            with handle.open(name, 'rb') as fp:
                validator = lxml.etree.DTD(fp)
            validator_type = 'DTD'
        elif ext == '.xsd':
            with handle.open(name, 'rb') as fp:
                try:
                    schema_doc = lxml.etree.XML(fp.read())
                    validator = lxml.etree.XMLSchema(schema_doc)
                except Exception as e:
                    # Skip unparseable schemas instead of aborting the scan.
                    LOG.error(u'Unable to parse XML Schema ({})'.format(
                        e), exc_info=True)
                    continue
            validator_type = 'XSD'
        elif ext == '.rng':
            with handle.open(name, 'rb') as fp:
                relaxng_doc = lxml.etree.XML(fp.read())
                validator = lxml.etree.RelaxNG(relaxng_doc)
            validator_type = 'RELAXNG'
        elif ext == '.sch':
            with handle.open(name, 'rb') as fp:
                relaxng_doc = lxml.etree.XML(fp.read())
                validator = lxml.isoschematron.Schematron(relaxng_doc)
            validator_type = 'SCHEMATRON'
        else:
            # Unknown extension -- not a validator file.
            continue
        if key in self.registry:
            raise ValueError('{} already registered'.format(key))
        duration = time.time() - ts
        self.registry[key] = dict(
            family=family,
            name=registered_name,
            validation=validator,
            path=fullname,
            info=handle.getinfo(name),
            duration=duration,
            type=validator_type,
            registered=datetime.datetime.utcnow())
        if duration > 3:
            LOG.warn(
                'Slow loading/parsing of ({}, {}), duration: {:0.3f} seconds'.format(key, fullname, duration))
        LOG.info('Registered ({}, {}), duration: {:0.3f} seconds'.format(
            key, fullname, duration))
# -*- coding: utf-8 -*-

################################################################
# xmldirector.plonecore
# (C) 2016, Andreas Jung, www.zopyx.com, Tuebingen, Germany
################################################################

import os
import sys
import pkg_resources

from xmldirector.plonecore.logger import LOG

__import__('pkg_resources').declare_namespace(__name__)

# Check filesystem encoding
fs_enc = sys.getfilesystemencoding()
if fs_enc.lower() not in ('utf8', 'utf-8'):
    LOG.error('Filesystem encoding should be UTF-8, not {}'.format(fs_enc))

# import patches only for Plone 5
dist = pkg_resources.get_distribution('Products.CMFPlone')
if dist.version.startswith('5'):
    import patches
    # FIX: message typo -- was 'Applied patched for Plone 5'.
    LOG.info('Applied patches for Plone 5')