def __init__(self, pull_key, cookie, pull_cb, **packet_args):
    self.cookie = cookie
    self.exception = None
    self.seconds_remained = 0
    self.content_type = None
    self._path = join(toolkit.tmpdir.value, pull_key + '.pull')
    self._job = None

    if exists(self._path):
        try:
            with InPacket(self._path) as packet:
                self.content_type = packet.content_type
                self.cookie = _Cookie()
                self.cookie.update(packet.header['cookie'])
        except Exception:
            util.exception('Cannot open cached packet for %r, '
                    'will recreate it', self._path)
            os.unlink(self._path)

    if not exists(self._path):
        packet = OutPacket(stream=file(self._path, 'wb+'), **packet_args)
        self.content_type = packet.content_type
        # TODO Might be useful to set meaningful value here
        self.seconds_remained = node.pull_timeout.value
        self._job = coroutine.spawn(self._pull, packet, pull_cb)
def presolve(names):
    for repo in _get_repos(obs_presolve_project.value):
        for arch in repo['arches']:
            dirname = join(obs_presolve_path.value, repo['name'], arch)
            if not exists(dirname):
                os.makedirs(dirname)
            for package in names:
                try:
                    response = _request('GET', ['resolve'], params={
                        'project': obs_presolve_project.value,
                        'repository': repo['name'],
                        'arch': arch,
                        'package': package,
                        'withdeps': '1',
                        'exclude': 'sugar',
                        })
                except Exception:
                    util.exception('Failed to resolve %s:%s:%s '
                            'for presolving', repo['name'], arch, package)
                    continue
                deps_graph = []
                for pkg in response.findall('binary'):
                    deps_graph.append(dict(pkg.items()))
                with util.new_file(join(dirname, package)) as f:
                    json.dump(deps_graph, f)
def versions(self, value):
    result = []

    if self.request.mountpoint == '~':
        for path in clones.walk(self.guid):
            try:
                spec = Spec(root=path)
            except Exception:
                util.exception('Failed to read %r spec file', path)
                continue
            result.append({
                'guid': spec.root,
                'version': spec['version'],
                'arch': '*-*',
                'stability': 'stable',
                'commands': {
                    'activity': {
                        'exec': spec['Activity', 'exec'],
                        },
                    },
                'requires': spec.requires,
                })
    else:
        impls, __ = self.volume['implementation'].find(
                limit=ad.MAX_LIMIT, context=self.guid,
                layer=self.request.get('layer'))
        for impl in impls:
            for arch, spec in impl['spec'].items():
                spec['guid'] = impl.guid
                spec['version'] = impl['version']
                spec['arch'] = arch
                spec['stability'] = impl['stability']
                result.append(spec)

    return result
def _run(self):
    for cls in self._document_classes:
        _logger.info('Open %r index', cls.metadata.name)
        self._writers[cls.metadata.name] = \
                IndexWriter(self._root, cls.metadata)

    closing = False
    while True:
        document, op, args, to_commit = _queue.pop_start(not closing)
        if document is None:
            # a None document signals that the queue is being closed
            _queue.pop_done(document, to_commit)
            closing = True
            continue
        writer = self._writers[document]
        if op is not None:
            _logger.debug('Start processing %r(%r) for %r index',
                    op, args, document)
            try:
                op(writer, *args)
            except Exception:
                global errnum
                errnum += 1
                util.exception(_logger,
                        'Cannot process %r(%r) for %r index',
                        op, args, document)
        if to_commit:
            writer.commit()
        _queue.pop_done(document, to_commit)
def _post(self, request, access):
    enforce(isinstance(request.content, dict), 'Invalid value')

    directory = self.volume[request['document']]
    if 'guid' in request:
        doc = directory.get(request['guid'])
    else:
        doc = directory.document_class(None, {})
    doc.request = request
    blobs = []

    for name, value in request.content.items():
        prop = directory.metadata[name]
        if isinstance(prop, BlobProperty) and access == env.ACCESS_WRITE:
            if doc.meta(name) is None:
                prop.assert_access(env.ACCESS_CREATE)
            else:
                prop.assert_access(env.ACCESS_WRITE)
        else:
            prop.assert_access(access)
        if prop.on_set is not None:
            value = prop.on_set(doc, value)
        if isinstance(prop, BlobProperty):
            enforce(PropertyMeta.is_blob(value), 'Invalid BLOB value')
            blobs.append((name, value))
        else:
            if prop.localized and isinstance(value, basestring):
                value = {request.accept_language[0]: value}
            try:
                doc.props[name] = prop.decode(value)
            except Exception, error:
                error = 'Value %r for %r property is invalid: %s' % \
                        (value, prop.name, error)
                util.exception(error)
                raise RuntimeError(error)
def _pull(self, packet, cb):
    try:
        cb(self.cookie, packet)
    except DiskFull:
        pass
    except Exception, exception:
        util.exception('Error while making %r pull', self.cookie)
        self.exception = exception
        self.unlink()
def _call(path, filename, cb):
    cb = _connects[filename][cb]
    if cb is None:
        return
    _logger.debug('Call %r for %r mount', cb, path)
    try:
        cb(path)
    except Exception:
        util.exception(_logger, 'Cannot call %r for %r mount', cb, path)
def _close(self):
    while self._writers:
        name, writer = self._writers.popitem()
        _logger.info('Closing %r index', name)
        try:
            writer.close()
        except Exception:
            global errnum
            errnum += 1
            util.exception(_logger, 'Failed to close %r index', name)
def _do_open(self):
    try:
        self._db = xapian.WritableDatabase(self._path,
                xapian.DB_CREATE_OR_OPEN)
    except xapian.DatabaseError:
        util.exception('Cannot open Xapian index in %r, will rebuild it',
                self.metadata.name)
        shutil.rmtree(self._path, ignore_errors=True)
        self._db = xapian.WritableDatabase(self._path,
                xapian.DB_CREATE_OR_OPEN)
def notify(self, event):
    for callback, condition in self._subscriptions.items():
        for key, value in condition.items():
            if event.get(key) not in ('*', value):
                break
        else:
            # no break: every condition key matched
            try:
                callback(event)
            except Exception:
                util.exception(_logger, 'Failed to dispatch %r', event)
def serve_forever(self):
    while True:
        coroutine.select([self.fileno()], [], [])
        if self.closed:
            break
        for filename, event, cb in self.read():
            try:
                cb(filename, event)
            except Exception:
                util.exception('Cannot dispatch 0x%X event for %r',
                        event, filename)
            coroutine.dispatch()
def publish(self, event, request=None):
    if request is not None:
        event = request.content
    for callback, condition in self._subscriptions.items():
        for key, value in condition.items():
            if event.get(key) != value:
                break
        else:
            # no break: every condition key matched
            try:
                callback(event)
            except Exception:
                util.exception(_logger, 'Failed to dispatch %r', event)
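# Both dispatch loops above rely on Python's `for ... else` idiom: the
# `else` suite runs only when the inner loop finished without `break`,
# i.e. when every key of the subscription condition matched the event
# (`notify` additionally lets a '*' event value match anything). A
# standalone sketch of the exact-match rule; names are illustrative:

def _matches(condition, event):
    for key, value in condition.items():
        if event.get(key) != value:
            return False
    return True

assert _matches({'document': 'context'},
        {'document': 'context', 'event': 'update'})
assert not _matches({'document': 'context'}, {'document': 'user'})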
def _load_feed(context):
    feed = _Feed(context)

    if context == 'sugar':
        try:
            # pylint: disable-msg=F0401
            from jarabe import config
            feed.implement_sugar(config.version)
            feed.name = context
            return feed
        except ImportError:
            pass

    mountpoint = None
    feed_content = None
    for mountpoint in _mountpoints:
        try:
            feed_content = _client.get(['context', context],
                    reply=['title', 'packages', 'versions', 'dependencies'],
                    mountpoint=mountpoint)
            pipe.trace('Found %s in %s mountpoint', context, mountpoint)
            break
        except Exception:
            util.exception(_logger,
                    'Failed to fetch %r feed from %r mountpoint',
                    context, mountpoint)

    if feed_content is None:
        pipe.trace('No feeds for %s', context)
        return None

    feed.mountpoint = mountpoint
    feed.name = feed_content['title']

    packages = feed_content['packages']
    distr = '-'.join([lsb_release.distributor_id(), lsb_release.release()])
    if distr in packages and packages[distr].get('status') == 'success':
        feed.to_resolve = packages[distr].get('binary')
    elif lsb_release.distributor_id() in packages:
        feed.to_resolve = \
                packages[lsb_release.distributor_id()].get('binary')
    elif packages:
        pipe.trace('No compatible packages for %s', context)

    for release in feed_content['versions']:
        feed.implement(release, feed_content['dependencies'])

    if not feed.to_resolve and not feed.implementations:
        pipe.trace('No implementations for %s', context)

    return feed
def run(self):
    _logger.debug('Start processing queue')
    try:
        self._run()
    except _EmptyQueue:
        self._close()
    except Exception:
        global errnum
        errnum += 1
        util.exception('Write queue died, will abort the whole application')
        thread.interrupt_main()
    finally:
        _logger.debug('Stop processing queue')
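# `thread.interrupt_main()` raises KeyboardInterrupt in the main thread;
# that is how this worker escalates a fatal queue error into an
# application-wide abort. A standalone demonstration of the mechanism:

import thread
import threading
import time

def _die():
    time.sleep(0.1)
    thread.interrupt_main()  # main thread gets KeyboardInterrupt

threading.Thread(target=_die).start()
try:
    time.sleep(10)
except KeyboardInterrupt:
    print 'aborted by worker thread'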
def delete(self, guid):
    """Remove document properties from the storage.

    :param guid:
        document to remove

    """
    path = self._path(guid)
    if not exists(path):
        return
    try:
        shutil.rmtree(path)
    except Exception, error:
        util.exception()
        raise RuntimeError('Cannot delete %r document from %r: %s' %
                (guid, self.metadata.name, error))
def call(self, request, response=None):
    for a_try in range(2):
        if not self.mounted.is_set():
            self.set_mounted(True)
            _logger.debug('Wait for %s second(s) for remote connection',
                    client.connect_timeout.value)
            self.mounted.wait(client.connect_timeout.value)
        try:
            try:
                return ad.CommandsProcessor.call(self, request, response)
            except ad.CommandNotFound:
                return self.proxy_call(request, response)
        except http.ConnectionError:
            if a_try:
                raise
            util.exception('Got connection error, will try to reconnect')
            continue
def found(self, clone_path):
    hashed_path, checkin_path = _checkin_path(clone_path)
    if exists(checkin_path):
        return

    _logger.debug('Checking in activity from %r', clone_path)

    try:
        spec = Spec(root=clone_path)
    except Exception:
        util.exception(_logger, 'Cannot read %r spec', clone_path)
        return

    context = spec['Activity', 'bundle_id']

    context_path = _ensure_context_path(context, hashed_path)
    if lexists(context_path):
        os.unlink(context_path)
    os.symlink(clone_path, context_path)

    if lexists(checkin_path):
        os.unlink(checkin_path)
    client.ensure_path(checkin_path)
    os.symlink(relpath(context_path, dirname(checkin_path)), checkin_path)

    if self._contexts.exists(context):
        self._contexts.update(context, {'clone': 2})
    else:
        _logger.debug('Register unknown local activity, %r', context)

        mtime = os.stat(spec.root).st_mtime
        self._contexts.create(guid=context, type='activity',
                title={DEFAULT_LANG: spec['name']},
                summary={DEFAULT_LANG: spec['summary']},
                description={DEFAULT_LANG: spec['description']},
                clone=2, ctime=mtime, mtime=mtime)

        icon_path = join(spec.root, spec['icon'])
        if exists(icon_path):
            self._contexts.set_blob(context, 'artifact_icon', icon_path)
            with toolkit.NamedTemporaryFile() as f:
                toolkit.svg_to_png(icon_path, f.name, 32, 32)
                self._contexts.set_blob(context, 'icon', f.name)

    self._checkin_activity(spec)
def populate(self):
    """Populate the index.

    This function needs to be called right after `init()` to pick up
    possible pending changes made during the previous session when the
    index was not properly closed.

    :returns:
        a generator that yields after processing every object, to let
        the caller execute urgent tasks

    """
    found = False
    migrate = (self._index.mtime == 0)

    for guid in self._storage.walk(self._index.mtime):
        if not found:
            _logger.info('Start populating %r index', self.metadata.name)
            found = True

        if migrate:
            self._storage.migrate(guid)

        record = self._storage.get(guid)
        try:
            props = {}
            for name, prop in self.metadata.items():
                if not isinstance(prop, StoredProperty):
                    continue
                meta = record.get(name)
                if meta is not None:
                    props[name] = meta['value']
            self._index.store(guid, props, None, None, None)
            yield
        except Exception:
            util.exception('Cannot populate %r in %r, will invalidate it',
                    guid, self.metadata.name)
            record.invalidate()

    self._index.checkpoint()
    if found:
        self._save_layout()
        self.commit()
        self._notify({'event': 'populate'})
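# `populate()` is a generator, so the caller controls how much other work
# runs between records. A minimal usage sketch, assuming a `directory`
# instance of this class and the cooperative `coroutine` scheduler used
# elsewhere in this codebase:

for __ in directory.populate():
    # let pending coroutines run between indexed objects
    coroutine.dispatch()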
def __init__(self, path=None, stream=None):
    self._file = None
    self._tarball = None
    self.header = {}
    self.content_type = None

    try:
        if stream is None:
            self._file = stream = file(path, 'rb')
        elif not hasattr(stream, 'seek'):
            # tarfile/gzip/zip might require seeking
            self._file = toolkit.NamedTemporaryFile()
            if hasattr(stream, 'read'):
                while True:
                    chunk = stream.read(BUFFER_SIZE)
                    if not chunk:
                        break
                    self._file.write(chunk)
            else:
                for chunk in stream:
                    self._file.write(chunk)
            self._file.flush()
            self._file.seek(0)
            stream = self._file

        self._tarball = tarfile.open('r', fileobj=stream)
        with self._extract('header') as f:
            self.header = json.load(f)
        enforce(type(self.header) is dict, 'Incorrect header')

        if isinstance(self._tarball.fileobj, file):
            self.content_type = 'application/x-tar'
        elif isinstance(self._tarball.fileobj, gzip.GzipFile):
            self.content_type = 'application/x-compressed-tar'
        else:
            self.content_type = 'application/x-bzip-compressed-tar'
    except Exception, error:
        self.close()
        util.exception()
        raise RuntimeError('Malformed %r packet: %s' % (self, error))
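# A packet is a tarball whose 'header' member holds JSON metadata; InPacket
# accepts a filesystem path or any readable/iterable stream, buffering
# non-seekable streams into a temporary file so tarfile can seek. A minimal
# usage sketch with an illustrative path:

with InPacket('/tmp/example.packet') as packet:
    print packet.content_type          # e.g. 'application/x-tar'
    print packet.header.get('cookie')  # arbitrary JSON header data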
def _handshake(self):
    if self._response is not None:
        return self._response.raw

    _logger.debug('Subscribe to %r', self._client.api_url)

    for a_try in reversed(xrange(self._tries)):
        try:
            self._response = self._client.request('GET',
                    params={'cmd': 'subscribe'})
            break
        except Exception:
            if a_try == 0:
                raise
            util.exception(_logger,
                    'Cannot subscribe to %r, retry in %s second(s)',
                    self._client.api_url, _RECONNECTION_TIMEOUT)
            coroutine.sleep(_RECONNECTION_TIMEOUT)

    return self._response.raw
def pull(self):
    for a_try in (1, 0):
        stream = self._handshake()
        try:
            line = _readline(stream)
            enforce(line is not None, 'Subscription aborted')
            break
        except Exception:
            if a_try == 0:
                raise
            util.exception('Failed to read from %r subscription, '
                    'will resubscribe', self._client.api_url)
            # drop the broken response to force a fresh handshake
            self._response = None

    if line.startswith('data: '):
        try:
            return json.loads(line.split(' ', 1)[1])
        except Exception:
            util.exception('Failed to parse %r event from %r subscription',
                    line, self._client.api_url)
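# The subscription stream is line-oriented: events arrive as lines of the
# form 'data: {...json...}', which `pull()` decodes. A standalone sketch of
# that framing (the sample payload is illustrative):

import json

line = 'data: {"document": "implementation", "event": "update"}'
if line.startswith('data: '):
    event = json.loads(line.split(' ', 1)[1])
    assert event['document'] == 'implementation'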
def _process_aliases(self, aliases):
    packages = {}

    for repo in obs.get_repos():
        alias = aliases.get(repo['distributor_id'])
        if not alias or '*' not in alias:
            continue
        packages[repo['distributor_id']] = alias['*']
        alias = alias['*'].copy()
        try:
            to_resolve = alias.get('binary', []) + \
                    alias.get('devel', [])
            if to_resolve:
                for arch in repo['arches']:
                    obs.resolve(repo['name'], arch, to_resolve)
                alias['status'] = 'success'
            else:
                alias['status'] = 'no packages to resolve'
        except Exception, error:
            util.exception('Failed to resolve %r', alias)
            alias = {'status': str(error)}
        packages[repo['name']] = alias
def _load_pubkey(pubkey):
    pubkey = pubkey.strip()
    try:
        with toolkit.NamedTemporaryFile() as key_file:
            key_file.file.write(pubkey)
            key_file.file.flush()
            # SSH key needs to be converted to PKCS8 to let M2Crypto read it
            pubkey_pkcs8 = util.assert_call(
                    ['ssh-keygen', '-f', key_file.name, '-e', '-m', 'PKCS8'])
    except Exception:
        message = 'Cannot read DSS public key passed for registration'
        util.exception(message)
        if node.trust_users.value:
            logging.warning('Failed to read registration pubkey, '
                    'but we trust users')
            # Keep the SSH key for later conversion to PKCS8
            pubkey_pkcs8 = pubkey
        else:
            raise ad.Forbidden(message)

    return str(hashlib.sha1(pubkey.split()[1]).hexdigest()), pubkey_pkcs8
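# An OpenSSH public key line looks like 'ssh-dss AAAAB3Nza... comment';
# `pubkey.split()[1]` is the base64 key blob, and its SHA-1 hex digest
# becomes the user GUID. Illustration with a made-up blob:

import hashlib

line = 'ssh-dss AAAAB3NzaC1kc3MAAACBAP1/U4E= user@example'
guid = hashlib.sha1(line.split()[1]).hexdigest()
print guid  # 40-char hex string used as the 'user' document GUID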
def parse_version(version_string):
    """Convert a version string to an internal representation.

    The parsed format can be compared quickly using the standard
    Python functions. Adapted from Zero Install code.

    :param version_string:
        version in a format supported by 0install
    :returns:
        array of arrays of integers

    """
    if version_string is None:
        return None

    parts = _VERSION_RE.split(version_string)
    if parts[-1] == '':
        del parts[-1]  # Ends with a modifier
    else:
        parts.append('')
    enforce(parts, ValueError, 'Empty version string')

    length = len(parts)
    try:
        for x in range(0, length, 2):
            part = parts[x]
            if part:
                parts[x] = [int(i) for i in parts[x].split('.')]
            else:
                parts[x] = []  # (because ''.split('.') == [''], not [])
        for x in range(1, length, 2):
            parts[x] = _VERSION_MOD_TO_VALUE[parts[x]]
        return parts
    except ValueError as error:
        util.exception()
        raise RuntimeError('Invalid version format in "%s": %s' %
                (version_string, error))
    except KeyError as error:
        raise RuntimeError('Invalid version modifier in "%s": %s' %
                (version_string, error))
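# The nested-list representation makes version ordering fall out of plain
# Python list comparison: dotted components compare numerically rather than
# lexicographically. A minimal sketch of the intended semantics (modifier
# ordering depends on _VERSION_MOD_TO_VALUE, so only plain dotted versions
# are shown):

assert parse_version('1.2') < parse_version('1.10')  # 2 < 10 numerically
assert parse_version('1.2') == parse_version('1.2')
assert parse_version('0.9') < parse_version('1.0')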
def _connect(self):
    for url in self._api_urls:
        try:
            _logger.debug('Connecting to %r node', url)
            self._client = Client(url)
            info = self._client.get(cmd='info')
            if self._listen_events:
                subscription = self._client.subscribe()
        except Exception:
            util.exception(_logger, 'Cannot connect to %r node', url)
            continue

        impl_info = info['documents'].get('implementation')
        if impl_info:
            injector.invalidate_solutions(impl_info['mtime'])
        self._remote_volume_guid = info['guid']

        _logger.info('Connected to %r node', url)
        self._url = url
        _Mount.set_mounted(self, True)

        if not self._listen_events:
            break

        try:
            for event in subscription:
                if event.get('document') == 'implementation':
                    mtime = event.get('props', {}).get('mtime')
                    if mtime:
                        injector.invalidate_solutions(mtime)
                event['mountpoint'] = self.mountpoint
                self.publish(event)
        except Exception:
            util.exception(_logger, 'Failed to dispatch remote event')
        finally:
            _logger.info('Got disconnected from %r node', url)
            _Mount.set_mounted(self, False)
            self._client.close()
            self._client = None
def _fetch_page(self, page):
    offset = page * self._page_size

    params = {'mountpoint': self.mountpoint,
              'document': self.document,
              }
    for key, value in self._filters.items():
        if value is not None:
            params[key] = value
    params['offset'] = offset
    params['limit'] = self._page_size
    if self._query:
        params['query'] = self._query
    if self._order_by:
        params['order_by'] = self._order_by
    if self._reply:
        params['reply'] = self._reply

    try:
        response = Client.call('GET', **params)
        self._total = response['total']
    except Exception:
        util.exception(_logger, 'Failed to fetch %r query', params)
        self._total = None
        return False

    result = [None] * len(response['result'])
    for i, props in enumerate(response['result']):
        result[i] = Object(self.mountpoint, self.document, self._reply,
                props['guid'], props, offset + i)

    if not self._page_access or self._page_access[-1] != page:
        if len(self._page_access) == _QUERY_PAGES_NUMBER:
            # evict the oldest cached page, keeping the access list in sync
            del self._pages[self._page_access.pop(0)]
        self._page_access.append(page)
    self._pages[page] = result

    return True
def _do_open(self, reset):
    if reset:
        self._dirty = True
    if self._db is None:
        if exists(self._path):
            self._drop_pages()
        else:
            return
    elif not self._drop_pages() and not self._dirty:
        return

    try:
        if self._db is None:
            self._db = xapian.Database(self._path)
            _logger.debug('Opened %r RO index', self.metadata.name)
        else:
            self._db.reopen()
            _logger.debug('Re-opened %r RO index', self.metadata.name)
        self._dirty = False
    except Exception:
        util.exception(_logger, 'Cannot open %r RO index',
                self.metadata.name)
        self._db = None
def sync_session(self, path=None):
    enforce(self._mount is not None, 'No server to sync')

    _logger.debug('Start synchronization session with %r session '
            'for %r mounts', self._sync_session, self._sync_mounts)

    def sync(path):
        self._mount.publish({'event': 'sync_start', 'path': path})
        self._sync_session = self.sync(path, **(self._sync_session or {}))
        return self._sync_session is None

    try:
        while True:
            if path and sync(path):
                break
            for mountpoint in self._sync_mounts:
                if sync(mountpoint):
                    break
            break
    except Exception, error:
        util.exception(_logger, 'Failed to complete synchronization')
        self._mount.publish({'event': 'sync_error', 'error': str(error)})
        self._sync_session = None
class Router(object):

    def __init__(self, commands):
        self.commands = commands
        self._authenticated = set()
        self._valid_origins = set()
        self._invalid_origins = set()
        self._host = None
        self._routes = {}

        self._scan_for_routes(commands)
        self._scan_for_routes(self)

        if 'SSH_ASKPASS' in os.environ:
            # Otherwise ssh-keygen will pop up auth dialogs on registration
            del os.environ['SSH_ASKPASS']

    def authenticate(self, request):
        user = request.environ.get('HTTP_SUGAR_USER')
        if user is None:
            return None

        if user not in self._authenticated and \
                (request.path != ['user'] or request['method'] != 'POST'):
            _logger.debug('Logging %r user', user)
            request = Request(method='GET', cmd='exists',
                    document='user', guid=user)
            enforce(self.commands.call(request, ad.Response()),
                    Unauthorized, 'Principal user does not exist')
            self._authenticated.add(user)

        return user

    def call(self, request, response):
        if 'HTTP_ORIGIN' in request.environ:
            enforce(self._assert_origin(request.environ), ad.Forbidden,
                    'Cross-site is not allowed for %r origin',
                    request.environ['HTTP_ORIGIN'])
            response['Access-Control-Allow-Origin'] = \
                    request.environ['HTTP_ORIGIN']

        if request['method'] == 'OPTIONS':
            # TODO Process OPTIONS request per url?
            if request.environ.get('HTTP_ORIGIN'):
                response['Access-Control-Allow-Methods'] = \
                        request.environ['HTTP_ACCESS_CONTROL_REQUEST_METHOD']
                response['Access-Control-Allow-Headers'] = \
                        request.environ['HTTP_ACCESS_CONTROL_REQUEST_HEADERS']
            else:
                response['Allow'] = 'GET, POST, PUT, DELETE'
            response.content_length = 0
            return None

        request.principal = self.authenticate(request)

        if request.path[:1] == ['static']:
            path = join(static.PATH, *request.path[1:])
            result = ad.PropertyMeta(path=path,
                    mime_type=_get_mime_type(path),
                    filename=split(path)[-1])
        else:
            rout = self._routes.get((request['method'],
                    request.path[0] if request.path else ''))
            if rout:
                result = rout(request, response)
            else:
                result = self.commands.call(request, response)

        if isinstance(result, ad.PropertyMeta):
            if 'url' in result:
                raise Redirect(result['url'])

            path = result['path']
            enforce(isfile(path), 'No such file')

            mtime = result.get('mtime') or os.stat(path).st_mtime
            if request.if_modified_since and mtime and \
                    mtime <= request.if_modified_since:
                raise NotModified()
            response.last_modified = mtime

            response.content_type = result.get('mime_type') or \
                    'application/octet-stream'

            filename = result.get('filename')
            if not filename:
                filename = _filename(
                        result.get('name') or splitext(split(path)[-1])[0],
                        response.content_type)
            response['Content-Disposition'] = \
                    'attachment; filename="%s"' % filename

            result = file(path, 'rb')

        if hasattr(result, 'read'):
            if hasattr(result, 'fileno'):
                response.content_length = os.fstat(result.fileno()).st_size
            elif hasattr(result, 'seek'):
                result.seek(0, 2)
                response.content_length = result.tell()
                result.seek(0)
            result = stream_reader(result)

        return result

    def __call__(self, environ, start_response):
        request = _Request(environ)
        request_repr = \
                str(request) if _logger.level <= logging.DEBUG else None
        response = _Response()

        js_callback = None
        if 'callback' in request:
            js_callback = request.pop('callback')

        result = None
        try:
            result = self.call(request, response)
        except HTTPStatusPass, error:
            response.status = error.status
            if error.headers:
                response.update(error.headers)
            response.content_type = None
        except Exception, error:
            util.exception('Error while processing %r request', request.url)
            if isinstance(error, ad.NotFound):
                response.status = '404 Not Found'
            elif isinstance(error, ad.Forbidden):
                response.status = '403 Forbidden'
            elif isinstance(error, HTTPStatus):
                response.status = error.status
                response.update(error.headers or {})
                result = error.result
            else:
                response.status = '500 Internal Server Error'
            if result is None:
                result = {'error': str(error),
                          'request': request.url,
                          }
                response.content_type = 'application/json'
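# `Router.__call__` takes the standard WSGI (environ, start_response) pair,
# so a Router can be mounted under any WSGI server. A minimal usage sketch,
# assuming `commands` is an already-constructed commands processor from
# this codebase; host and port are illustrative:

from wsgiref.simple_server import make_server

make_server('localhost', 8000, Router(commands)).serve_forever()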