def push(self, data=None, arcname=None, force=False, **meta):
    """Append content to this outgoing packet.

    Dispatches on the type of ``data``:
    - an ``OutPacket``: its payload file is popped and embedded under
      ``arcname`` (defaulting to the sub-packet's basename);
    - ``None``: only the ``meta`` record is stored;
    - a file-like object (has ``fileno``): stored as a single blob;
    - otherwise: an iterable of records, each JSON-encoded one per line
      and split across archive members to respect the size limit.

    :param data: payload; see dispatch rules above
    :param arcname: name to store the payload under inside the packet
    :param force: passed through to ``_addfile`` for sub-packets
    :param meta: extra metadata stored alongside the payload
    :raises DiskFull: when a pending record cannot fit within the
        remaining size limit
    """
    if isinstance(data, OutPacket):
        _logger.trace('Writing %r sub packet to %r', data.path, self)
        if not arcname:
            arcname = data.basename
        f = data.pop()
        try:
            self._addfile(arcname, f, force)
        finally:
            # Always release the popped file object, even on failure
            f.close()
        return
    elif data is None:
        _logger.trace('Writing %r record to %r', meta, self)
        self._add(arcname, None, meta)
        return
    elif hasattr(data, 'fileno'):
        # A real file object: store it wholesale as a blob
        _logger.trace('Writing %r blob to %r', data.name, self)
        meta['content_type'] = 'blob'
        self._add(arcname, data, meta)
        return
    _logger.trace('Writing %r records to %r', data, self)
    if not hasattr(data, 'next'):
        # Normalize plain iterables to an iterator (Python 2 protocol)
        data = iter(data)
    try:
        chunk = json.dumps(data.next())
    except StopIteration:
        # Nothing to write at all
        return
    meta['content_type'] = 'records'
    # ``chunk`` holds the next not-yet-written JSON line; ``None`` means
    # the source iterator is exhausted
    while chunk is not None:
        self._flush(0, True)
        limit = self._enforce_limit()
        with toolkit.NamedTemporaryFile() as arcfile:
            while True:
                limit -= len(chunk)
                if limit <= 0:
                    # Current chunk does not fit; leave it pending for the
                    # next archive member
                    break
                arcfile.write(chunk)
                arcfile.write('\n')
                try:
                    chunk = json.dumps(data.next())
                except StopIteration:
                    chunk = None
                    break
            if not arcfile.tell():
                # Nothing was written to this member
                if chunk is not None:
                    # A record is still pending but cannot fit even in an
                    # empty member: the packet is out of space
                    _logger.trace('Reach size limit for %r packet', self)
                    raise DiskFull()
                break
            arcfile.seek(0)
            self._add(arcname, arcfile, meta)
def found(self, clone_path):
    """Check in a locally discovered activity clone.

    Reads the activity spec from ``clone_path``, records symlinks that
    map the context to the clone, and creates or updates the context
    document in ``self._contexts``. A no-op when the clone is already
    checked in.

    :param clone_path: filesystem path of the discovered activity clone
    """
    hashed_path, checkin_path = _checkin_path(clone_path)
    if exists(checkin_path):
        # Already checked in; nothing to do
        return
    _logger.debug('Checking in activity from %r', clone_path)
    try:
        spec = Spec(root=clone_path)
    except Exception:
        # Best effort: an unreadable spec only logs, it does not raise
        util.exception(_logger, 'Cannot read %r spec', clone_path)
        return
    context = spec['Activity', 'bundle_id']
    context_path = _ensure_context_path(context, hashed_path)
    # Refresh the context -> clone symlink
    if lexists(context_path):
        os.unlink(context_path)
    os.symlink(clone_path, context_path)
    # Refresh the checkin symlink (relative, so the tree stays relocatable)
    if lexists(checkin_path):
        os.unlink(checkin_path)
    # NOTE(review): presumably creates parent directories for the checkin
    # symlink — confirm against client.ensure_path
    client.ensure_path(checkin_path)
    os.symlink(relpath(context_path, dirname(checkin_path)), checkin_path)
    if self._contexts.exists(context):
        self._contexts.update(context, {'clone': 2})
    else:
        # Unknown context: register it locally from the spec contents
        _logger.debug('Register unknown local activity, %r', context)
        mtime = os.stat(spec.root).st_mtime
        self._contexts.create(guid=context, type='activity',
                title={DEFAULT_LANG: spec['name']},
                summary={DEFAULT_LANG: spec['summary']},
                description={DEFAULT_LANG: spec['description']},
                clone=2, ctime=mtime, mtime=mtime)
        icon_path = join(spec.root, spec['icon'])
        if exists(icon_path):
            self._contexts.set_blob(context, 'artifact_icon', icon_path)
            # Rasterize the SVG icon to a 32x32 PNG for the 'icon' blob
            with toolkit.NamedTemporaryFile() as f:
                toolkit.svg_to_png(icon_path, f.name, 32, 32)
                self._contexts.set_blob(context, 'icon', f.name)
    self._checkin_activity(spec)
def __init__(self, path=None, stream=None):
    """Open an incoming packet from a path or a stream.

    Exactly one of ``path`` or ``stream`` is expected. Non-seekable
    streams are first spooled into a temporary file because the tarball
    machinery needs random access. The packet header is extracted and
    validated, and ``content_type`` is derived from the underlying
    file object's compression wrapper.

    :param path: filesystem path of the packet file
    :param stream: readable or iterable object yielding packet bytes
    :raises RuntimeError: for any malformed/unreadable packet
    """
    self._file = None       # owned file object to close later, if any
    self._tarball = None
    self.header = {}
    self.content_type = None
    try:
        if stream is None:
            self._file = stream = file(path, 'rb')
        elif not hasattr(stream, 'seek'):
            # tarfile/gzip/zip might require seeking
            self._file = toolkit.NamedTemporaryFile()
            if hasattr(stream, 'read'):
                # File-like source: copy in fixed-size chunks
                while True:
                    chunk = stream.read(BUFFER_SIZE)
                    if not chunk:
                        break
                    self._file.write(chunk)
            else:
                # Plain iterable of byte chunks
                for chunk in stream:
                    self._file.write(chunk)
            self._file.flush()
            self._file.seek(0)
            stream = self._file
        self._tarball = tarfile.open('r', fileobj=stream)
        with self._extract('header') as f:
            self.header = json.load(f)
        enforce(type(self.header) is dict, 'Incorrect header')
        # Infer the MIME type from the wrapper tarfile ended up using:
        # a raw ``file`` means uncompressed tar, GzipFile means gzip,
        # otherwise assume bzip2
        if isinstance(self._tarball.fileobj, file):
            self.content_type = 'application/x-tar'
        elif isinstance(self._tarball.fileobj, gzip.GzipFile):
            self.content_type = 'application/x-compressed-tar'
        else:
            self.content_type = 'application/x-bzip-compressed-tar'
    except Exception, error:
        # Release any resources acquired so far before re-raising
        self.close()
        util.exception()
        raise RuntimeError('Malformed %r packet: %s' % (self, error))
def _load_pubkey(pubkey):
    """Convert an SSH public key to PKCS8 and derive its identity hash.

    Writes the key to a temporary file and shells out to ``ssh-keygen``
    to re-export it in PKCS8 form (so M2Crypto can consume it). When
    conversion fails and ``node.trust_users`` is set, the raw SSH key is
    kept instead; otherwise registration is refused.

    :param pubkey: SSH public key text (``<type> <base64> [comment]``)
    :returns: tuple of (SHA1 hex digest of the key material, PKCS8 or
        raw key text)
    :raises ad.Forbidden: when the key is unreadable and users are not
        trusted
    """
    pubkey = pubkey.strip()
    try:
        with toolkit.NamedTemporaryFile() as key_file:
            key_file.file.write(pubkey)
            key_file.file.flush()
            # SSH key needs to be converted to PKCS8 to let M2Crypto read it
            pubkey_pkcs8 = util.assert_call(
                    ['ssh-keygen', '-f', key_file.name, '-e', '-m', 'PKCS8'])
    except Exception:
        # Fixed typo in the original message ('registeration')
        message = 'Cannot read DSS public key gotten for registration'
        util.exception(message)
        if node.trust_users.value:
            logging.warning('Failed to read registration pubkey, '
                    'but we trust users')
            # Keep SSH key for further converting to PKCS8
            pubkey_pkcs8 = pubkey
        else:
            raise ad.Forbidden(message)
    # Hash only the base64 key material (second whitespace-separated field)
    return str(hashlib.sha1(pubkey.split()[1]).hexdigest()), pubkey_pkcs8
def _download_blob(self, url):
    """Download a blob from ``url`` into a seekable temporary file.

    Lazily creates the shared HTTP client on first use. The temporary
    file is rewound to the beginning before being returned; it is the
    caller's responsibility to close it.

    :param url: blob URL to fetch (redirects are followed)
    :returns: open temporary file positioned at offset 0
    """
    _logger.debug('Download %r blob', url)
    if self._downloader is None:
        self._downloader = http.Client()
    response = self._downloader.request('GET', url, allow_redirects=True)
    content_length = response.headers.get('Content-Length')
    content_length = int(content_length) if content_length else 0
    ostream = toolkit.NamedTemporaryFile()
    try:
        # BUGFIX: with no (or zero) Content-Length the original
        # min(content_length, BUFFER_SIZE) produced a chunk size of 0,
        # so iter_content read nothing and the blob came back empty;
        # fall back to BUFFER_SIZE in that case
        if content_length:
            chunk_size = min(content_length, BUFFER_SIZE)
        else:
            chunk_size = BUFFER_SIZE
        # pylint: disable-msg=E1103
        for chunk in response.iter_content(chunk_size=chunk_size):
            ostream.write(chunk)
    except Exception:
        # Do not leak the temporary file on a failed download
        ostream.close()
        raise
    ostream.seek(0)
    return ostream
def get(guid): path = join(local_root.value, 'cache', 'implementation', guid) if exists(path): return path _logger.debug('Fetch %r implementation', guid) # TODO Per download progress pipe.feedback('download') response = Client().request('GET', ['implementation', guid, 'data'], allow_redirects=True) content_length = int(response.headers.get('Content-Length', '0')) with toolkit.NamedTemporaryFile() as tmp_file: chunk_size = min(content_length, BUFFER_SIZE) # pylint: disable-msg=E1103 for chunk in response.iter_content(chunk_size=chunk_size): tmp_file.write(chunk) tmp_file.flush() os.makedirs(path) try: with Bundle(tmp_file.name, 'application/zip') as bundle: bundle.extractall(path) except Exception: shutil.rmtree(path, ignore_errors=True) raise topdir = os.listdir(path)[-1:] if topdir: for exec_dir in ('bin', 'activity'): bin_path = join(path, topdir[0], exec_dir) if not exists(bin_path): continue for filename in os.listdir(bin_path): os.chmod(join(bin_path, filename), 0755) return path
def __init__(self, root=None, **kwargs):
    """Initialize the packet, backing it with a temporary stream when
    no ``root`` directory is provided."""
    stream = toolkit.NamedTemporaryFile() if root is None else None
    OutPacket.__init__(self, root=root, stream=stream, **kwargs)