def test_server_basepath(self):
    """[server] base-path is read back unchanged."""
    cfg = get_ktbs_configuration(StringIO("[server]\nbase-path = myktbsroot/\n"))
    eq_(cfg.get('server', 'base-path', 1), 'myktbsroot/')
def test_server_flashallow(self):
    """[server] flash-allow parses as a boolean True."""
    cfg = get_ktbs_configuration(StringIO("[server]\nflash-allow = true\n"))
    eq_(cfg.getboolean('server', 'flash-allow'), True)
def test_logging_consolelevel(self):
    """[logging] console-level is read back as the raw string."""
    cfg = get_ktbs_configuration(StringIO("[logging]\nconsole-level = DEBUG\n"))
    eq_(cfg.get('logging', 'console-level', 1), 'DEBUG')
def test_logging_multiple_loggers(self):
    """[logging] loggers keeps a space-separated list verbatim."""
    cfg = get_ktbs_configuration(StringIO("[logging]\nloggers = ktbs rdfrest\n"))
    eq_(cfg.get('logging', 'loggers', 1), 'ktbs rdfrest')
def get_datas_from_file(self, cr, uid, file_doc, fields, dialect, encoding="utf-8", context=None):
    """Decode the base64 payload attached to *file_doc* and expose it as a
    CSV DictReader using the given field names, dialect and encoding."""
    decoded = base64.b64decode(file_doc.datas)
    csv_buffer = StringIO(decoded)
    return unicodecsv.DictReader(csv_buffer, fieldnames=fields,
                                 encoding=encoding, dialect=dialect)
def test_logging_filelevel(self):
    """[logging] file-level is read back as the raw string."""
    cfg = get_ktbs_configuration(StringIO("[logging]\nfile-level = WARNING\n"))
    eq_(cfg.get('logging', 'file-level', 1), 'WARNING')
def _write_pdb_to_stringio(self, cys_cys_atomidx_set=None, disulfide_conect=True, noter=False, **kwargs): stringio_file = StringIO() stringio_file_out = StringIO() self.parm.write_pdb(stringio_file, **kwargs) stringio_file.seek(0) lines = stringio_file.readlines() if noter: for line in lines: if line.startswith("TER"): lines.remove(line) # TODO: update ParmEd? if disulfide_conect: conect_record = [ 'CONECT%5d%5d\n' % (idx0 + 1, idx1 + 1) for (idx0, idx1) in cys_cys_atomidx_set ] conect_str = ''.join(conect_record) lines[-1] = conect_str + 'END\n' if noter: for line in lines: if line.startswith("TER"): lines.remove(line) stringio_file_out.writelines(lines) stringio_file_out.seek(0) return stringio_file_out
def sendalert(alerts, attachments=None, is_all_fail=False):
    """E-mail a failure report built from *alerts*, retrying up to 3 times.

    :param alerts: iterable of alert message strings.
    :param attachments: optional list of attachments (bug fix: the original
        used a mutable default ``[]`` shared across calls).
    :param is_all_fail: selects the ALL_FAIL recipient list and subject.
    """
    attachments = [] if attachments is None else attachments
    email = Email()
    emails = ONE_FAIL_NOTIFICATIONS['emails']
    if is_all_fail:
        emails = ALL_FAIL_NOTIFICATIONS['emails']
    p = subprocess.Popen(['hostname'], stdout=subprocess.PIPE)
    # Bug fix: `hostname` output ends with a newline, which previously leaked
    # into the middle of the subject line.
    hostname = p.stdout.read().strip()
    report = StringIO()
    for x in alerts:
        print(x)
        report.writelines([x, '\r\n\r\n'])
    body = report.getvalue()
    report.close()
    subject = '[WARN] At least one tested failed - %s - %s' % (hostname, time.ctime())
    if is_all_fail:
        subject = '[SERVE] All TEST FAILED for a service - %s - %s' % (
            hostname, time.ctime())
    email.sender = 'Gagein <*****@*****.**>'
    retries = 3
    while retries > 0:
        try:
            email.send(emails, subject, body, '', attachments)
            retries = 0
        except Exception as ex:
            retries -= 1
            print('... Retry due to exception: %s' % ex)
def test_rdf_database_forceinit(self):
    """[rdf_database] force-init parses as a boolean True."""
    cfg = get_ktbs_configuration(StringIO("[rdf_database]\nforce-init = true\n"))
    eq_(cfg.getboolean('rdf_database', 'force-init'), True)
def test_server_hostport(self):
    """[server] port parses as an integer."""
    cfg = get_ktbs_configuration(StringIO("[server]\nport = 4444\n"))
    eq_(cfg.getint('server', 'port'), 4444)
def test_server_hostname(self):
    """[server] host-name is read back unchanged."""
    cfg = get_ktbs_configuration(StringIO("[server]\nhost-name = testhost\n"))
    eq_(cfg.get('server', 'host-name', 1), 'testhost')
def test_server_ipv4(self):
    """[server] force-ipv4 parses as a boolean True."""
    cfg = get_ktbs_configuration(StringIO("[server]\nforce-ipv4 = true\n"))
    eq_(cfg.getboolean('server', 'force-ipv4'), True)
class UserEnvFile(object):
    """
    Behaves like a file object, but instead of being directly mapped to a
    file it buffers everything in memory and writes it to that file from
    inside of a UserEnv when close() is called.
    """

    def __init__(self, userenv, filename):
        # In-memory buffer holding all content until close().
        self.stringio = StringIO()
        self.userenv = userenv
        self.filename = filename

    def read(self, *args, **kwargs):
        # Bug fix: the result was computed but never returned.
        return self.stringio.read(*args, **kwargs)

    def readlines(self, *args, **kwargs):
        # Bug fix: the result was computed but never returned.
        return self.stringio.readlines(*args, **kwargs)

    def write(self, *args, **kwargs):
        return self.stringio.write(*args, **kwargs)

    def writelines(self, *args, **kwargs):
        return self.stringio.writelines(*args, **kwargs)

    def seek(self, *args, **kwargs):
        return self.stringio.seek(*args, **kwargs)

    def close(self):
        """Flush the buffered contents into the UserEnv, then release."""
        self.userenv.write_string_to_file(self.stringio.getvalue(), self.filename)
        self.userenv = None
        self.stringio.close()
def sort_changelog(stream):
    """Return a StringIO with the changelog entries of *stream* ordered
    newest-first, each entry followed by a blank line."""
    sorted_log = StringIO()
    for entry_time, count, elines in sorted(_split_changelog(stream), reverse=True):
        sorted_log.writelines(elines)
        sorted_log.write("\n")
    return sorted_log
def __repairFile(self, file):
    '''
    Repair JSON file if necessary

    If the JSON file is not closed properly, perhaps due a system
    crash during a test run, then the JSON is repaired by
    discarding the trailing, incomplete item and appending braces
    to the file to close the JSON object.

    The repair is performed on a string buffer, and the given file
    is never written to. This allows the file to be safely read
    during a test run.

    :return: If no repair occured, then ``file`` is returned.
             Otherwise, a new file object containing the repaired JSON
             is returned.
    '''
    file.seek(0)
    lines = file.readlines()

    # JSON object was not closed properly.
    #
    # To repair the file, we execute these steps:
    #   1. Find the closing brace of the last, properly written
    #      test result.
    #   2. Discard all subsequent lines.
    #   3. Remove the trailing comma of that test result.
    #   4. Append enough closing braces to close the json object.
    #   5. Return a file object containing the repaired JSON.

    # Each non-terminal test result ends with this line:
    safe_line = 3 * JSONWriter.INDENT * ' ' + '},\n'

    # Search for the last occurence of safe_line.
    # NOTE(review): range(-1, -len(lines), -1) never inspects lines[0]
    # (index -len(lines)); harmless in practice since the first line is the
    # opening brace, not safe_line — confirm if that assumption holds.
    safe_line_num = None
    for i in range(-1, - len(lines), -1):
        if lines[i] == safe_line:
            safe_line_num = i
            break

    if safe_line_num is None:
        raise Exception('failed to repair corrupt result file: ' + file.name)

    # Remove corrupt lines.
    lines = lines[0:(safe_line_num + 1)]

    # Remove trailing comma.
    lines[-1] = 3 * JSONWriter.INDENT * ' ' + '}\n'

    # Close json object.
    lines.append(JSONWriter.INDENT * ' ' + '}\n')
    lines.append('}')

    # Return new file object containing the repaired JSON.
    new_file = StringIO()
    new_file.writelines(lines)
    new_file.flush()
    new_file.seek(0)
    return new_file
def test_readers2iterators():
    """readers2iterators pairs each stream with its column converter and
    yields the decoded values in order (NaN decodes to 0.0)."""
    binary_stream = StringIO()
    text_stream = StringIO()
    binary_stream.write(struct.pack("<dddd", 0.0, 1.0, 2.0, float("nan")))
    for value in ("hw", "helo", "h,w", "helo, world"):
        text_stream.write(json.dumps(value) + "\n")
    binary_stream.seek(0)
    text_stream.seek(0)
    converters = [readers2rows.column2i_ledouble, readers2rows.column2i_text]
    iterators = readers2rows.readers2iterators([binary_stream, text_stream], converters)
    assert list == type(iterators)
    assert 2 == len(iterators)
    doubles = list(iterators[0])
    texts = list(iterators[1])
    assert 4 == len(doubles)
    assert 4 == len(texts)
    assert [0.0, 1.0, 2.0, 0.0] == doubles
    assert ["hw", "helo", "h,w", "helo, world"] == texts
def test_server_maxtriples(self):
    """[server] max-triples parses as an integer."""
    cfg = get_ktbs_configuration(StringIO("[server]\nmax-triples = 1200\n"))
    eq_(cfg.getint('server', 'max-triples'), 1200)
def sendalert(alerts, attachments=None, is_all_fail=False):
    """Build a failure report from *alerts* and e-mail it, with retries.

    :param alerts: iterable of alert message strings.
    :param attachments: optional attachments list (bug fix: mutable default
        ``[]`` was shared across calls; replaced with a None sentinel).
    :param is_all_fail: use the ALL_FAIL recipient list and subject line.
    """
    attachments = [] if attachments is None else attachments
    email = Email()
    emails = ONE_FAIL_NOTIFICATIONS['emails']
    if is_all_fail:
        emails = ALL_FAIL_NOTIFICATIONS['emails']
    p = subprocess.Popen(['hostname'], stdout=subprocess.PIPE)
    # Bug fix: strip the trailing newline emitted by `hostname`, which
    # previously broke the subject line in two.
    hostname = p.stdout.read().strip()
    report = StringIO()
    for x in alerts:
        print(x)
        report.writelines([x, '\r\n\r\n'])
    body = report.getvalue()
    report.close()
    subject = '[WARN] At least one tested failed - %s - %s' % (hostname, time.ctime())
    if is_all_fail:
        subject = '[SERVE] All TEST FAILED for a service - %s - %s' % (hostname, time.ctime())
    email.sender = 'Gagein <*****@*****.**>'
    retries = 3
    while retries > 0:
        try:
            email.send(emails, subject, body, '', attachments)
            retries = 0
        except Exception as ex:
            retries -= 1
            print('... Retry due to exception: %s' % ex)
def _instance_find(self, name, ignore_case=False, limit=None, regex=None):
    """Run the `locate` command for *name* and return the matches.

    :param ignore_case: pass -i to locate.
    :param limit: maximum number of results (passed as -l).
    :param regex: optional regular expression used to post-filter matches.
    :return: a StringIO containing the surviving matches, newline-joined.
    :raises PyLocatedException: if *regex* does not compile.
    """
    args = ['locate', name]
    if ignore_case:
        args.append('-i')
    # NOTE(review): `long` is the Python 2 integer type; this module appears
    # to target Python 2 — confirm before porting.
    if limit and isinstance(limit, (int, long)):
        args.extend(['-l', str(limit)])
    if self.db_path:
        args.extend(['-d', self.db_path])
    lines = _docommand(args).split("\n")
    if regex:
        try:
            compiled = re.compile(regex)
        except Exception:
            raise PyLocatedException("Invalid regular expression")
        lines = filter(lambda x: compiled.match(x), lines)
    # Bug fix: `x is not ''` tested object identity (an interning accident in
    # CPython), not equality; use a real emptiness test instead.
    out_put = filter(lambda x: x != '', lines)
    # Renamed from `buffer`, which shadowed the builtin of the same name.
    result = StringIO()
    result.writelines("\n".join(out_put))
    return result
def test_logging_filename(self):
    """[logging] filename is read back unchanged."""
    cfg = get_ktbs_configuration(StringIO("[logging]\nfilename = /var/log/myktbslogs.log\n"))
    assert cfg.get('logging', 'filename', 1) == '/var/log/myktbslogs.log'
def get_forms(self, url=None, *args, **kw):
    """Fetch the page, parse its forms and wrap each one in a FORM."""
    posturl = url or self.get_url()
    page = StringIO(self.get_html(url, *args, **kw))
    parsed = ClientForm.ParseFile(page, posturl, backwards_compat=False)
    return [FORM(self, form) for form in parsed]
def test_logging_multiple_loggers(self):
    """[logging] loggers keeps a space-separated list verbatim."""
    cfg = get_ktbs_configuration(StringIO("[logging]\nloggers = ktbs rdfrest\n"))
    assert cfg.get('logging', 'loggers', 1) == 'ktbs rdfrest'
def test_logging_filelevel(self):
    """[logging] file-level is read back as the raw string."""
    cfg = get_ktbs_configuration(StringIO("[logging]\nfile-level = WARNING\n"))
    assert cfg.get('logging', 'file-level', 1) == 'WARNING'
def test_logging_one_logger(self):
    """[logging] loggers with a single entry is read back unchanged."""
    cfg = get_ktbs_configuration(StringIO("[logging]\nloggers = ktbs\n"))
    assert cfg.get('logging', 'loggers', 1) == 'ktbs'
def test_server_corsalloworigin(self):
    """cors-allow-origin is read from [server].

    NOTE(review): the stored value is the string 'xxx', yet the test calls
    getboolean(), which normally rejects non-boolean strings — confirm
    whether config.get() was intended. Removed the dead trailing `pass`.
    """
    fhandler = StringIO()
    fhandler.writelines(["[server]\n", "cors-allow-origin = xxx\n"])
    fhandler.seek(0)
    ktbs_config = get_ktbs_configuration(fhandler)
    eq_(ktbs_config.getboolean('server', 'cors-allow-origin'), True)
def test_rdf_database_repository(self):
    """[rdf_database] repository path is read back unchanged."""
    cfg = get_ktbs_configuration(StringIO("[rdf_database]\nrepository = /var/myktbs/\n"))
    assert cfg.get('rdf_database', 'repository', 1) == '/var/myktbs/'
def test_plugins_one_item(self):
    """Adding one plugin lists it after the default post_via_get plugin."""
    cfg = get_ktbs_configuration(StringIO("[plugins]\ntest_preproc = true\n"))
    # post_via_get is always present as a default plugin in the config.
    eq_(cfg.options('plugins'), ['post_via_get', 'test_preproc'])
def test_rdf_database_forceinit(self):
    """[rdf_database] force-init parses as a boolean True."""
    cfg = get_ktbs_configuration(StringIO("[rdf_database]\nforce-init = true\n"))
    assert cfg.getboolean('rdf_database', 'force-init') == True
def test_server_maxtriples(self):
    """[server] max-triples parses as an integer."""
    cfg = get_ktbs_configuration(StringIO("[server]\nmax-triples = 1200\n"))
    assert cfg.getint('server', 'max-triples') == 1200
def test_server_hostport(self):
    """[server] port parses as an integer."""
    cfg = get_ktbs_configuration(StringIO("[server]\nport = 4444\n"))
    assert cfg.getint('server', 'port') == 4444
def test_server_maxbytes(self):
    """[server] max-bytes parses as an integer."""
    cfg = get_ktbs_configuration(StringIO("[server]\nmax-bytes = 1234\n"))
    assert cfg.getint('server', 'max-bytes') == 1234
def test_server_basepath(self):
    """[server] base-path is read back unchanged."""
    cfg = get_ktbs_configuration(StringIO("[server]\nbase-path = myktbsroot/\n"))
    assert cfg.get('server', 'base-path', 1) == 'myktbsroot/'
def test_server_ipv4(self):
    """[server] force-ipv4 parses as a boolean True."""
    cfg = get_ktbs_configuration(StringIO("[server]\nforce-ipv4 = true\n"))
    assert cfg.getboolean('server', 'force-ipv4') == True
def test_server_hostname(self):
    """[server] host-name is read back unchanged."""
    cfg = get_ktbs_configuration(StringIO("[server]\nhost-name = testhost\n"))
    assert cfg.get('server', 'host-name', 1) == 'testhost'
def test_logging_consolelevel(self):
    """[logging] console-level is read back as the raw string."""
    cfg = get_ktbs_configuration(StringIO("[logging]\nconsole-level = DEBUG\n"))
    assert cfg.get('logging', 'console-level', 1) == 'DEBUG'
def test_server_flashallow(self):
    """[server] flash-allow parses as a boolean True."""
    cfg = get_ktbs_configuration(StringIO("[server]\nflash-allow = true\n"))
    assert cfg.getboolean('server', 'flash-allow') == True
def test_server_corsalloworigin(self):
    """cors-allow-origin is read from [server].

    NOTE(review): the stored value is the string 'xxx', yet the test calls
    getboolean(), which normally rejects non-boolean strings — confirm
    whether config.get() was intended. Removed the dead trailing `pass`.
    """
    fhandler = StringIO()
    fhandler.writelines(["[server]\n", "cors-allow-origin = xxx\n"])
    fhandler.seek(0)
    ktbs_config = get_ktbs_configuration(fhandler)
    assert ktbs_config.getboolean('server', 'cors-allow-origin') == True
def test_plugins_one_item(self):
    """Adding one plugin lists it after the default post_via_get plugin."""
    fhandler = StringIO()
    fhandler.writelines(["[plugins]\n", "test_preproc = true\n"])
    fhandler.seek(0)
    ktbs_config = get_ktbs_configuration(fhandler)
    # Bug fix: the original read `assert x, [...]` — everything after the
    # comma is the assert *message*, so the test always passed. Compare the
    # option list for real (there's a default plugin: post_via_get).
    assert ktbs_config.options('plugins') == ['post_via_get', 'test_preproc']
def instance_ssh_keys(request, application_id, cookie):
    """Serve the applicant's public SSH keys as plain text, guarded by the
    application cookie."""
    app = get_object_or_404(InstanceApplication, pk=application_id)
    if cookie != app.cookie:
        template = get_template("403.html")
        return HttpResponseForbidden(content=template.render(RequestContext(request)))
    keys = "".join(k.key_line() for k in app.applicant.sshpublickey_set.all())
    return HttpResponse(keys, mimetype="text/plain")
def test_server_nocache(self):
    """[server] no-cache parses as a boolean here.

    Be careful: ConfigParser treats this parameter as boolean, whereas in
    standalone mode it is defined as an integer!
    """
    cfg = get_ktbs_configuration(StringIO("[server]\nno-cache = true\n"))
    eq_(cfg.getboolean('server', 'no-cache'), True)
def get_forms(self, url=None, *args, **kw):
    """Fetch the page and return its forms wrapped in FORM objects."""
    if url is None:
        url = self.get_url()
    # Note: distinct names for the buffer and the loop variable (the original
    # reused `f` for both).
    page = StringIO(self.get_html(url, *args, **kw))
    parsed = mechanize.ParseFile(page, url, backwards_compat=False)
    return [FORM(self, form) for form in parsed]
def instance_ssh_keys(request, application_id, cookie):
    """Serve the applicant's public SSH keys as plain text, guarded by the
    application cookie."""
    app = get_object_or_404(InstanceApplication, pk=application_id)
    if cookie != app.cookie:
        template = get_template("403.html")
        return HttpResponseForbidden(content=template.render(RequestContext(request)))
    keys = "".join(k.key_line() for k in app.applicant.sshpublickey_set.all())
    return HttpResponse(keys, mimetype="text/plain")
def transform(self, data, options=None):
    # Transform image *data* into self.format at (width, height); size may be
    # overridden per-call via the options dict.
    if self._validate(data) is None:
        return None
    if self.format is None:
        return None
    width = self.width
    height = self.height
    # Allow to override the size settings via the options dict
    if options is not None:
        width = options.get("width", width)
        height = options.get("height", height)
    result = TransformResult(None)
    try:
        # If we already got a file-like iterator use it
        if isinstance(data, file):
            orig = data
        else:
            orig = StringIO()
            orig.writelines(data)
            orig.seek(0)
        try:
            pil_image = Image.open(orig)
        except IOError, e:
            result.errors = str(e)
            log_debug("Error %s while transforming an Image in %s." % (str(e), self.name))
            return result
        transparency = pil_image.info.get("transparency", False)
        if self.format in ["jpeg", "ppm"]:
            # JPEG/PPM have no alpha channel; force RGB.
            pil_image.draft("RGB", pil_image.size)
            pil_image = pil_image.convert("RGB")
        if width is None:
            width = pil_image.size[0]
        if height is None:
            height = pil_image.size[1]
        if width and height:
            pil_image.thumbnail((width, height), Image.ANTIALIAS)
        transformed = StringIO()
        # Only use transparency in the supported modes
        if self.format == "png" and pil_image.mode not in ("P", "L"):
            pil_image.save(transformed, self.format)
        else:
            pil_image.save(transformed, self.format, transparency=transparency)
        transformed.seek(0)
        # NOTE(review): the function body appears truncated here — the outer
        # `try` has no except/finally clause and `result`/`transformed` are
        # never returned in the visible text. Recover the missing tail from
        # version control before editing further.
def consume_app(app, env):
    """Drive a WSGI application to completion.

    Returns a tuple of (the (status, headers, exc_info) triple passed to
    start_response, the complete response body as a string).
    """
    body = StringIO()
    start = []

    def start_response(status, headers, exc_info=None):
        start.append((status, headers, exc_info))
        return body.write

    for chunk in app(env, start_response):
        body.write(chunk)
    return start[0], body.getvalue()
def _get_buffer_from_pipe(process_pipe, regex): process_pipe = (a for a in process_pipe.split("\n") if a) if regex: try: compiled = re.compile(regex) except Exception: raise PyLocatedException("Invalid regular expression") process_pipe = (a for a in process_pipe if compiled.match(a) and a) buffer_ = StringIO() buffer_.writelines("\n".join(process_pipe)) return buffer_
def test_ns_prefix_one_item(self):
    """A user prefix is listed before the two defaults and is retrievable."""
    fhandler = StringIO()
    fhandler.writelines(["[ns_prefix]\n", "foaf = http://xmlns.com/foaf/0.1/\n"])
    fhandler.seek(0)
    ktbs_config = get_ktbs_configuration(fhandler)
    # In this case, foaf is the first prefix added before the 2
    # default ones added by get_ktbs_configuration().
    # Bug fix: the original read `assert x, [...]` — the list after the comma
    # is the assert *message*, so nothing was actually compared.
    assert ktbs_config.options('ns_prefix') == ['foaf', '_', 'skos']
    assert ktbs_config.get('ns_prefix', 'foaf', 1) == 'http://xmlns.com/foaf/0.1/'
def send_merge_directive(self, item):
    """Let the user compose a merge directive and mail it as a [MERGE] request."""
    from bzrlib.plugins.gtk.mergedirective import SendMergeDirectiveDialog
    from cStringIO import StringIO
    dialog = SendMergeDirectiveDialog(self.branch, self.revids[0])
    if dialog.run() == Gtk.ResponseType.OK:
        directive_buf = StringIO()
        directive_buf.writelines(dialog.get_merge_directive().to_lines())
        mail_client = self.branch.get_config().get_mail_client()
        mail_client.compose_merge_request(dialog.get_mail_to(), "[MERGE]",
                                          directive_buf.getvalue())
    dialog.destroy()
def test_server_nocache(self):
    """[server] no-cache parses as a boolean here.

    Be careful: ConfigParser treats this parameter as boolean, whereas in
    standalone mode it is defined as an integer!
    """
    cfg = get_ktbs_configuration(StringIO("[server]\nno-cache = true\n"))
    assert cfg.getboolean('server', 'no-cache') == True
def instance_ssh_keys(request, application_id, cookie):
    # Serves the ssh keys of an applicant so they can be passed to ganeti
    # while creating the instance; access is guarded by the application cookie.
    app = get_object_or_404(InstanceApplication, pk=application_id)
    if cookie != app.cookie:
        template = get_template("403.html")
        return HttpResponseForbidden(content=template.render(request=request))
    keys = "".join(k.key_line() for k in app.applicant.sshpublickey_set.all())
    return HttpResponse(keys, content_type="text/plain")
def instance_ssh_keys(request, application_id, cookie):
    # Serves the ssh keys of an applicant so they can be passed to ganeti
    # while creating the instance; access is guarded by the application cookie.
    app = get_object_or_404(InstanceApplication, pk=application_id)
    if cookie != app.cookie:
        template = get_template("403.html")
        return HttpResponseForbidden(content=template.render(RequestContext(request)))
    keys = "".join(k.key_line() for k in app.applicant.sshpublickey_set.all())
    return HttpResponse(keys, content_type="text/plain")
def get_changelog(pkgdirurl, another=None, svn=True, rev=None, size=None,
                  submit=False, sort=False, template=None, macros=None,
                  exported=None, oldlog=False):
    """Generates the changelog for a given package URL

    @another: a stream with the contents of a changelog to be merged with
              the one generated
    @svn: enable changelog from svn
    @rev: generate the changelog with the changes up to the given revision
    @size: the number of revisions to be used (as in svn log --limit)
    @submit: defines whether the latest unreleased log entries should have
             the version parsed from the spec file
    @sort: should changelog entries be reparsed and sorted after appending
           the oldlog?
    @template: the path to the cheetah template used to generate the
               changelog from svn
    @macros: a list of tuples containing macros to be defined when
             parsing the version in the changelog
    @exported: the path of a directory containing an already existing
               checkout of the package, so that the spec file can be
               parsed from there
    @oldlog: if set it will try to append the old changelog file defined
             in oldurl in repsys.conf
    """
    # Bug fix: `macros=[]` was a mutable default shared between calls; a None
    # sentinel preserves the old behaviour without the shared state.
    if macros is None:
        macros = []
    newlog = StringIO()
    if svn:
        rawsvnlog = svn2rpm(pkgdirurl, rev=rev, size=size, submit=submit,
                            template=template, macros=macros, exported=exported)
        newlog.write(rawsvnlog)
    if another:
        newlog.writelines(another)
    if oldlog:
        newlog.writelines(get_old_log(pkgdirurl))
    if sort:
        newlog.seek(0)
        newlog = sort_changelog(newlog)
    newlog.seek(0)
    return newlog
def get_patch(cls):
    """Build a unified diff between the files in cls.OLD_DIR and cls.NEW_DIR.

    Only regular files present in OLD_DIR are diffed; paths in the diff
    headers are prefixed with a/<BASE_NAME>/ and b/<BASE_NAME>/.
    Returns the whole patch as one string.
    """
    patchfile = StringIO()
    for name in os.listdir(cls.OLD_DIR):
        oldname = os.path.join(cls.OLD_DIR, name)
        newname = os.path.join(cls.NEW_DIR, name)
        if not os.path.isfile(oldname):
            continue
        # Bug fix: file handles were opened and never closed (resource leak);
        # context managers guarantee closure.
        with open(oldname) as oldfile:
            oldlines = oldfile.readlines()
        with open(newname) as newfile:
            newlines = newfile.readlines()
        patchfile.writelines(difflib.unified_diff(
            oldlines, newlines,
            'a/%s/%s' % (cls.BASE_NAME, name),
            'b/%s/%s' % (cls.BASE_NAME, name)))
    return patchfile.getvalue()
def get_forms(self, url=None, data=None, timeout=None):
    """Return the forms of the current page wrapped in Form objects,
    optionally navigating to *url* first."""
    if url:
        self.go(url, data, timeout)
    page = StringIO(self.get_html())
    parsed = mechanize.ParseFile(page, self.get_url(), backwards_compat=False)
    return [Form(self, form) for form in parsed]
def _read_file(cls, name, prompt='>'): file_h = None if name == '-': file_h = sys.stdin elif name == '+': file_h = StringIO() while True: content = raw_input('{} '.format(prompt.strip())) if not content: break file_h.writelines([content]) file_h.seek(0) else: file_h = file(name) return [line.strip() for line in file_h.readlines() if not line.startswith('#')]
def sendalert(alerts, hostname, attachments=None):
    """E-mail a server-performance alert report.

    :param alerts: list whose first element is a summary tag used in the
        subject; the remaining elements are the report body lines.
    :param hostname: host the alerts refer to.
    :param attachments: optional attachments list (bug fix: the original used
        a mutable default ``[]`` shared across calls).
    """
    attachments = [] if attachments is None else attachments
    email = Email()
    report = StringIO()
    report.writelines(['\t\t Server Performance Alert [%s]\r\n' % hostname,
                       '\r\n\r\n'])
    for x in alerts[1:]:
        print(x)
        report.writelines([x, '\r\n\r\n'])
    body = report.getvalue()
    report.close()
    subject = 'Server Performance Alert [%s] [%s] - %s' % (alerts[0], hostname, time.ctime())
    email.sender = 'Gagein <*****@*****.**>'
    # NOTE(review): `emails` is not defined in this function — it appears to
    # rely on a module-level global; confirm it exists at the call site.
    email.send(emails, subject, body, '', attachments)