class Lexer: def __init__(self, stream=None, text=''): if text: self.stream = StringIO(text) else: self.stream = stream self.next = self._get_char() def _get_char(self): c = self.stream.read(1) if c == '!': self.stream.read(1) # ignore escaped char c = self._get_char() return c def get_input(self): print('get_input called', end='') while self.next: result = self.next self.next = self._get_char() print("returns '{}'".format(result)) return result return '' def __iter__(self): return self def __next__(self): c = self.get_input() if c: return c else: raise StopIteration()
def test_inject_yum_mirrors(orig_repos_cfg, mirrors_dict, expected_repos_cfg, expected_repos_proxied_cfg):
    """Check mirror injection output both without and with proxy access."""
    cases = (
        (False, expected_repos_cfg),
        (True, expected_repos_proxied_cfg),
    )
    for allow_proxy, expected in cases:
        out_buf = StringIO()
        inject_yum_mirrors(mirrors_dict, StringIO(orig_repos_cfg), out_buf, allow_proxy)
        out_buf.seek(0)
        assert expected == out_buf.read()
def test_inject_yum_mirrors(
    orig_repos_cfg, mirrors_dict, expected_repos_cfg, expected_repos_proxied_cfg
):
    """Verify yum mirror injection, then re-verify with proxies allowed."""
    no_proxy_out = StringIO()
    inject_yum_mirrors(mirrors_dict, StringIO(orig_repos_cfg), no_proxy_out)
    no_proxy_out.seek(0)
    assert no_proxy_out.read() == expected_repos_cfg
    # Test when proxies are allowed
    proxied_out = StringIO()
    inject_yum_mirrors(
        mirrors_dict, StringIO(orig_repos_cfg), proxied_out, True
    )
    proxied_out.seek(0)
    assert proxied_out.read() == expected_repos_proxied_cfg
def test_contradictory_date_entries_warn(self):
    """4.8.5.3 Emit warning on contradictory date entries."""
    document_fields = {
        "Author": {"ForeName": "John", "LastName": "Smith"},
        "DateCompleted": {"Year": "2011", "Month": "01", "Day": "01"},
        "DateRevised": {"Year": "2010", "Month": "01", "Day": "01"},
    }
    stream = StringIO(
        wrap_document_text(construct_document_from(**document_fields))
    )
    stderr = StringIO()
    self.patch(sys, "stderr", stderr)
    result = parsexml.parse_element_tree(
        parsexml.file_to_element_tree(stream)
    )
    stderr.seek(0)
    captured = stderr.read()
    self.assertThat(result["pubDate"], Is(None))
    self.assertThat(result["reviseDate"], Is(None))
    self.assertThat(captured, Contains("is greater than"))
def test_contradictory_date_entries_warn(self):
    """4.8.5.3 Emit warning on contradictory date entries."""
    doc_fields = {
        "Author": {"ForeName": "John", "LastName": "Smith"},
        "DateCompleted": {"Year": "2011", "Month": "01", "Day": "01"},
        "DateRevised": {"Year": "2010", "Month": "01", "Day": "01"},
    }
    document = wrap_document_text(construct_document_from(**doc_fields))
    err_stream = StringIO()
    self.patch(sys, "stderr", err_stream)
    tree = parsexml.file_to_element_tree(StringIO(document))
    result = parsexml.parse_element_tree(tree)
    err_stream.seek(0)
    stderr_out = err_stream.read()
    self.assertThat(result["pubDate"], Is(None))
    self.assertThat(result["reviseDate"], Is(None))
    self.assertThat(stderr_out, Contains("is greater than"))
def dumps(collection, **json_args):
    """
    Dump a collection of JSON objects into a newline-delimited string.

    Primarily included to match the `json` library's functionality.  This
    may be more appropriate:

        >>> os.linesep.join(list(map(json.dumps, collection)))

    Parameters
    ----------
    collection : iter
        Iterable that produces one JSON object per iteration.
    json_args : dict, optional
        Additional keyword arguments for `NLJWriter()`.

    Returns
    -------
    str
    """
    f = StringIO()
    # try/finally rather than a with-block on ``f``: StringIO had no
    # __exit__ in older Python versions.
    try:
        with NLJWriter(f, 'w', **json_args) as dst:
            for item in collection:
                dst.write(item)
        # getvalue() replaces the original seek(0)/read() pair.
        return f.getvalue()
    finally:
        f.close()
def get_pydoc(spec):
    """Return the pydoc help text for *spec*, or None when it cannot be loaded."""
    obj = load_obj(spec)
    if not obj:
        return None
    buf = StringIO()
    pydoc.Helper(output=buf).help(obj)
    buf.seek(0)
    return buf.read()
def __str__(self):
    """Render the class name followed by a pretty-printed extractor dump."""
    buf = StringIO()
    pprint.pprint(self.extractors, buf)
    buf.seek(0)
    dumped = buf.read()
    if not dumped:
        return "%s[none]" % (self.__class__.__name__)
    return "%s[\n%s\n]" % (self.__class__.__name__, dumped)
def test_ping(mping):
    """``main.ping`` should contact the server and report it alive."""
    args = argparse.Namespace(server='test.datafind.com:443')
    stream = StringIO()
    main.ping(args, stream)
    assert mping.called_with(host=args.server)
    stream.seek(0)
    expected = 'LDRDataFindServer at test.datafind.com:443 is alive'
    assert stream.read().rstrip() == expected
def test_show(self):
    """Establish that the show method will properly route to an
    alternate file.
    """
    buf = StringIO()
    error = TowerCLIError("Fe fi fo fum; I smell the blood of an Englishman.")
    error.show(file=buf)
    buf.seek(0)
    self.assertIn("Fe fi fo fum;", buf.read())
def test_show(self):
    """Establish that the show method will properly route to an
    alternate file.
    """
    capture = StringIO()
    exc = TowerCLIError('Fe fi fo fum; I smell the blood of an Englishman.')
    exc.show(file=capture)
    capture.seek(0)
    rendered = capture.read()
    self.assertIn('Fe fi fo fum;', rendered)
def test_parsing_file_with_no_fields_throws(self):
    """4.5.3.4 Print error file has no relevant fields."""
    empty_doc = ("<PubmedArticleSet><PubmedArticle>"
                 "</PubmedArticle></PubmedArticleSet>")
    stream = StringIO(empty_doc)
    captured = StringIO()
    self.patch(sys, "stderr", captured)
    parsexml.parse_element_tree(parsexml.file_to_element_tree(stream))
    captured.seek(0)
    self.assertThat(captured.read(), Contains("skipping"))
def test_postprocess_cache_sft():
    """SFT types should have the .gwf extension rewritten to .sft."""
    args = argparse.Namespace(
        type='TEST_1800SFT',
        lal_cache=False,
        names_only=False,
        frame_cache=False,
        gaps=None,
    )
    stream = StringIO()
    main.postprocess_cache(URLS, args, stream)
    stream.seek(0)
    expected = OUTPUT_URLS.replace('.gwf', '.sft')
    assert stream.read() == expected
class Lexer: def __init__(self, stream=None, text=''): if text: self.stream = StringIO(text) else: self.stream = stream self.next = self.get_char() def get_char(self): c = self.stream.read(1) if c == '!': ignore = self.stream.read(1) c = self.get_char() return c def get_input(self): while self.next: result = self.next self.next = self.get_char() yield result return
class FakeStorletFileIn(FakeStorletFile):
    """Readable fake storlet input file backed by an in-memory string."""

    def __init__(self, input_string, metadata):
        super(FakeStorletFileIn, self).__init__()
        # Serve the fixed input text through a StringIO so read() honours
        # normal file-like semantics (position, partial reads).
        self._input_string = StringIO(input_string)
        self._metadata = metadata
        # Read position; kept for parity with the real storlet file API.
        self._pos = 0

    def read(self, size=-1):
        """Read up to *size* characters (everything remaining when negative)."""
        return self._input_string.read(size)

    def get_metadata(self):
        """Return the metadata mapping supplied at construction."""
        return self._metadata
def push(name, api, domain):
    """Push the current repository to openshift on a temporary deploy
    branch and return the deployed application's URL.

    :param name: application name
    :param api: API endpoint/handle passed through to the git/app helpers
    :param domain: openshift domain
    """
    repo = git.Repo(os.getcwd())
    branch = "temp-{}".format(str(uuid.uuid4())[:8])
    set_deploy_branch(name, branch, api, domain)
    remote = git_url(name, api, domain)
    if is_dirty():
        print("Nuking changes.")
        git.reset(repo, "hard")
    with TempBranch(branch, repo, delete=True):
        # Write and stage the openshift deployment files.
        for fname, file_info in openshift_files.items():
            with open(fname, 'w') as f:
                f.write(file_info.get("contents", ""))
            repo.stage(fname)
        repo.do_commit("Commit openshift files")
        push_out = StringIO()
        push_err = StringIO()
        print("Pushing to openshift (may take a few minutes)")
        git.push(repo, remote, "refs/heads/{}".format(branch),
                 outstream=push_out, errstream=push_err)
        push_out.seek(0)
        out = push_out.read()
        if not re.match(r'^Push to .* successful.', out):
            print("There was a failure while pushing")
            print("---BEGIN STDERR---")
            push_err.seek(0)
            print(push_err.read())
            print("---BEGIN STDOUT---")
            print(out)
            print("There was a failure while pushing")
        git.rm(repo, openshift_files.keys())
        # BUG FIX: the original used ``map(os.remove, openshift_files.keys())``
        # which is lazy on Python 3 and therefore never deleted anything.
        for fname in openshift_files.keys():
            os.remove(fname)
    return get_app(name, api, domain)['app_url']
class EncodedStringIO(object):
    """In-memory text buffer that also exposes an ``encoding`` attribute,
    mimicking the interface of an encoded file object.
    """

    def __init__(self):
        self.encoding = "ascii"
        self._data = StringIO()

    def write(self, data):
        """Write *data* at the current position; return characters written."""
        return self._data.write(data)

    def read(self):
        """Read from the current position to the end of the buffer."""
        return self._data.read()

    def flush(self):
        """Flush the underlying buffer (a no-op for StringIO)."""
        self._data.flush()
def inject_yum_mirrors_str(mirrors, yum_cfg_str, allow_proxy=False):
    """Inject yum mirrors into the given yum configuration string

    :param Mapping mirrors:  A mapping of mirror names to URLs
    :param str yum_cfg_str:  YUM configuration string to adjust
    :param bool allow_proxy: Whether to allow accessing the mirrors via
                             HTTP proxies (defaults to False)

    :rtype: str
    :returns: A string of the adjusted configuration
    """
    # Docstring fixes: the parameter was documented under the wrong name
    # ("yum_cfg") and "Wether" was a typo.
    out_cfg = StringIO()
    inject_yum_mirrors(mirrors, StringIO(yum_cfg_str), out_cfg, allow_proxy)
    # getvalue() replaces the original seek(0)/read() pair.
    return out_cfg.getvalue()
def exceptions_csv():
    """Build a CSV download response of all recorded locust exceptions."""
    buf = StringIO()
    writer = csv.writer(buf)
    writer.writerow(["Count", "Message", "Traceback", "Nodes"])
    for exc in six.itervalues(runners.locust_runner.exceptions):
        row = [exc["count"], exc["msg"], exc["traceback"],
               ", ".join(exc["nodes"])]
        writer.writerow(row)
    buf.seek(0)
    response = make_response(buf.read())
    file_name = "exceptions_{0}.csv".format(time())
    disposition = "attachment;filename={0}".format(file_name)
    response.headers["Content-type"] = "text/csv"
    response.headers["Content-disposition"] = disposition
    return response
class TestStreamStructure(TestInMemoryStructure):
    # Re-runs the inherited in-memory structure tests against the JSON
    # stream-backed read/write structures.

    def json(self, head):
        # Build the expected serialized JSON document for the given head
        # token, from the class-level LIST/DICT/HEAD fixtures.
        return jl(
            jd(
                (q(self.LIST_TOKEN), jl(*(q(w) for w in self.LIST))),
                (q(self.DICT_TOKEN), jd(*((q(k), q(v)) for k,v in self.DICT.items()))),
                (q(self.HEAD_TOKEN), jd(*((q(k), q(v)) for k,v in self.HEAD.items()))),
            ),
            q(head),
        )

    def get_read_store(self):
        # Read stores consume a pre-serialized stream.
        self.stream = StringIO(self.json(self.HEAD_TOKEN))
        return structure.JSONStreamReadStructure(self.stream)

    def get_write_store(self):
        # Write stores start from an empty stream.
        self.stream = StringIO()
        return structure.JSONStreamWriteStructure(self.stream)

    def verify_dict(self, received, expect):
        tools.assert_equal(dict(expect), received)

    def verify_list(self, received, expect):
        tools.assert_equal(list(expect), received)

    def verify_write(self, store, head):
        # Nothing written to stream until close().
        tools.assert_equal(0, self.stream.tell())
        tools.assert_equal('', self.stream.read())
        store.close()
        self.stream.seek(0)
        tools.assert_equal(self.json(head), self.stream.read())

    def test_no_close(self):
        # Read stores expose no close() attribute at all.
        store = self.get_read_store()
        tools.assert_raises(AttributeError, getattr, store, 'close')
class TMemoryBuffer(TTransportBase, CReadableTransport):
    """Wraps a cStringIO object as a TTransport.

    NOTE: Unlike the C++ version of this class, you cannot write to it
          then immediately read from it.  If you want to read from a
          TMemoryBuffer, you must either pass a string to the constructor.
    TODO(dreiss): Make this work like the C++ version.
    """

    def __init__(self, value=None):
        """value -- a value to read from for stringio

        If value is set, this will be a transport for reading,
        otherwise, it is for writing"""
        self._buffer = StringIO() if value is None else StringIO(value)

    def isOpen(self):
        return not self._buffer.closed

    def open(self):
        pass

    def close(self):
        self._buffer.close()

    def read(self, sz):
        return self._buffer.read(sz)

    def write(self, buf):
        self._buffer.write(buf)

    def flush(self):
        pass

    def getvalue(self):
        return self._buffer.getvalue()

    # Implement the CReadableTransport interface.
    @property
    def cstringio_buf(self):
        return self._buffer

    def cstringio_refill(self, partialread, reqlen):
        # A memory buffer holds all its data up front, so there is
        # nothing left to refill from — only one shot at reading.
        raise EOFError()
def test_postprocess_cache_format(fmt, result):
    """Each output-format flag should produce the matching rendering."""
    # create namespace for parsing, enabling the requested format flag
    args = argparse.Namespace(
        type=None,
        lal_cache=False,
        names_only=False,
        frame_cache=False,
        gaps=None,
    )
    if fmt:
        setattr(args, fmt, True)
    # run and compare the captured output
    stream = StringIO()
    assert not main.postprocess_cache(URLS, args, stream)
    stream.seek(0)
    assert stream.read() == result
def to(cls, format, **options):
    """Serialize YANG container to the given output `format`."""
    # pyang output plugins write to a stream
    out = StringIO()
    plugin = PYANG_PLUGINS[format]
    # let the plugin register its options on a throwaway parser,
    # then just take the defaults
    optparser = OptionParser()
    plugin.add_opts(optparser)
    ctx = pyang.Context(DummyRepository())
    ctx.opts = optparser.parse_args([])[0]
    # serialize and hand back the buffered text
    plugin.emit(ctx, [cls.to_statement(**options)], out)
    return out.getvalue()
def test_filename(mfindurl):
    """``main.filename`` should look up and print the URL for a frame file."""
    url = 'file:///test/X-test-0-10.gwf'
    mfindurl.return_value = [url]
    args = argparse.Namespace(
        server='test.datafind.com:443',
        filename='X-test-0-10.gwf',
        url_type='file',
        type=None,
        lal_cache=False,
        names_only=False,
        frame_cache=False,
        gaps=None,
    )
    stream = StringIO()
    main.filename(args, stream)
    assert mfindurl.called_with(args.filename, urltype=args.url_type,
                                on_missing='warn', host=args.server)
    stream.seek(0)
    assert stream.read().rstrip() == url
def inject_yum_mirrors_str(mirrors, yum_cfg_str, allow_proxy=False, none_value=None):
    """Inject yum mirrors into the given yum configuration string

    :param Mapping mirrors:  A mapping of mirror names to URLs
    :param str yum_cfg_str:  YUM configuration string to adjust
    :param bool allow_proxy: Whether to allow accessing the mirrors via
                             HTTP proxies (defaults to False)
    :param str none_value:   Specify the 'no-proxy' value - see docstring
                             for inject_yum_mirrors for full explanation

    :rtype: str
    :returns: A string of the adjusted configuration
    """
    # Docstring fixes: the second parameter was documented under the wrong
    # name ("yum_cfg") and "Wether" was a typo.
    out_cfg = StringIO()
    inject_yum_mirrors(mirrors, StringIO(yum_cfg_str), out_cfg, allow_proxy,
                       none_value)
    # getvalue() replaces the original seek(0)/read() pair.
    return out_cfg.getvalue()
def test_latest(mlatest):
    """``main.latest`` should print the most recent frame URL."""
    url = 'file:///test/X-test-0-10.gwf'
    mlatest.return_value = [url]
    args = argparse.Namespace(
        server='test.datafind.com:443',
        observatory='X',
        type='test',
        url_type='file',
        lal_cache=False,
        names_only=False,
        frame_cache=False,
        gaps=None,
    )
    stream = StringIO()
    main.latest(args, stream)
    assert mlatest.called_with(args.observatory, args.type,
                               urltype=args.url_type, on_missing='warn',
                               host=args.server)
    stream.seek(0)
    assert stream.read().rstrip() == url
def _askYesNo(question=None):
    """Prompt the user with *question* until they answer yes or no.

    :param question: question text (rendered as "<question>? (yes|no): ")
    :returns: True for an answer starting with 'y', False for 'n'
    """
    # BUG FIX: the original wrote the prompt into a single StringIO on
    # every loop iteration and then seek(0)/read() the whole buffer, so
    # each re-ask displayed the prompt one more time than the last.
    # Build the prompt string once instead.
    askString = "\r%s? (yes|no): " % (question)
    while True:
        logging.debug("asking user: %s" % askString)
        raw = raw_input(askString)
        if not len(raw):
            continue
        answer = raw[0].lower()
        logging.debug("user answered read: %s" % (answer))
        if answer not in 'yn':
            continue
        return answer == 'y'
def _getInputFromUser(param):
    """
    this private func reads the data from the user
    for the given param

    The accepted value is stored in ``controller.CONF[param.CONF_NAME]``.
    Values may come from (in priority order) the parameter default
    (when USE_DEFAULT is set), the command line (``commandLineValues``),
    or an interactive prompt.  Loops until a value passes validation or
    the user accepts a loosely-validated one.
    """
    loop = True
    userInput = None
    try:
        if param.USE_DEFAULT:
            # No interaction at all: take the configured default.
            logging.debug("setting default value (%s) for key (%s)"
                          % (mask(param.DEFAULT_VALUE), param.CONF_NAME))
            controller.CONF[param.CONF_NAME] = param.DEFAULT_VALUE
        else:
            while loop:
                # If the value was not supplied by the command line flags
                if param.CONF_NAME not in commandLineValues:
                    # Assemble the prompt: base text, option list (unless a
                    # regexp validator is in play), and the default value.
                    message = StringIO()
                    message.write(param.PROMPT)
                    val_list = param.VALIDATORS or []
                    if(validators.validate_regexp not in val_list
                       and param.OPTION_LIST):
                        message.write(" [%s]" % "|".join(param.OPTION_LIST))
                    if param.DEFAULT_VALUE:
                        message.write(" [%s] " % (str(param.DEFAULT_VALUE)))
                    message.write(": ")
                    message.seek(0)
                    # mask password or hidden fields
                    if (param.MASK_INPUT):
                        userInput = getpass.getpass("%s :" % (param.PROMPT))
                    else:
                        userInput = raw_input(message.read())
                else:
                    userInput = commandLineValues[param.CONF_NAME]
                # If DEFAULT_VALUE is set and user did not input anything
                if userInput == "" and len(str(param.DEFAULT_VALUE)) > 0:
                    userInput = param.DEFAULT_VALUE
                # Param processing
                userInput = process_param_value(param, userInput)
                # If param requires validation
                try:
                    validate_param_value(param, userInput)
                    controller.CONF[param.CONF_NAME] = userInput
                    loop = False
                except ParamValidationError:
                    if param.LOOSE_VALIDATION:
                        # If validation failed but LOOSE_VALIDATION is true, ask user
                        answer = _askYesNo("User input failed validation, "
                                           "do you still wish to use it")
                        loop = not answer
                        if answer:
                            controller.CONF[param.CONF_NAME] = userInput
                            continue
                        else:
                            if param.CONF_NAME in commandLineValues:
                                del commandLineValues[param.CONF_NAME]
                    else:
                        # Delete value from commandLineValues so that we will prompt the user for input
                        if param.CONF_NAME in commandLineValues:
                            del commandLineValues[param.CONF_NAME]
                        loop = True
    except KeyboardInterrupt:
        # add the new line so messages wont be displayed in the same line as the question
        print("")
        raise
    except:
        # NOTE(review): bare except is deliberate here as a last-resort
        # boundary — anything unexpected is logged and re-raised as a
        # user-facing error.  Consider narrowing to Exception.
        logging.error(traceback.format_exc())
        raise Exception(output_messages.ERR_EXP_READ_INPUT_PARAM
                        % (param.CONF_NAME))
class InputFile(object):
    # Bodies up to this size are buffered in memory; larger ones spill
    # to a temporary file.
    max_buffer_size = 1024 * 1024

    def __init__(self, rfile, length):
        """File-like object used to provide a seekable view of request body data"""
        self._file = rfile
        self.length = length
        # How far into the underlying (non-seekable) file we have read.
        # Everything read so far is mirrored into self._buf, which IS
        # seekable; self._buf's position may trail _file_position.
        self._file_position = 0
        if length > self.max_buffer_size:
            self._buf = tempfile.TemporaryFile()
        else:
            self._buf = StringIO()

    @property
    def _buf_position(self):
        # Current logical read position (inside the already-buffered data).
        rv = self._buf.tell()
        assert rv <= self._file_position
        return rv

    def read(self, bytes=-1):
        """Read up to *bytes* (default: rest of the declared body length),
        serving already-buffered data first, then pulling from the wire."""
        assert self._buf_position <= self._file_position
        if bytes < 0:
            bytes = self.length - self._buf_position
        bytes_remaining = min(bytes, self.length - self._buf_position)
        if bytes_remaining == 0:
            return ""
        if self._buf_position != self._file_position:
            # Replay data we already pulled off the wire.
            buf_bytes = min(bytes_remaining,
                            self._file_position - self._buf_position)
            old_data = self._buf.read(buf_bytes)
            bytes_remaining -= buf_bytes
        else:
            old_data = ""
        assert bytes_remaining == 0 or self._buf_position == self._file_position, (
            "Before reading buffer position (%i) didn't match file position (%i)" %
            (self._buf_position, self._file_position))
        # Pull the remainder from the wire and mirror it into the buffer.
        new_data = self._file.read(bytes_remaining)
        self._buf.write(new_data)
        self._file_position += bytes_remaining
        assert bytes_remaining == 0 or self._buf_position == self._file_position, (
            "After reading buffer position (%i) didn't match file position (%i)" %
            (self._buf_position, self._file_position))
        return old_data + new_data

    def tell(self):
        return self._buf_position

    def seek(self, offset):
        """Seek to an absolute *offset*; seeking forward reads (and
        buffers) the intervening data from the wire."""
        if offset > self.length or offset < 0:
            raise ValueError
        if offset <= self._file_position:
            self._buf.seek(offset)
        else:
            self.read(offset - self._file_position)

    def readline(self, max_bytes=None):
        """Read one line (up to *max_bytes*), first from the buffer,
        then scanning the wire in small chunks for a newline."""
        if max_bytes is None:
            max_bytes = self.length - self._buf_position
        if self._buf_position < self._file_position:
            data = self._buf.readline(max_bytes)
            if data.endswith("\n") or len(data) == max_bytes:
                return data
        else:
            data = ""
        assert self._buf_position == self._file_position
        initial_position = self._file_position
        found = False
        buf = []
        max_bytes -= len(data)
        while not found:
            # Small readahead; read() mirrors it into the buffer so the
            # final seek() below can rewind past any overshoot.
            readahead = self.read(min(2, max_bytes))
            max_bytes -= len(readahead)
            for i, c in enumerate(readahead):
                if c == "\n":
                    buf.append(readahead[:i + 1])
                    found = True
                    break
            if not found:
                buf.append(readahead)
            if not readahead or not max_bytes:
                break
        new_data = "".join(buf)
        data += new_data
        self.seek(initial_position + len(new_data))
        return data

    def readlines(self):
        rv = []
        while True:
            data = self.readline()
            if data:
                rv.append(data)
            else:
                break
        return rv

    def next(self):
        # Python 2 iterator protocol; yields one line at a time.
        data = self.readline()
        if data:
            return data
        else:
            raise StopIteration

    def __iter__(self):
        return self
def open(self,f=None,mode='r',encrypt=False,sign=False):
    """write: encrypt = list of recipients, sign = sender or bool(default_key)
    read: encrypt = encrypted data expected, sign= expected key or True=defaultkey

    Opens this dataset for reading or writing in the configured
    ``self.fileformat`` (csv or one of the json variants), optionally
    decrypting/verifying (read) or preparing encryption/signing (write)
    via the attached gpg wrapper.
    """
    from six.moves import StringIO
    from six import next, PY3, BytesIO
    self.mode,self.encrypt,self.sign = mode,encrypt,sign
    # required=True is shorthand for "all declared columns are required".
    if self.required==True:
        self.required = self.columns
    if encrypt or sign:
        assert self.gpg, 'gpg not intialized'
    self.origfile = self.file = f
    assert mode in ('r','w'), 'invalid mode'
    if mode=='r':
        if sign:
            # Resolve the fingerprint we expect the signature to carry.
            if sign==True:
                fingerprint = self.gpg.default_key
                if type(fingerprint) == tuple:
                    fingerprint = fingerprint[0]
            else:
                if type(sign) == tuple:
                    sign = sign[0]
                fingerprint = self.gpg.find_key(sign)
            assert fingerprint, "sender key not found"
        if self.fileformat=='csv':
            import re
            if encrypt:
                # Decrypt (and optionally verify) the whole input first.
                if PY3 and isinstance(f,StringIO):
                    result = self.gpg.decrypt_str(f.getvalue())
                else:
                    result = self.gpg.decrypt_file(f)
                assert result.ok, "decryption failed"
                if sign:
                    assert result.valid and result.fingerprint==fingerprint, 'invalid signature'
                f = StringIO(str(result))
            elif sign:
                # Clear-signed input: verify, then strip the signature
                # wrapper before handing the text to the csv reader.
                if PY3 and isinstance(f,StringIO):
                    result = self.gpg.verify_str(f.getvalue())
                    f = StringIO(self.gpg.without_signature(f.getvalue()))
                else:
                    result = self.gpg.verify_file(f)
                    f.seek(0)
                    f = StringIO(self.gpg.without_signature(f.read()))
                assert result.valid and result.fingerprint==fingerprint, 'invalid signature'
            self.file = f
            dialect = self.dialect
            if not dialect:
                # Sniff the dialect from a sample, then rewind.
                pos = f.tell()
                dialect = csv.Sniffer().sniff(f.read(1024))
                f.seek(pos) # rewind
            reader = csv.reader(f,dialect=dialect)
            # First row is the preamble: (dataformat, "major.minor").
            preamble = next(reader)
            assert len(preamble), 'invalid file format'
            assert preamble[0]==self.dataformat, "file format not supported"
            preamble = re.match(r'^(\d+).(\d+)',preamble[1])
            assert int(preamble.group(2))<=self.version[0], "format version not supported"
            # Second row names the fields; remaining rows are data.
            fields = next(reader)
            self.csv = reader
        else: # self.fileformat in ('json','jsondict','json-file','jsondict-file'):
            import json
            if self.fileformat in ('json-file','jsondict-file'):
                self.file = f = json.load(f)
            # json_decrypt reports whether the payload was encrypted
            # and/or signed so we can cross-check expectations.
            data, encrypted, signed, result = json_decrypt(f,self.gpg)
            assert data, 'invalid input'
            if encrypt:
                assert encrypted==bool(encrypt), 'encryption expected'
            if sign:
                assert signed==bool(sign), 'signature expected'
                assert result.valid and result.fingerprint==fingerprint, 'invalid signature'
            assert 'format' in data and data['format']==self.dataformat, "file format not supported"
            assert 'version' in data and data['version'][0]<=self.version[0], "file version not supported"
            assert 'fields' in data , "fields missing"
            fields = data['fields']
            self.rows = data['data']
        # Split declared fields into known columns and (optionally
        # ignored) unknown ones, then enforce required columns.
        columns, unknown = [], []
        for field in fields:
            if field in self.columns:
                columns.append(field)
            elif self.ignore:
                unknown.append(field)
            else:
                assert False, "unknown field '%s'" % field
        if self.required:
            for field in self.required:
                assert field in columns, "missing required field '%s'" % field
        self.fields = fields
        self.read_columns = (columns,unknown)
    elif mode=='w':
        assert self.fileformat in ('json','jsondict') or self.file, 'file missing'
        if self.fileformat=='csv':
            # When encrypting/signing, buffer the csv output in memory;
            # the crypto wrapping presumably happens on close.
            if encrypt or sign:
                self.file = StringIO()
            else:
                self.file = f
            self.csv = csv.writer(self.file,lineterminator='\n',dialect=self.dialect)
            self.csv.writerow((self.dataformat,'%i.%i' % tuple(self.version)))
            self.csv.writerow(self.columns)
        else: # self.fileformat in ('json','jsondict'):
            self.rows = []
describe "mainline": it "catches DelfickError errors and prints them nicely": fle = StringIO() class MyApp(App): def execute(slf, args_obj, args_dict, extra_args, handler): raise DelfickError("Well this should work", blah=1, _errors=[DelfickError("SubError", meh=2), DelfickError("SubError2", stuff=3)]) try: MyApp().mainline([], print_errors_to=fle) assert False, "This should have failed" except SystemExit as error: self.assertEqual(error.code, 1) fle.flush() fle.seek(0) self.assertEqual(fle.read(), dedent(""" !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! Something went wrong! -- DelfickError \t"Well this should work"\tblah=1 errors: ======= \t"SubError"\tmeh=2 ------- \t"SubError2"\tstuff=3 ------- """)) it "Converts KeyboardInterrupt into a UserQuit": fle = StringIO() class MyApp(App):
def request(self, method, path, contents, headers, decode_json=False,
            stream=False, query=None, cdn=False):
    """
    See :py:func:`swiftly.client.client.Client.request`
    """
    if query:
        # Append the sorted, quoted query string to the path.
        path += '?' + '&'.join(
            ('%s=%s' % (quote(k), quote(v)) if v else quote(k))
            for k, v in sorted(six.iteritems(query)))
    reset_func = self._default_reset_func
    if isinstance(contents, six.string_types):
        contents = StringIO(contents)
    # If the body is seekable, remember the start position so it can be
    # rewound before a retry attempt.
    tell = getattr(contents, 'tell', None)
    seek = getattr(contents, 'seek', None)
    if tell and seek:
        try:
            orig_pos = tell()
            reset_func = lambda: seek(orig_pos)
        except Exception:
            tell = seek = None
    elif not contents:
        reset_func = lambda: None
    status = 0
    reason = 'Unknown'
    attempt = 0
    while attempt < self.attempts:
        attempt += 1
        # Stale connections are discarded after a short idle window.
        if time() >= self.conn_discard:
            self.storage_conn = None
            self.cdn_conn = None
        if cdn:
            conn = self.cdn_conn
            conn_path = self.cdn_path
        else:
            conn = self.storage_conn
            conn_path = self.storage_path
        if not conn:
            parsed, conn = self._connect(cdn=cdn)
            if conn:
                if cdn:
                    self.cdn_conn = conn
                    self.cdn_path = conn_path = parsed.path
                else:
                    self.storage_conn = conn
                    self.storage_path = conn_path = parsed.path
            else:
                raise self.HTTPException(
                    '%s %s failed: No connection' % (method, path))
        self.conn_discard = time() + 4
        titled_headers = dict((k.title(), v) for k, v in six.iteritems({
            'User-Agent': self.user_agent,
            'X-Auth-Token': self.auth_token}))
        if headers:
            titled_headers.update(
                (k.title(), v) for k, v in six.iteritems(headers))
        try:
            if not hasattr(contents, 'read'):
                # Simple string body: send it in one shot.
                if method not in self.no_content_methods and contents and \
                        'Content-Length' not in titled_headers and \
                        'Transfer-Encoding' not in titled_headers:
                    titled_headers['Content-Length'] = str(
                        len(contents or ''))
                verbose_headers = ' '.join(
                    '%s: %s' % (k, v)
                    for k, v in sorted(six.iteritems(titled_headers)))
                self.verbose(
                    '> %s %s %s', method, conn_path + path, verbose_headers)
                conn.request(
                    method, conn_path + path, contents, titled_headers)
            else:
                # File-like body: stream it, chunked when no
                # Content-Length header was provided.
                conn.putrequest(method, conn_path + path)
                content_length = None
                for h, v in sorted(six.iteritems(titled_headers)):
                    if h == 'Content-Length':
                        content_length = int(v)
                    conn.putheader(h, v)
                if method not in self.no_content_methods and \
                        content_length is None:
                    titled_headers['Transfer-Encoding'] = 'chunked'
                    conn.putheader('Transfer-Encoding', 'chunked')
                conn.endheaders()
                verbose_headers = ' '.join(
                    '%s: %s' % (k, v)
                    for k, v in sorted(six.iteritems(titled_headers)))
                self.verbose(
                    '> %s %s %s', method, conn_path + path, verbose_headers)
                if method not in self.no_content_methods and \
                        content_length is None:
                    # Chunked transfer encoding.
                    chunk = contents.read(self.chunk_size)
                    while chunk:
                        conn.send('%x\r\n%s\r\n' % (len(chunk), chunk))
                        chunk = contents.read(self.chunk_size)
                    conn.send('0\r\n\r\n')
                else:
                    # Fixed-length body; EOF before Content-Length bytes
                    # is an error.
                    left = content_length or 0
                    while left > 0:
                        size = self.chunk_size
                        if size > left:
                            size = left
                        chunk = contents.read(size)
                        if not chunk:
                            raise IOError('Early EOF from input')
                        conn.send(chunk)
                        left -= len(chunk)
            resp = conn.getresponse()
            status = resp.status
            reason = resp.reason
            hdrs = headers_to_dict(resp.getheaders())
            if stream:
                value = resp
            else:
                value = resp.read()
                resp.close()
        except Exception as err:
            # A transport error is treated like a 5xx: fall through to
            # the retry path below.
            status = 0
            reason = '%s %s' % (type(err), str(err))
            hdrs = {}
            value = None
        self.verbose('< %s %s', status or '-', reason)
        if status == 401:
            # Re-authenticate and retry without consuming an attempt.
            if stream:
                value.close()
            conn.close()
            self.auth()
            attempt -= 1
        elif status and status // 100 != 5:
            if not stream and decode_json and status // 100 == 2:
                if value:
                    value = json.loads(value.decode('utf-8'))
                else:
                    value = None
            self.conn_discard = time() + 4
            return (status, reason, hdrs, value)
        else:
            # 5xx / transport failure: drop the connection, rewind the
            # body and back off exponentially before retrying.
            if stream and value:
                value.close()
            conn.close()
            if reset_func:
                reset_func()
            self.sleep(2 ** attempt)
    raise self.HTTPException(
        '%s %s failed: %s %s' % (method, path, status, reason))
def _object(self, method, container_name, object_name, contents,
            headers, stream, query, cdn):
    """Local-filesystem implementation of an object request
    (GET/HEAD/PUT/DELETE); returns (status, reason, headers, body)."""
    if cdn:
        raise Exception('CDN not yet supported with LocalClient')
    fs_container = _encode_name(container_name)
    fs_object = _encode_name(object_name)
    # Defaults; overwritten by each method branch below.
    status = 503
    reason = 'Internal Server Error'
    hdrs = {}
    body = ''
    if method in ('GET', 'HEAD'):
        local_path = path_join(self.local_path, fs_container, fs_object)
        if not exists(local_path):
            status = 404
            reason = 'Not Found'
        else:
            content_length = getsize(local_path)
            hdrs['content-length'] = str(content_length)
            # Zero-length objects answer 204 No Content.
            status = 200 if content_length else 204
            if method == 'HEAD':
                body = ''
            else:
                body = open(local_path, 'rb')
                if not stream:
                    body = body.read()
    elif method == 'PUT':
        fs_object_path = path_join(
            self.local_path, fs_container, fs_object)
        # Write to a temp file first, then rename into place.
        temp_path = path_join(
            self.local_path, fs_container, '_-temp' + uuid4().hex)
        content_length = headers.get('content-length')
        if content_length is not None:
            content_length = int(content_length)
        fp = open(temp_path, 'wb')
        left = content_length
        written = 0
        # left is None when no Content-Length was given: read to EOF.
        while left is None or left > 0:
            if left is not None:
                chunk = contents.read(max(left, self.chunk_size))
                left -= len(chunk)
            else:
                chunk = contents.read(self.chunk_size)
            if not chunk:
                break
            fp.write(chunk)
            written += len(chunk)
        fp.flush()
        fp.close()
        if content_length is not None and written != content_length:
            # Short body: reject the upload and clean up the temp file.
            unlink(temp_path)
            status = 503
            reason = 'Internal Server Error'
            body = 'Wrote %d bytes when Content-Length was %d' % (
                written, content_length)
        else:
            db = self._get_db()
            with lock_dir(self.local_path):
                if isfile(fs_object_path):
                    rename(temp_path, fs_object_path)
                    db.execute('''
                        UPDATE object_entry
                        SET put_timestamp = ?, byte_count = ?
                        WHERE container_name = ? AND object_name = ?
                    ''', (time(), written, container_name, object_name))
                else:
                    rename(temp_path, fs_object_path)
                    db.execute('''
                        INSERT INTO object_entry (
                            container_name, object_name, put_timestamp,
                            byte_count) VALUES (?, ?, ?, ?)
                    ''', (container_name, object_name, time(), written))
                db.commit()
            status = 201
            reason = 'Created'
            body = ''
            hdrs['content-length'] = str(len(body))
    elif method == 'DELETE':
        fs_object_path = path_join(
            self.local_path, fs_container, fs_object)
        if not isfile(fs_object_path):
            status = 404
            reason = 'Not Found'
        else:
            db = self._get_db()
            with lock_dir(self.local_path):
                # Re-check under the lock; another request may have
                # deleted the file in the meantime.
                if not isfile(fs_object_path):
                    status = 404
                    reason = 'Not Found'
                else:
                    unlink(fs_object_path)
                    db.execute('''
                        DELETE FROM object_entry
                        WHERE container_name = ? AND object_name = ?
                    ''', (container_name, object_name))
                    db.commit()
                    status = 204
                    reason = 'No Content'
                    body = ''
                    hdrs['content-length'] = str(len(body))
    # Normalize the body to the caller's streaming preference.
    if stream and not hasattr(body, 'read'):
        body = StringIO(body)
    elif not stream and hasattr(body, 'read'):
        body = body.read()
    return status, reason, hdrs, body
def request(self, method, path, contents, headers, decode_json=False,
            stream=False, query=None, cdn=False):
    """
    See :py:func:`swiftly.client.client.Client.request`
    """
    if query:
        # Append the sorted, quoted query string to the path.
        path += '?' + '&'.join(
            ('%s=%s' % (quote(k), quote(v)) if v else quote(k))
            for k, v in sorted(six.iteritems(query)))
    reset_func = self._default_reset_func
    if isinstance(contents, six.string_types):
        contents = StringIO(contents)
    # If the body is seekable, remember the start position so it can be
    # rewound before a retry attempt.
    tell = getattr(contents, 'tell', None)
    seek = getattr(contents, 'seek', None)
    if tell and seek:
        try:
            orig_pos = tell()
            reset_func = lambda: seek(orig_pos)
        except Exception:
            tell = seek = None
    elif not contents:
        reset_func = lambda: None
    status = 0
    reason = 'Unknown'
    attempt = 0
    while attempt < self.attempts:
        attempt += 1
        # Stale connections are discarded after a short idle window.
        if time() >= self.conn_discard:
            self.storage_conn = None
            self.cdn_conn = None
        if cdn:
            conn = self.cdn_conn
            conn_path = self.cdn_path
        else:
            conn = self.storage_conn
            conn_path = self.storage_path
        if not conn:
            parsed, conn = self._connect(cdn=cdn)
            if conn:
                if cdn:
                    self.cdn_conn = conn
                    self.cdn_path = conn_path = parsed.path
                else:
                    self.storage_conn = conn
                    self.storage_path = conn_path = parsed.path
            else:
                raise self.HTTPException('%s %s failed: No connection' %
                                         (method, path))
        self.conn_discard = time() + 4
        titled_headers = dict(
            (k.title(), v) for k, v in six.iteritems({
                'User-Agent': self.user_agent,
                'X-Auth-Token': self.auth_token
            }))
        if headers:
            titled_headers.update(
                (k.title(), v) for k, v in six.iteritems(headers))
        try:
            if not hasattr(contents, 'read'):
                # Simple string body: send it in one shot.
                if method not in self.no_content_methods and contents and \
                        'Content-Length' not in titled_headers and \
                        'Transfer-Encoding' not in titled_headers:
                    titled_headers['Content-Length'] = str(
                        len(contents or ''))
                verbose_headers = ' '.join(
                    '%s: %s' % (k, v)
                    for k, v in sorted(six.iteritems(titled_headers)))
                self.verbose('> %s %s %s', method, conn_path + path,
                             verbose_headers)
                conn.request(method, conn_path + path, contents,
                             titled_headers)
            else:
                # File-like body: stream it, chunked when no
                # Content-Length header was provided.
                conn.putrequest(method, conn_path + path)
                content_length = None
                for h, v in sorted(six.iteritems(titled_headers)):
                    if h == 'Content-Length':
                        content_length = int(v)
                    conn.putheader(h, v)
                if method not in self.no_content_methods and \
                        content_length is None:
                    titled_headers['Transfer-Encoding'] = 'chunked'
                    conn.putheader('Transfer-Encoding', 'chunked')
                conn.endheaders()
                verbose_headers = ' '.join(
                    '%s: %s' % (k, v)
                    for k, v in sorted(six.iteritems(titled_headers)))
                self.verbose('> %s %s %s', method, conn_path + path,
                             verbose_headers)
                if method not in self.no_content_methods and \
                        content_length is None:
                    # Chunked transfer encoding.
                    chunk = contents.read(self.chunk_size)
                    while chunk:
                        conn.send('%x\r\n%s\r\n' % (len(chunk), chunk))
                        chunk = contents.read(self.chunk_size)
                    conn.send('0\r\n\r\n')
                else:
                    # Fixed-length body; EOF before Content-Length bytes
                    # is an error.
                    left = content_length or 0
                    while left > 0:
                        size = self.chunk_size
                        if size > left:
                            size = left
                        chunk = contents.read(size)
                        if not chunk:
                            raise IOError('Early EOF from input')
                        conn.send(chunk)
                        left -= len(chunk)
            resp = conn.getresponse()
            status = resp.status
            reason = resp.reason
            hdrs = headers_to_dict(resp.getheaders())
            if stream:
                value = resp
            else:
                value = resp.read()
                resp.close()
        except Exception as err:
            # A transport error is treated like a 5xx: fall through to
            # the retry path below.
            status = 0
            reason = '%s %s' % (type(err), str(err))
            hdrs = {}
            value = None
        self.verbose('< %s %s', status or '-', reason)
        self.verbose('< %s', hdrs)
        if status == 401:
            # Re-authenticate and retry without consuming an attempt.
            if stream:
                value.close()
            conn.close()
            self.auth()
            attempt -= 1
        elif status and status // 100 != 5:
            if not stream and decode_json and status // 100 == 2:
                if value:
                    value = json.loads(value.decode('utf-8'))
                else:
                    value = None
            self.conn_discard = time() + 4
            return (status, reason, hdrs, value)
        else:
            # 5xx / transport failure: drop the connection, rewind the
            # body and back off exponentially before retrying.
            if stream and value:
                value.close()
            conn.close()
            if reset_func:
                reset_func()
            self.sleep(2**attempt)
    raise self.HTTPException('%s %s failed: %s %s' %
                             (method, path, status, reason))
class TFramedTransport(TTransportBase, CReadableTransport): """Class that wraps another transport and frames its I/O when writing.""" def __init__( self, trans, ): self.__trans = trans self.__rbuf = StringIO() self.__wbuf = StringIO() def isOpen(self): return self.__trans.isOpen() def open(self): return self.__trans.open() def close(self): return self.__trans.close() def read(self, sz): ret = self.__rbuf.read(sz) if len(ret) != 0: return ret self.readFrame() return self.__rbuf.read(sz) def readFrame(self): buff = self.__trans.readAll(4) sz, = unpack('!i', buff) self.__rbuf = StringIO(self.__trans.readAll(sz)) def write(self, buf): self.__wbuf.write(buf) def flush(self): wout = self.__wbuf.getvalue() wsz = len(wout) # reset wbuf before write/flush to preserve state on underlying failure self.__wbuf = StringIO() # N.B.: Doing this string concatenation is WAY cheaper than making # two separate calls to the underlying socket object. Socket writes in # Python turn out to be REALLY expensive, but it seems to do a pretty # good job of managing string buffer operations without excessive copies buf = pack("!i", wsz) + wout self.__trans.write(buf) self.__trans.flush() # Implement the CReadableTransport interface. @property def cstringio_buf(self): return self.__rbuf def cstringio_refill(self, prefix, reqlen): # self.__rbuf will already be empty here because fastbinary doesn't # ask for a refill until the previous buffer is empty. Therefore, # we can start reading new frames immediately. while len(prefix) < reqlen: self.readFrame() prefix += self.__rbuf.getvalue() self.__rbuf = StringIO(prefix) return self.__rbuf
def open(self,f=None,mode='r',encrypt=False,sign=False):
    """Open a table file for reading or writing, optionally via GPG.

    write: encrypt = bool or list of recipients, sign = sender or
    bool(default_key)
    read: encrypt = bool encrypted data expected, sign = expected key or
    True=defaultkey

    In read mode the input is (optionally) decrypted/verified, the format
    preamble and field list are parsed, and the known/unknown columns are
    recorded in ``self.read_columns``.  In write mode the preamble and
    column header are emitted (csv) or ``self.rows`` is initialized (json).
    """
    from six.moves import StringIO
    from six import next, PY3, BytesIO
    self.mode,self.encrypt,self.sign = mode,encrypt,sign
    # required=True is shorthand for "all declared columns are required".
    if self.required is True: self.required = self.columns
    if encrypt or sign:
        assert self.gpg, 'gpg not intialized'
    self.origfile = self.file = f
    assert mode in ('r','w'), 'invalid mode'
    if mode=='r':
        if sign:
            # Resolve the expected signer's fingerprint: either the
            # configured default key or a lookup of the given sender.
            if sign is True:
                fingerprint = self.gpg.default_key
                if type(fingerprint) == tuple: fingerprint = fingerprint[0]
            else:
                if type(sign) == tuple: sign = sign[0]
                fingerprint = self.gpg.find_key(sign)
            assert fingerprint, "sender key not found"
        if self.fileformat=='csv':
            import re
            if encrypt:
                # Decrypt (and, if signed, verify) the whole input, then
                # continue parsing the plaintext from a StringIO.
                if PY3 and isinstance(f,StringIO):
                    result = self.gpg.decrypt_str(f.getvalue())
                else:
                    result = self.gpg.decrypt_file(f)
                assert result.ok, "decryption failed"
                if sign:
                    assert result.valid and result.fingerprint==fingerprint, 'invalid signature'
                f = StringIO(str(result))
            elif sign:
                # Clear-signed input: verify, then strip the signature
                # wrapper before parsing.
                if PY3 and isinstance(f,StringIO):
                    result = self.gpg.verify_str(f.getvalue())
                    f = StringIO(self.gpg.without_signature(f.getvalue()))
                else:
                    result = self.gpg.verify_file(f)
                    f.seek(0)
                    f = StringIO(self.gpg.without_signature(f.read()))
                assert result.valid and result.fingerprint==fingerprint, 'invalid signature'
            self.file = f
            dialect = self.dialect
            if not dialect:
                # Sniff the CSV dialect from a sample, then restore the
                # read position.
                pos = f.tell()
                dialect = csv.Sniffer().sniff(f.read(1024))
                f.seek(pos) # rewind
            if not PY3:
                import unicodecsv
                reader = unicodecsv.reader
            else:
                reader = csv.reader
            reader = reader(f,dialect=dialect)
            # First row is the preamble: (dataformat, "major.minor").
            preamble = next(reader)
            assert len(preamble), 'invalid file format'
            assert preamble[0]==self.dataformat, "file format not supported"
            # NOTE(review): the '.' in the pattern is unescaped (matches any
            # char), and group(2) is the MINOR version but is compared against
            # self.version[0] which the json branch treats as the MAJOR
            # version — possibly should be group(1); confirm intent.
            preamble = re.match(r'^(\d+).(\d+)',preamble[1])
            assert int(preamble.group(2))<=self.version[0], "format version not supported"
            # Second row lists the field names.
            fields = next(reader)
            self.csv = reader
        else: # self.fileformat in ('json','jsondict','json-file','jsondict-file'):
            import json
            if self.fileformat in ('json-file','jsondict-file'):
                self.file = f = json.load(f)
            # json_decrypt unwraps optional encryption/signature layers and
            # reports what it found.
            data, encrypted, signed, result = json_decrypt(f,self.gpg)
            assert data, 'invalid input'
            if encrypt: assert encrypted==bool(encrypt), 'encryption expected'
            if sign:
                assert signed==bool(sign), 'signature expected'
                assert result.valid and result.fingerprint==fingerprint, 'invalid signature'
            assert 'format' in data and data['format']==self.dataformat, "file format not supported"
            assert 'version' in data and data['version'][0]<=self.version[0], "file version not supported"
            assert 'fields' in data , "fields missing"
            fields = data['fields']
            self.rows = data['data']
        # Partition the file's fields into known columns and (if ignoring
        # extras is allowed) unknown ones; otherwise fail on the first
        # unexpected field.
        columns, unknown = [], []
        for field in fields:
            if field in self.columns: columns.append(field)
            elif self.ignore: unknown.append(field)
            else: assert False, "unknown field '%s'" % field
        if self.required:
            for field in self.required:
                assert field in columns, "missing required field '%s'" % field
        self.fields = fields
        self.read_columns = (columns,unknown)
    elif mode=='w':
        assert self.fileformat in ('json','jsondict') or self.file, 'file missing'
        if self.fileformat=='csv':
            # When encrypting/signing, write to an in-memory buffer first;
            # presumably the GPG wrapping happens on close — confirm.
            if encrypt or sign: self.file = StringIO()
            else: self.file = f
            if not PY3:
                import unicodecsv
                writer = unicodecsv.writer
            else:
                writer = csv.writer
            self.csv = writer(self.file,lineterminator='\n',dialect=self.dialect)
            # Emit the preamble row and the column header row.
            self.csv.writerow((self.dataformat,'%i.%i' % tuple(self.version)))
            self.csv.writerow(self.columns)
        else: # self.fileformat in ('json','jsondict'):
            self.rows = []
class TSaslClientTransport(TTransportBase, CReadableTransport):
    """
    SASL transport
    """

    START = 1
    OK = 2
    BAD = 3
    ERROR = 4
    COMPLETE = 5

    def __init__(self, transport, host, service, mechanism='GSSAPI',
                 **sasl_kwargs):
        """
        transport: an underlying transport to use, typically just a TSocket
        host: the name of the server, from a SASL perspective
        service: the name of the server's service, from a SASL perspective
        mechanism: the name of the preferred mechanism to use

        All other kwargs will be passed to the puresasl.client.SASLClient
        constructor.
        """
        from puresasl.client import SASLClient

        self.transport = transport
        self.sasl = SASLClient(host, service, mechanism, **sasl_kwargs)
        self.__send_buf = StringIO()
        self.__recv_buf = StringIO()

    def open(self):
        if not self.transport.isOpen():
            self.transport.open()

        # Start the handshake: announce the chosen mechanism, then send the
        # client's initial response.
        self.send_sasl_msg(self.START, self.sasl.mechanism)
        self.send_sasl_msg(self.OK, self.sasl.process())

        # Exchange challenge/response messages until the server reports
        # completion (or an error status).
        while True:
            status, challenge = self.recv_sasl_msg()
            if status == self.COMPLETE:
                if not self.sasl.complete:
                    raise TTransportException(
                        "The server erroneously indicated "
                        "that SASL negotiation was complete")
                break
            if status != self.OK:
                raise TTransportException(
                    "Bad SASL negotiation status: %d (%s)"
                    % (status, challenge))
            self.send_sasl_msg(self.OK, self.sasl.process(challenge))

    def send_sasl_msg(self, status, body):
        # SASL message: 1-byte status + 4-byte big-endian length + body.
        self.transport.write(pack(">BI", status, len(body)) + body)
        self.transport.flush()

    def recv_sasl_msg(self):
        status, length = unpack(">BI", self.transport.readAll(5))
        payload = self.transport.readAll(length) if length > 0 else ""
        return status, payload

    def write(self, data):
        self.__send_buf.write(data)

    def flush(self):
        # Wrap (encrypt/integrity-protect) the buffered data and send it as
        # one length-prefixed frame.
        wrapped = self.sasl.wrap(self.__send_buf.getvalue())
        self.transport.write(pack("!i", len(wrapped)) + wrapped)
        self.transport.flush()
        self.__send_buf = StringIO()

    def read(self, sz):
        # Serve from the current decoded frame if possible; otherwise fetch
        # and unwrap the next frame first.
        chunk = self.__recv_buf.read(sz)
        if chunk:
            return chunk
        self._read_frame()
        return self.__recv_buf.read(sz)

    def _read_frame(self):
        length, = unpack('!i', self.transport.readAll(4))
        wrapped = self.transport.readAll(length)
        self.__recv_buf = StringIO(self.sasl.unwrap(wrapped))

    def close(self):
        self.sasl.dispose()
        self.transport.close()

    # based on TFramedTransport
    @property
    def cstringio_buf(self):
        return self.__recv_buf

    def cstringio_refill(self, prefix, reqlen):
        # fastbinary only asks for a refill once the previous buffer is
        # empty, so whole frames can be decoded here immediately.
        while len(prefix) < reqlen:
            self._read_frame()
            prefix += self.__recv_buf.getvalue()
        self.__recv_buf = StringIO(prefix)
        return self.__recv_buf
def _getInputFromUser(param):
    """
    this private func reads the data from the user
    for the given param

    Resolution order: USE_DEFAULT short-circuits to DEFAULT_VALUE; otherwise
    the value comes from commandLineValues when present, else from an
    interactive prompt.  The value is processed and validated in a loop until
    it passes (or the user accepts a loosely-validated value), then stored in
    controller.CONF[param.CONF_NAME].
    """
    loop = True
    userInput = None

    try:
        if param.USE_DEFAULT:
            # Non-interactive: take the default verbatim (masked in logs).
            logging.debug("setting default value (%s) for key (%s)" % (mask(param.DEFAULT_VALUE), param.CONF_NAME))
            controller.CONF[param.CONF_NAME] = param.DEFAULT_VALUE
        else:
            while loop:
                # If the value was not supplied by the command line flags
                if param.CONF_NAME not in commandLineValues:
                    # Build the prompt text: base prompt, optional option
                    # list, optional default, then ": ".
                    message = StringIO()
                    message.write(param.PROMPT)

                    val_list = param.VALIDATORS or []
                    # Option list is only shown when the param is not
                    # regexp-validated (free-form input).
                    if (validators.validate_regexp not in val_list
                            and param.OPTION_LIST):
                        message.write(" [%s]" % "|".join(param.OPTION_LIST))

                    if param.DEFAULT_VALUE:
                        message.write(" [%s] " % (str(param.DEFAULT_VALUE)))

                    message.write(": ")
                    message.seek(0)
                    # mask password or hidden fields
                    # NOTE(review): the masked path prompts with PROMPT only,
                    # discarding the options/default appended above — confirm
                    # that is intentional.
                    if (param.MASK_INPUT):
                        userInput = getpass.getpass("%s :" % (param.PROMPT))
                    else:
                        # NOTE(review): raw_input is Python 2; presumably
                        # aliased for Python 3 elsewhere in the module.
                        userInput = raw_input(message.read())
                else:
                    userInput = commandLineValues[param.CONF_NAME]

                # If DEFAULT_VALUE is set and user did not input anything
                if userInput == "" and len(str(param.DEFAULT_VALUE)) > 0:
                    userInput = param.DEFAULT_VALUE

                # Param processing
                userInput = process_param_value(param, userInput)

                # If param requires validation
                try:
                    validate_param_value(param, userInput)
                    controller.CONF[param.CONF_NAME] = userInput
                    loop = False
                except ParamValidationError:
                    if param.LOOSE_VALIDATION:
                        # If validation failed but LOOSE_VALIDATION is true, ask user
                        answer = _askYesNo("User input failed validation, "
                                           "do you still wish to use it")
                        loop = not answer
                        if answer:
                            # User accepted the invalid value: store it and
                            # move on.
                            controller.CONF[param.CONF_NAME] = userInput
                            continue
                        else:
                            # User rejected it: drop any CLI-supplied value
                            # so the next iteration prompts interactively.
                            if param.CONF_NAME in commandLineValues:
                                del commandLineValues[param.CONF_NAME]
                    else:
                        # Delete value from commandLineValues so that we will prompt the user for input
                        if param.CONF_NAME in commandLineValues:
                            del commandLineValues[param.CONF_NAME]
                        loop = True
    except KeyboardInterrupt:
        # add the new line so messages wont be displayed in the same line as the question
        print("")
        raise
    except Exception:
        logging.error(traceback.format_exc())
        raise Exception(output_messages.ERR_EXP_READ_INPUT_PARAM % (param.CONF_NAME))