def strip_line(clazz, text, strip_head = False, strip_tail = False):
  'Strip comments from one line.'
  buf = StringIO()
  for token in string_lexer.tokenize(text, 'comments_strip_line', options = string_lexer.KEEP_QUOTES):
    if token.token_type not in [ string_lexer.TOKEN_DONE, string_lexer.TOKEN_COMMENT ]:
      buf.write(token.value)
  return string_util.strip_ends(buf.getvalue(), strip_head = strip_head, strip_tail = strip_tail)
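# Usage sketch (hypothetical class name; assumes the lexer's COMMENT_CHAR of '#'
# as shown in the string_lexer class below):
#
#   comments.strip_line('kiwi = 6  # number of kiwis', strip_tail = True)
#   # -> 'kiwi = 6'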
def merge(clazz, lines):
  'Merge a sequence of lines into one.  Continuation flags are cleared.'
  buf = StringIO()
  for line in lines:
    text = string_util.remove_tail(line.text, clazz.CONTINUATION_CHAR)
    buf.write(text)
  return clazz(lines[0].line_number, buf.getvalue())
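# Usage sketch (hypothetical class name and values; assumes CONTINUATION_CHAR is
# the backslash):
#
#   a = text_line(1, 'foo \\')
#   b = text_line(2, 'bar')
#   text_line.merge([ a, b ])
#   # -> text_line(1, 'foo bar')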
def checksum(self):
  'Return a checksum of the files and file checksums themselves.'
  buf = StringIO()
  for value in self:
    buf.write(value.filename)
    buf.write(value.checksum)
  return hashlib.sha256(buf.getvalue().encode('utf-8')).hexdigest()
def to_string(self):
  buf = StringIO()
  buf.write('# %s\n' % (self.name))
  buf.write('name: %s\n' % (self.name))
  buf.write('unixpath: %s\n' % (':'.join(self.unixpath)))
  buf.write('pythonpath: %s\n' % (':'.join(self.pythonpath)))
  buf.write('requires: %s\n' % (' '.join(sorted(self.requires))))
  return buf.getvalue()
def _spacify(clazz, s):
  'Return s with every character except newlines replaced by a space.'
  buf = StringIO()
  for c in s:
    if c == '\n':
      buf.write(c)
    else:
      buf.write(' ')
  return buf.getvalue()
def _buf_to_str(clazz, buf, col_width):
  'Read up to col_width chars from buf and return them stripped, or None if nothing remains.'
  col_buf = StringIO()
  for i in range(0, col_width):
    c = buf.read(1)
    if c:
      col_buf.write(c)
    else:
      break
  return col_buf.getvalue().strip() or None
def to_string(self, delimiter = ' '):
  buf = StringIO()
  first = True
  for req in iter(self):
    if not first:
      buf.write(delimiter)
    first = False
    buf.write(str(req))
  return buf.getvalue()
def to_string(self, delimiter = '=', value_delimiter = ';', quote = False):
  buf = StringIO()
  first = True
  for kv in iter(self):
    if not first:
      buf.write(value_delimiter)
    first = False
    buf.write(kv.to_string(delimiter = delimiter, quote_value = quote))
  return buf.getvalue()
def _to_string_with_mask(self, depth, indent, quote):
  spaces = depth * indent * ' '
  buf = StringIO()
  buf.write(spaces)
  buf.write(self.mask)
  buf.write(value_parsing.MASK_DELIMITER)
  buf.write(' ')
  buf.write(self.value_to_string(quote = quote))
  return buf.getvalue()
def to_string(self, delimiter = ' '):
  buf = StringIO()
  first = True
  for item in iter(self):
    if not first:
      buf.write(delimiter)
    first = False
    buf.write(str(item))
  return buf.getvalue()
def __str__(self):
  buf = StringIO()
  if self.epoch != 0:
    buf.write(str(self.epoch))
    buf.write(':')
  buf.write(str(self.upstream_version))
  if self.revision != 0:
    buf.write('-')
    buf.write(str(self.revision))
  return buf.getvalue()
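# Usage sketch (hypothetical constructor; follows the epoch:upstream-revision
# layout produced above):
#
#   str(version(epoch = 0, upstream_version = '1.2.3', revision = 1))  # -> '1.2.3-1'
#   str(version(epoch = 2, upstream_version = '1.2.3', revision = 0))  # -> '2:1.2.3'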
def dumps(d, delimiter = '\n'):
  if not d:
    return ''
  buf = StringIO()
  longest_key = max([ len(key) for key in d.keys() ])
  fmt = '%%%ds: %%s' % (longest_key)
  for k, v in sorted(d.items()):
    buf.write(fmt % (k, v))
    buf.write(delimiter)
  return buf.getvalue()
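# Usage sketch (derived from the formatting above; keys are right justified to the
# longest key and entries are sorted):
#
#   dumps({ 'a': 1, 'bb': 2 })
#   # -> ' a: 1\nbb: 2\n'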
def replace_punctuation(clazz, s, replacement):
  'Replace punctuation in s with replacement.'
  buf = StringIO()
  for c in s:
    if c in string.punctuation:
      if replacement:
        buf.write(replacement)
    else:
      buf.write(c)
  return buf.getvalue()
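# Usage sketch (hypothetical class name; per the docstring, punctuation is replaced,
# or dropped when replacement is falsy):
#
#   string_util.replace_punctuation('kiwi, lemon!', '_')   # -> 'kiwi_ lemon_'
#   string_util.replace_punctuation('kiwi, lemon!', None)  # -> 'kiwi lemon'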
def name_from_address(clazz, address):
  'Return the project name part of a git address.'
  if not address.endswith('.git'):
    raise ValueError('not a git address: %s' % (address))
  buf = StringIO()
  for c in string_util.reverse(address):
    if c in ':/':
      break
    buf.write(c)
  last_part = string_util.reverse(buf.getvalue())
  return string_util.remove_tail(last_part, '.git')
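# Usage sketch (hypothetical class name and addresses; derived from the logic above):
#
#   git_util.name_from_address('git@example.com:myorg/myproject.git')       # -> 'myproject'
#   git_util.name_from_address('https://example.com/myorg/myproject.git')   # -> 'myproject'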
def __str__(self):
  buf = StringIO()
  buf.write(self.action.rjust(2))
  buf.write(' ')
  buf.write(self.filename)
  for i, arg in enumerate(self.args):
    if i == 0:
      buf.write(' ')
    assert string_util.is_string(arg)
    buf.write(arg)
  return buf.getvalue()
def to_string(clazz, l, delimiter = ';', quote = False):
  buf = StringIO()
  first = True
  for s in iter(l):
    if not compat.is_string(s):
      raise TypeError('not a string: %s - %s' % (str(s), type(s)))
    if not first:
      buf.write(delimiter)
    first = False
    if quote:
      s = string_util.quote_if_needed(s)
    buf.write(s)
  return buf.getvalue()
def to_string_colon_format(self):
  req_no_system_mask = self.clone_replace_system_mask(None)
  buf = StringIO()
  if self.system_mask:
    buf.write(self.system_mask)
  else:
    buf.write('all')
  buf.write(': ')
  buf.write(str(req_no_system_mask))
  return buf.getvalue()
def _to_string_one_line(self, depth, indent):
  assert len(self.values) == 1
  assert self.values[0].mask is None
  spaces = depth * indent * ' '
  buf = StringIO()
  buf.write(spaces)
  buf.write(self.key)
  buf.write(':')
  buf.write(' ')
  buf.write(self.values[0].value_to_string())
  return buf.getvalue()
def to_string(self, depth = 0, indent = 2, data_func = None):
  buf = StringIO()
  buf.write(' ' * depth)
  if data_func:
    data_str = data_func(self.data)
  else:
    data_str = str(self.data)
  buf.write(data_str)
  buf.write('\n')
  for child in self.children:
    buf.write(child.to_string(depth + indent, data_func = data_func))
  return buf.getvalue()
def value_to_string(self, quote, include_properties = True):
  buf = StringIO()
  ps = None
  for i, value in enumerate(self):
    if i != 0:
      buf.write(' ')
    buf.write(value.value_to_string(quote, include_properties = False))
    # remember the properties string of the first value that has one
    if ps is None:
      ps = value.properties_to_string()
  if include_properties and ps:
    buf.write(' ')
    buf.write(ps)
  return buf.getvalue()
def fit_line(clazz, text, width):
  'Split text into lines that each fit within width columns.'
  assert '\n' not in text
  lines = []
  buf = StringIO()
  for token in lexer.tokenize(text, 'text_fit', options = lexer.KEEP_QUOTES | lexer.IGNORE_COMMENTS):
    if token.token_type == lexer.TOKEN_SPACE:
      # a space that would overflow the width starts a new line instead
      if (buf.tell() + len(token.value)) > width:
        lines.append(buf.getvalue().strip())
        buf = StringIO()
      else:
        buf.write(token.value)
    elif token.token_type == lexer.TOKEN_STRING:
      # a word that would overflow the width flushes the current line first
      if (buf.tell() + len(token.value)) > width:
        lines.append(buf.getvalue().strip())
        buf = StringIO()
      buf.write(token.value)
    elif token.token_type == lexer.TOKEN_DONE:
      if buf.tell() > 0:
        lines.append(buf.getvalue().strip())
  return lines
def value_to_string(self, quote, include_properties = True):
  buf = StringIO()
  buf.write(path.basename(self._filename))
  buf.write(' ')
  buf.write(self._dst_filename)
  self._append_properties_string(buf, include_properties)
  return buf.getvalue()
def _to_string_empty(self, depth, indent):
  assert len(self.values) == 0
  spaces = depth * indent * ' '
  buf = StringIO()
  buf.write(spaces)
  buf.write(self.key)
  buf.write(':')
  return buf.getvalue()
def to_string(self, delimiter = '=', quote_value = False):
  buf = StringIO()
  buf.write(str(self.key))
  buf.write(delimiter)
  value = str(self.value)
  if quote_value:
    value = string_util.quote_if_needed(value)
  buf.write(value)
  return buf.getvalue()
def bytes_to_string(clazz, b):
  'Return b as a string of space separated hex byte pairs.'
  s = codecs.encode(b, 'hex').decode('ascii')
  assert (len(s) % 2) == 0
  buf = StringIO()
  for i in range(0, len(s), 2):
    if i != 0:
      buf.write(' ')
    buf.write(s[i])
    buf.write(s[i + 1])
  return buf.getvalue()
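# Usage sketch (hypothetical class name; derived from the hex formatting above):
#
#   hex_util.bytes_to_string(b'\x01\xab\xff')  # -> '01 ab ff'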
def add_line_numbers(clazz, text, delimiter = '|'):
  lines = text.split('\n')
  width = math.trunc(math.log10(len(lines)) + 1)
  fmt = '%%%dd' % (width)
  buf = StringIO()
  for line_number, line in enumerate(lines, 1):
    buf.write(fmt % (line_number))
    buf.write(delimiter)
    buf.write(str(line))
    buf.write('\n')
  return buf.getvalue()
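# Usage sketch (hypothetical class name; line numbers are right justified to the
# width of the largest number):
#
#   text_util.add_line_numbers('kiwi\napple\nlemon')
#   # -> '1|kiwi\n2|apple\n3|lemon\n'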
def __str__(self):
  max_len = 0
  for y in range(0, self.height):
    for x in range(0, self.width):
      max_len = max(len(str(self._rows[y][x])), max_len)
  buf = StringIO()
  for y in range(0, self.height):
    for x in range(0, self.width):
      buf.write(string_util.right_justify(str(self._rows[y][x]), max_len))
      buf.write(' ')
    buf.write('\n')
  return buf.getvalue()
def to_string(self, strip_comments = False):
  buf = StringIO()
  for line in self._lines:
    buf.write(line.get_text(strip_comments = strip_comments))
    buf.write(self._delimiter)
  v = buf.getvalue()
  if self._ends_with_delimiter:
    if v and v[-1] != self._delimiter:
      v = v + self._delimiter
  else:
    if v and v[-1] == self._delimiter:
      v = v[0:-1]
  return v
def replace_white_space(clazz, s, replacement):
  'Replace white space sequences in s with replacement.'
  buf = StringIO()
  STATE_CHAR = 1
  STATE_SPACE = 2
  state = STATE_CHAR
  for c in s:
    if state == STATE_CHAR:
      if c.isspace():
        buf.write(replacement)
        state = STATE_SPACE
      else:
        buf.write(c)
    elif state == STATE_SPACE:
      if not c.isspace():
        buf.write(c)
        state = STATE_CHAR
  return buf.getvalue()
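# Usage sketch (hypothetical class name; each run of white space collapses to a
# single replacement, per the state machine above):
#
#   string_util.replace_white_space('kiwi   apple \t lemon', '_')  # -> 'kiwi_apple_lemon'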
def to_string(self, strip_rows = False):
  buf = StringIO()
  col_widths = self.column_widths()
  if self._labels:
    for x in range(0, self._table.width):
      self._write_label(x, buf, col_widths)
    buf.write('\n')
  for y in range(0, self._table.height):
    row = self._table.row(y)
    assert len(row) == len(col_widths)
    row_buf = StringIO()
    for x in range(0, self._table.width):
      self._write_cell(x, y, row_buf, col_widths)
    row_str = row_buf.getvalue()
    if strip_rows:
      row_str = row_str.strip()
    buf.write(row_str)
    buf.write('\n')
  value = buf.getvalue()
  # remove the trailing new line
  return value[0:-1]
def __str__(self):
  buf = StringIO()
  for i, item in enumerate(self._stack):
    if i != 0:
      buf.write('/')
    buf.write(str(item.depth or 0))
    buf.write(':')
    buf.write(item.data.text)
    buf.write(':')
    buf.write(str(item.data.line_number))
  return buf.getvalue()
def __str__(self):
  buf = StringIO()
  for step in self._steps:
    buf.write(str(step))
    buf.write('\n')
  return buf.getvalue().strip()
def value_to_string(self, quote, include_properties = True):
  buf = StringIO()
  buf.write(self.__class__.__name__)
  self._append_properties_string(buf, include_properties)
  return buf.getvalue()
def to_string(self, depth, indent):
  spaces = depth * indent * ' '
  buf = StringIO()
  buf.write(spaces)
  buf.write(self.name)
  buf.write('\n')
  for value in self.values:
    buf.write(spaces)
    buf.write(indent * ' ')
    buf.write(value.to_string(depth = depth + 1))
    buf.write('\n')
  return buf.getvalue().strip()
def string_to_bytes(clazz, s):
  'Return bytes decoded from a string of hex byte pairs, ignoring white space.'
  buf = StringIO()
  for c in s:
    if not c.isspace():
      buf.write(c)
  return codecs.decode(buf.getvalue(), 'hex')
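# Usage sketch (hypothetical class name; the inverse of bytes_to_string above):
#
#   hex_util.string_to_bytes('01 ab ff')  # -> b'\x01\xab\xff'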
def __str__(self):
  buf = StringIO()
  buf.write(self.HEADER)
  buf.write('BES_VERSION = u\'%s\'\n' % (self.version))
  buf.write('BES_AUTHOR_NAME = u\'%s\'\n' % (self.author_name))
  buf.write('BES_AUTHOR_EMAIL = u\'%s\'\n' % (self.author_email))
  buf.write('BES_ADDRESS = u\'%s\'\n' % (self.address))
  buf.write('BES_TAG = u\'%s\'\n' % (self.tag))
  buf.write('BES_TIMESTAMP = u\'%s\'\n' % (self.timestamp))
  return buf.getvalue()
def __str__(self):
  buf = StringIO()
  buf.write(self.name)
  if self.is_folder:
    buf.write('/')
  return buf.getvalue()
def __str__(self):
  buf = StringIO()
  for value in self._values:
    buf.write(str(value))
    buf.write(';')
  return buf.getvalue()
class string_lexer(string_lexer_options.CONSTANTS):

  TOKEN_COMMENT = 'comment'
  TOKEN_DONE = 'done'
  TOKEN_SPACE = 'space'
  TOKEN_STRING = 'string'

  EOS = '\0'

  SINGLE_QUOTE_CHAR = '\''
  DOUBLE_QUOTE_CHAR = '"'
  COMMENT_CHAR = '#'

  def __init__(self, log_tag, options):
    log.add_logging(self, tag = log_tag)
    self._options = options or self.DEFAULT_OPTIONS
    self._keep_quotes = (self._options & self.KEEP_QUOTES) != 0
    self._escape_quotes = (self._options & self.ESCAPE_QUOTES) != 0
    self._ignore_comments = (self._options & self.IGNORE_COMMENTS) != 0
    self._buffer = None
    self._is_escaping = False
    self._last_char = None
    self.STATE_BEGIN = string_lexer_state_begin(self)
    self.STATE_DONE = string_lexer_state_done(self)
    self.STATE_STRING = string_lexer_state_string(self)
    self.STATE_SPACE = string_lexer_state_space(self)
    self.STATE_SINGLE_QUOTED_STRING = string_lexer_state_single_quoted_string(self)
    self.STATE_DOUBLE_QUOTED_STRING = string_lexer_state_double_quoted_string(self)
    self.STATE_COMMENT = string_lexer_state_comment(self)
    self.state = self.STATE_BEGIN

  @property
  def ignore_comments(self):
    return self._ignore_comments

  @property
  def is_escaping(self):
    return self._is_escaping

  def _run(self, text):
    self.log_d('_run() text=\"%s\" options=%s)' % (text, str(string_lexer_options(self._options))))
    assert self.EOS not in text
    self.position = point(1, 1)
    for c in self.__chars_plus_eos(text):
      self._is_escaping = self._last_char == '\\'
      should_handle_char = (self._is_escaping and c == '\\') or (c != '\\')
      if should_handle_char:
        tokens = self.state.handle_char(c)
        for token in tokens:
          self.log_d('tokenize: new token: %s' % (str(token)))
          yield token
      self._last_char = c
      # track the position of the current character
      if c == '\n':
        self.position = point(1, self.position.y + 1)
      else:
        self.position = point(self.position.x + 1, self.position.y)
    assert self.state == self.STATE_DONE
    yield lexer_token(self.TOKEN_DONE, None, self.position)

  @classmethod
  def tokenize(clazz, text, log_tag, options = None):
    return clazz(log_tag, options)._run(text)

  @classmethod
  def char_to_string(clazz, c):
    if c == clazz.EOS:
      return 'EOS'
    else:
      return c

  def change_state(self, new_state, c):
    assert new_state
    if new_state == self.state:
      return
    self.log_d('transition: %20s -> %-20s; %s' % (self.state.__class__.__name__,
                                                  new_state.__class__.__name__,
                                                  new_state._make_log_attributes(c, include_state = False)))
    self.state = new_state

  @classmethod
  def __chars_plus_eos(clazz, text):
    for c in text:
      yield c
    yield clazz.EOS

  def make_token_string(self):
    return lexer_token(self.TOKEN_STRING, self.buffer_value(), self.position)

  def make_token_space(self):
    return lexer_token(self.TOKEN_SPACE, self.buffer_value(), self.position)

  def make_token_comment(self):
    return lexer_token(self.TOKEN_COMMENT, self.buffer_value(), self.position)

  def buffer_reset(self, c = None):
    self._buffer = StringIO()
    if c:
      self.buffer_write(c)

  def buffer_reset_with_quote(self, c):
    assert c in [ self.SINGLE_QUOTE_CHAR, self.DOUBLE_QUOTE_CHAR ]
    self.buffer_reset()
    self.buffer_write_quote(c)

  def buffer_write(self, c):
    assert c != self.EOS
    self._buffer.write(c)

  def buffer_value(self):
    return self._buffer.getvalue()

  def buffer_write_quote(self, c):
    assert c in [ self.SINGLE_QUOTE_CHAR, self.DOUBLE_QUOTE_CHAR ]
    if self._keep_quotes:
      if self._escape_quotes:
        self.buffer_write('\\')
      self.buffer_write(c)
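# Usage sketch for the tokenize() entry point above (the exact token stream depends
# on the state classes, which are defined elsewhere):
#
#   for token in string_lexer.tokenize('kiwi # fruit', 'demo', options = string_lexer.KEEP_QUOTES):
#     print(token.token_type, token.value)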
def __str__(self):
  buf = StringIO()
  if self.hardness:
    buf.write(str(self.hardness))
    buf.write(' ')
  buf.write(self.name)
  if self.system_mask and self.system_mask != 'all':
    buf.write('(')
    buf.write(self.system_mask)
    buf.write(')')
  if self.operator:
    buf.write(' ')
    buf.write(self.operator)
    buf.write(' ')
    buf.write(self.version)
  return buf.getvalue()
def _to_string_no_mask(self, depth, indent, quote):
  spaces = depth * indent * ' '
  buf = StringIO()
  buf.write(spaces)
  buf.write(self.value_to_string(quote = quote))
  return buf.getvalue()
def _to_string_multi_line(self, depth, indent):
  assert len(self.values) > 0
  spaces = depth * indent * ' '
  buf = StringIO()
  buf.write(spaces)
  buf.write(self.key)
  buf.write('\n')
  for value in self.values:
    buf.write(spaces)
    buf.write(indent * ' ')
    buf.write(str(value))
    buf.write('\n')
  return buf.getvalue().strip()
def descriptor_map_to_string(self):
  buf = StringIO()
  for k, v in self._descriptor_map.items():
    buf.write('%s: %s\n' % (k, str(v)))
  return buf.getvalue().strip()
def _write_retry_script(clazz, command, env, script):
  from bes.compat import StringIO
  s = StringIO()
  s.write('#!/bin/bash\n')
  s.write('mkdir -p %s\n' % (script.staged_files_dir))
  items = sorted(env.items())
  last_item = items.pop(-1)

  def _item_str(key, value, slash):
    return '%s=\"%s\"%s\n' % (key, value, slash)

  # every env assignment except the last ends with a bash line continuation
  for key, value in items:
    s.write(_item_str(key, value, '\\'))
  s.write(_item_str(last_item[0], last_item[1], ''))
  if string_util.is_string(command):
    s.write(command)
  else:
    s.write(' '.join(command))
  content = s.getvalue()
  file_path = path.join(script.build_dir, clazz.RETRY_SCRIPT_FILENAME)
  file_util.save(file_path, content = content, mode = 0o755)
  return file_path
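# Sketch of the generated script content (hypothetical values; the staged files
# dir placeholder stands in for script.staged_files_dir). For
# env = { 'CC': 'gcc', 'DEBUG': '1' } and command = 'make', the code above writes:
#
#   #!/bin/bash
#   mkdir -p <staged_files_dir>
#   CC="gcc"\
#   DEBUG="1"
#   make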
def __str__(self):
  buf = StringIO()
  for inst in self._values:
    buf.write(inst.to_string(depth = 2))
    buf.write('\n\n')
  return buf.getvalue().strip()
def instructions(self, env):
  'Compute env instructions by diffing the shell environment before and after sourcing files_abs.'
  buf = StringIO()
  buf.write('#!/bin/bash\n')
  buf.write('echo "----1----"\n')
  buf.write('declare -px\n')
  buf.write('echo "----2----"\n')
  for f in self.files_abs:
    buf.write('source \"%s\"\n' % (f))
  buf.write('echo "----3----"\n')
  buf.write('declare -px\n')
  buf.write('echo "----4----"\n')
  script = temp_file.make_temp_file(content = buf.getvalue(), delete = not self.DEBUG)
  if self.DEBUG:
    print('env_dir: script=%s' % (script))
  os.chmod(script, 0o755)
  try:
    rv = execute.execute(script, raise_error = True, shell = True, env = env)
  finally:
    if not self.DEBUG:
      file_util.remove(script)
  parser = text_line_parser(rv.stdout)
  if rv.stderr:
    raise RuntimeError(rv.stderr)
  # environment before and after sourcing the files
  env1 = self._parse_env_lines(parser.cut_lines('----1----', '----2----'))
  env2 = self._parse_env_lines(parser.cut_lines('----3----', '----4----'))
  delta = self._env_delta(env1, env2)
  instructions = []
  for key in delta.added:
    instructions.append(instruction(key, env2[key], action.SET))
  for key in delta.removed:
    instructions.append(instruction(key, None, action.UNSET))
  for key in delta.changed:
    value1 = env1[key]
    value2 = env2[key]
    for inst in self._determine_change_instructions(key, value1, value2):
      instructions.append(inst)
  return sorted(instructions, key = lambda x: ( x.key, x.value ))