import numpy as np
from numba.cpython import unicode
from numba.cpython.unicode import (_empty_string, _set_code_point,
                                   PY_UNICODE_1BYTE_KIND)

# Code points for '0' and '.', used when rendering digits below.
# NOTE: `get_code(s, i)` (read the i-th code point of `s`) is assumed
# to be defined elsewhere in this module.
DIGITS_START = ord('0')  # 48
DOT = ord('.')           # 46


def str_impl(s):
    # Copy `s` code point by code point into a fresh string of the
    # same kind (1/2/4-byte) and ASCII-ness.
    n = len(s)
    kind = s._get_kind()
    is_ascii = kind == 1 and s.isascii()
    result = unicode._empty_string(kind, n, is_ascii)
    for i in range(n):
        code = get_code(s, i)
        unicode._set_code_point(result, i, code)
    return result
def tostr_impl(s):
    # Same copy loop, but always build a 1-byte (Latin-1) result.
    n = len(s)
    is_ascii = s.isascii()
    result = unicode._empty_string(unicode.PY_UNICODE_1BYTE_KIND, n, is_ascii)
    for i in range(n):
        code = get_code(s, i)
        unicode._set_code_point(result, i, code)
    return result
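# A runnable sketch of the same copy-loop pattern on a plain numba
# string, assuming the numba internals imported above; numba's
# `_get_code_point` stands in for this module's `get_code`.
from numba import njit
from numba.cpython.unicode import _get_code_point

@njit
def _copy_string_demo(s):
    n = len(s)
    result = unicode._empty_string(s._kind, n, s._is_ascii)
    for i in range(n):
        unicode._set_code_point(result, i, _get_code_point(s, i))
    return result

assert _copy_string_demo("héllo") == "héllo"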
def int_to_str(x):
    # Render a positive integer: count its digits, then write them
    # most-significant first.  NOTE: x <= 0 is not handled; x == 0
    # would come back as an empty string.
    l = 0
    _x = x
    while _x > 0:
        _x = _x // 10
        l += 1
    s = _empty_string(PY_UNICODE_1BYTE_KIND, l)
    for i in range(l):
        digit = x % 10
        _set_code_point(s, l - i - 1, digit + DIGITS_START)
        x = x // 10
    return s
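# Usage sketch: int_to_str uses only jit-able numba internals, so it
# can be compiled directly with @njit; assumes a positive input per the
# note above, and that the internals behave as in current numba
# releases.
from numba import njit

_int_to_str_demo = njit(int_to_str)
assert _int_to_str_demo(1024) == '1024'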
def float_to_str(x):
    # Render a non-negative finite float: integer-part digits, a '.',
    # then the fractional digits.  Negative finite values are not
    # handled, and the digit counting below is subject to
    # floating-point round-off.
    if x == np.inf:
        return 'inf'
    elif x == -np.inf:
        return '-inf'
    # Count integer-part digits (l1) and fractional digits (l2);
    # l2 starts at -1 to offset the counting loop's final pass, which
    # consumes the last nonzero digit.
    l1, l2 = 0, -1
    _x = x
    while _x > 0:
        _x = _x // 10
        l1 += 1
    _x = x % 10
    while _x > 1e-10:
        _x = (_x * 10) % 10
        l2 += 1
    l2 = max(1, l2)  # always emit at least one fractional digit
    l = l1 + l2 + 1  # +1 for the '.'
    s = _empty_string(PY_UNICODE_1BYTE_KIND, l)
    # Integer part, most-significant digit first.
    _x = x
    for i in range(l1):
        digit = int(_x % 10)
        _set_code_point(s, l1 - i - 1, digit + DIGITS_START)
        _x = _x // 10
    _set_code_point(s, l1, DOT)
    # Fractional part.
    _x = x % 10
    for i in range(l2):
        _x = (_x * 10) % 10
        digit = int(_x)
        _set_code_point(s, l1 + i + 1, digit + DIGITS_START)
    return s
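# Usage sketch for float_to_str under the same assumptions; expect
# round-off artifacts on values whose fractions are not exactly
# representable in binary.
from numba import njit

_float_to_str_demo = njit(float_to_str)
assert _float_to_str_demo(3.5) == '3.5'
assert _float_to_str_demo(np.inf) == 'inf'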