def test_fast_int_given_float_returns_int(x):
    """A finite float is truncated to its integer part by fast_int."""
    # Only finite floats have a well-defined integer conversion.
    assume(not (math.isnan(x) or math.isinf(x)))
    expected = int(x)
    assert fastnumbers.fast_int(x) == expected
    assert fastnumbers.fast_int(x, raise_on_invalid=True) == expected
    assert fastnumbers.fast_int(x, None, True) == expected
    assert isinstance(fastnumbers.fast_int(x), (int, long))
Example #2
0
    def _parse_data_coords(self, indices):
        """Return [start, end] byte coordinates for each requested index.

        Positions are read from the ".cc" file handle; each entry is
        (mccl + 1) bytes wide.  Values already read during this call are
        reused via a small local cache, so consecutive indices share the
        boundary lookup.
        """
        mccl = self.__stats[".mccl"] + 1
        cc_handle = self.__file_handles[".cc"]
        cache = {}

        def lookup(idx):
            # Entry idx starts at byte idx * mccl and is mccl bytes wide.
            if idx not in cache:
                begin = idx * mccl
                cache[idx] = fastnumbers.fast_int(
                    cc_handle[begin:(begin + mccl)].rstrip())
            return cache[idx]

        # The end coordinate of index N is the start coordinate of N + 1.
        return [[lookup(index), lookup(index + 1)] for index in indices]
Example #3
0
    def info_id(self, post_id: str) -> Optional[base.InfoType]:
        """Return the stored info dict for *post_id*, or None if not cached.

        post_id must look like '<booru>-<id>', e.g. 'danbooru-1'.

        Raises:
            ValueError: if post_id is not a string of that form with a
                numeric id part.
        """
        try:
            # Exactly one '-' separating booru name and numeric id.
            _, numeric_id = post_id.split("-")
            # BUG FIX: validate only the numeric part.  The original passed
            # the whole '<booru>-<id>' string to fast_int, which can never
            # be a valid int, so every call raised ValueError.
            fast_int(numeric_id, raise_on_invalid=True)
        except (AttributeError, ValueError):
            raise ValueError("post_id for local clients must be in the form "
                             "'<booru>-<id>', e.g. 'danbooru-1'.")

        path = self.path / post_id / "info.json"
        try:
            return simplejson.loads(path.read_text())
        except FileNotFoundError:
            return None
Example #4
0
def sort_order(request, context):
    """
    Return the query'd sort order from the web page
    """
    if "sort" in request.GET:
        # A sort value was submitted: remember it in the session and
        # expose it to the template context (0 when not an integer).
        # pylint: disable=E1101
        sort_value = fastnumbers.fast_int(request.GET["sort"], 0)
        request.session["sort_order"] = sort_value
        context["sort_order"] = sort_value
        # pylint: enable=E1101
    else:
        # No explicit sort requested: fall back to the session (default 0).
        context["sort_order"] = request.session.get("sort_order", 0)
    return request, context
Example #5
0
    def invalid(self) -> Union[str, int, float, None]:
        """Property invalid value, coerced to the column's data type.

        Returns an int for integer-typed columns, a float for FLOAT
        columns, a string for everything else, and None when no invalid
        value is defined or it cannot be parsed.
        """
        if self.col_invalid is None:
            return None

        integer_types = (
            DataType.CHAR,
            DataType.UCHAR,
            DataType.SHORT,
            DataType.USHORT,
            DataType.INT,
            DataType.UINT,
        )

        if self.data_type in integer_types:
            try:
                return fast_int(self.col_invalid, raise_on_invalid=True)
            except ValueError:
                # Unparseable invalid markers are treated as absent.
                return None

        if self.data_type == DataType.FLOAT:
            try:
                return fast_float(self.col_invalid, raise_on_invalid=True)
            except ValueError:
                return None

        return str(self.col_invalid)
Example #6
0
    def _merge_chunk_files(self, f4_file_path, num_processes, line_length,
                           num_rows_per_save, tmp_dir_path):
        """Concatenate per-process chunk files into the final file.

        For each of the num_processes chunks, the companion
        "<i>_linesizes" file lists the byte length of every row in the
        chunk.  Rows are re-padded to line_length and written to
        f4_file_path in batches of num_rows_per_save; each chunk file is
        deleted once merged.
        """
        with open(f4_file_path, "wb") as f4_file:
            out_lines = []

            for i in range(num_processes):
                chunk_file_path = f"{tmp_dir_path}{i}"
                # A process may have produced no output; skip missing chunks.
                if not os.path.exists(chunk_file_path):
                    continue
                chunk_file = f4py.open_read_file(chunk_file_path)

                size_file_path = f"{tmp_dir_path}{i}_linesizes"

                with open(size_file_path, 'rb') as size_file:
                    position = 0
                    # One line in the size file per row in the chunk file.
                    for size_line in size_file:
                        size = fastnumbers.fast_int(size_line.rstrip(b"\n"))

                        out_line = chunk_file[position:(position + size)]
                        out_line = _format_string_as_fixed_width(
                            out_line, line_length)
                        out_lines.append(out_line)
                        position += size

                        # Flush in batches to bound memory usage.
                        if len(out_lines) % num_rows_per_save == 0:
                            f4_file.write(b"".join(out_lines))
                            out_lines = []

                chunk_file.close()
                os.remove(chunk_file_path)

            # Write whatever is left over from the last partial batch.
            if len(out_lines) > 0:
                f4_file.write(b"".join(out_lines))
def test_fast_int_given_int_string_returns_int(x):
    """A string repr of an int round-trips through fast_int in any base."""
    as_string = repr(x)
    assert fastnumbers.fast_int(as_string) == x
    assert isinstance(fastnumbers.fast_int(as_string), (int, long))
    # Avoid recursion error because of overly simple baseN function.
    if len(as_string) < 30:
        for base in range(2, 36 + 1):
            assert fastnumbers.fast_int(baseN(x, base), base=base) == x
    for literal, base in ((bin(x), 2), (oct(x), 8), (hex(x), 16)):
        assert fastnumbers.fast_int(literal, base=base) == x
        # base=0 auto-detects the prefix.
        assert fastnumbers.fast_int(literal, base=0) == x
Example #8
0
    def info_location(self, location: str) -> base.InfoGenType:
        """Yield info dicts for a URL: a direct post URL or a search query."""
        match = re.search(r"/posts/(\d+)\??.*$", location)

        if match is not None:
            # Direct post URL: exactly one result.
            yield self.info_id(match.group(1))
            return

        # Otherwise treat the URL's query string as search parameters;
        # parse_qs yields lists, so take the last value of each.
        params = parse_qs(urlparse(location).query)

        yield from self.info_search(
            tags=params.get("tags", [""])[-1],
            random=params.get("random", [False])[-1],
            raw=params.get("raw", [False])[-1],
            pages=fast_int(params.get("page", [1])[-1]),
            limit=fast_int(params.get("limit", [self.default_limit])[-1]))
Example #9
0
    def _get_column_meta(self, fltr, select_columns):
        """Resolve column names to indices, types, and byte coordinates.

        Returns a tuple (select_columns, column_index_dict,
        filter_column_type_dict, column_coords_dict) describing every
        column the filter or the selection touches.
        """
        if len(select_columns) == 0:
            # No explicit selection: read every column name from the .cn file.
            with f4py.Parser(self.data_file_path + ".cn",
                             fixed_file_extensions=["", ".cc"],
                             stats_file_extensions=[".ll",
                                                    ".mccl"]) as cn_parser:
                line_length = cn_parser.get_stat(".ll")
                coords = cn_parser._parse_data_coords([0, 1])

                # They are not in sorted order in the file, so we must put them in a dict and sort it.
                # NOTE(review): "cn_parser.__parse_row_values" is name-mangled
                # to "_<thisclass>__parse_row_values"; this only resolves if
                # cn_parser's class stores that attribute under the same
                # mangled name — confirm.
                column_index_dict = {}
                for row_index in range(self.get_num_cols()):
                    values = cn_parser.__parse_row_values(row_index, coords)

                    # values[1] is the column's position, values[0] its name.
                    column_index_dict[fastnumbers.fast_int(
                        values[1])] = values[0]

                select_columns = []
                for index, name in sorted(column_index_dict.items()):
                    select_columns.append(name)

                # Re-map: name -> its position in the sorted selection.
                column_index_dict = {
                    name: index
                    for index, name in enumerate(select_columns)
                }
        else:
            # Explicit selection: resolve just the filter and selected columns.
            select_columns = [x.encode() for x in select_columns]

            column_names_file_path = f"{self.data_file_path}.cn"

            with f4py.IndexHelper._get_index_parser(
                    column_names_file_path) as index_parser:
                column_index_dict = {
                    name:
                    self._get_column_index_from_name(index_parser,
                                                     name.decode())
                    for name in fltr.get_column_name_set()
                    | set(select_columns)
                }

        # Column types for every column the filter or selection touches.
        type_columns = fltr.get_column_name_set() | set(select_columns)
        filter_column_type_dict = {}
        for column_name in type_columns:
            column_index = column_index_dict[column_name]
            filter_column_type_dict[column_index] = self.get_column_type(
                column_index)

        # Byte coordinates for each resolved column index.
        column_indices = list(column_index_dict.values())
        column_coords = self._parse_data_coords(column_indices)
        column_coords_dict = {}
        for i in range(len(column_indices)):
            column_coords_dict[column_indices[i]] = column_coords[i]

        return select_columns, column_index_dict, filter_column_type_dict, column_coords_dict
def test_fast_int_given_invalid_base_errors_with_ValueError():
    """Bases outside the valid range (0, 2..36) are rejected."""
    for bad_base in (-1, 1, 37):
        with raises(ValueError):
            fastnumbers.fast_int('10', base=bad_base)
Example #11
0
    def __init__(self, line, my_line='', header=False):
        """Parse one tab-separated annotation line (presumably GFF/GTF —
        confirm against callers).

        :param line: raw line to parse; None or '' yields an empty record.
        :param my_line: alternative source used when *line* is ''.
        :param header: when True, treat the line as a header and skip parsing.
        """
        self.attributes = dict()
        self.__strand = None
        self.header = True
        self.chrom, self.source, self.feature = None, None, None
        # pylint: disable=invalid-name
        self.id = None
        # pylint: enable=invalid-name
        self.parent = []
        self.start, self.end = None, None
        self.__score = None
        self.__phase = None
        self.__frame = None
        self._line = "NA"
        self.__gene = None
        self._transcript = None
        self.__feature = None

        self.attribute_order = []
        if line is None:  # Empty constructor
            return
        if line == '' and my_line == '':
            return

        # Prefer *line*; fall back to *my_line* when line is empty.
        if line == '' and my_line != "":
            self._line = my_line
        else:
            self._line = line

        self._line = self._line.strip()

        self._fields = self._line.split('\t')
        self.header = header

        # Anything that is not exactly 9 tab-separated fields, or that is a
        # comment line / explicit header, stays a header-only record.
        if self.header or len(self._fields) != 9 or self._line == '' or self._line[0] == "#":
            self.__feature = None
            self.header = True
            return

        self.chrom, self.source = self._fields[0:2]
        # Fields 3-4 hold the start/end coordinates.
        self.start, self.end = tuple(fastnumbers.fast_int(i) for i in self._fields[3:5])

        self.score = self._fields[5]
        self.strand = self._fields[6]
        self.phase = self._fields[7]

        self._attr = self._fields[8]
        self._parse_attributes()
        self.feature = self._fields[2]
        self.__is_exon, self.__is_gene, self.__is_cds = None, None, None
        # Interning these frequently repeated strings lets records share
        # one object per distinct value.
        [intern(_) for _ in (str(self.chrom), str(self.source), str(self.feature))]
Example #12
0
def load_data(filename=None, ini_group=""):
    """Load data from the ini file into the module-level configdata dict.

    Args:
        filename: (default value = None) To override the filename
            pass a string containing the new filename.
        ini_group: currently unused; kept for interface compatibility.

    Returns:
        None; results are stored in the global ``configdata`` dictionary,
        one sub-dict per ini section.

    code::

        USER = load_user_data(settings_file)
        EMAIL = load_email_data(settings_file)

    """
    if filename is None:
        filename = "settings.ini"

    try:
        # FIX: SafeConfigParser was deprecated in Python 3.2 and removed in
        # 3.12; ConfigParser is the direct, behavior-compatible replacement.
        config = configparser.ConfigParser()
        config.read(filename)
        for section in config.sections():
            sname = section.strip()
            configdata[sname] = {}
            for option_name in config.options(sname):
                value = config.get(sname, option_name).split(",")
                is_path_option = (option_name.endswith("_path") or
                                  option_name.endswith("_filename"))
                if len(value) == 1:
                    # Single value: normalize paths, then coerce to int
                    # where possible (fast_int returns the string unchanged
                    # when it is not an integer).
                    if is_path_option:
                        value[0] = os.path.abspath(value[0])
                    configdata[sname][option_name] = fastnumbers.fast_int(value[0])
                else:
                    # Comma-separated list: keep every entry as a
                    # stripped string.
                    configdata[sname][option_name] = []
                    for cleanvalue in value:
                        if is_path_option:
                            cleanvalue = os.path.abspath(cleanvalue)
                        configdata[sname][option_name].append(cleanvalue.strip())
    except configparser.NoSectionError:
        print("Error reading %s" % filename)
Example #13
0
def _attribute_definition(val):
    """Convert a quoted attribute value to bool/int/float where possible,
    otherwise return the (unquoted) string."""
    val = val.replace('"', '')

    def booleanize(value):
        # Map textual booleans; anything else passes through unchanged.
        if value in ("true", "True"):
            return True
        if value in ("False", "false"):
            return False
        return value

    # Try int first; on failure fall back to float; whatever is still a
    # string after that goes through the boolean check.
    to_float = functools.partial(fastnumbers.fast_float, key=booleanize)
    return fastnumbers.fast_int(val, key=to_float)
Example #14
0
    def _get_passing_row_indices(fltr, parser, line_length, coords_value,
                                 coords_position, file_handle, start_index,
                                 end_index):
        """Return the set of row positions whose value passes *fltr*.

        For each row in [start_index, end_index), the value column is
        tested with fltr.passes(); when it passes, the position column is
        parsed as an int and collected.
        """
        passing = set()

        for row in range(start_index, end_index):
            value = parser._parse_row_value(row, coords_value, line_length,
                                            file_handle)
            if fltr.passes(value):
                position = parser._parse_row_value(row, coords_position,
                                                   line_length, file_handle)
                passing.add(fastnumbers.fast_int(position))

        return passing
Example #15
0
    def _find_matching_row_indices(index_file_path, position_coords,
                                   positions):
        """Collect the row indices stored between positions[0] and positions[1].

        Takes a file path rather than an open parser so the call can be
        dispatched to worker processes.
        """
        with IndexHelper._get_index_parser(index_file_path) as index_parser:
            line_length = index_parser.get_stat(".ll")
            file_handle = index_parser.get_file_handle("")

            # Parse the position column of every row in the half-open range.
            return {
                fastnumbers.fast_int(
                    index_parser._parse_row_value(i, position_coords,
                                                  line_length, file_handle))
                for i in range(positions[0], positions[1])
            }
Example #16
0
def bytes2human(size: Union[int, float],
                prefix: str = "",
                suffix: str = "") -> str:
    """Format a byte count as a short human-readable string using SIZE_UNITS."""
    size = fast_float(size)  # Prevent proxied size problems with round()
    unit = ""

    # Walk up the unit ladder until the value fits under 1024.
    for unit in SIZE_UNITS:
        if size < 1024:
            break
        size /= 1024

    # Bytes and kilobytes are shown whole; megabytes get one decimal,
    # larger units two.
    if unit in "BK":
        size = fast_int(size)
    elif unit == "M":
        size = round(size, 1)
    else:
        size = round(size, 2)

    return f"{size}{prefix}{unit}{suffix}"
Example #17
0
    def phase(self, value):
        """
        Setter for the phase attribute.
        :param value: None, '.' or '?' clear the phase; integers in
            (-1, 0, 1, 2) are accepted, with -1 normalised to 0.
        :return:
        """

        if value is None:
            self.__phase = None
            self._set_frame()
            return

        try:
            parsed = fastnumbers.fast_int(value)
        except (TypeError, ValueError):
            raise ValueError("Invalid phase: {0} (type: {1})".format(value, type(value)))

        if parsed in (-1, 0, 1, 2):
            # -1 is tolerated on input but stored as 0.
            self.__phase = max(parsed, 0)
        elif parsed in (None, '.', '?'):
            # Placeholder markers mean "phase unknown".
            self.__phase = None
        else:
            raise ValueError("Invalid phase: {0} (type: {1})".format(value, type(value)))
        self._set_frame()
Example #18
0
    def _get_identifier_row_index(index_parser,
                                  query_value,
                                  end_index,
                                  num_processes=1):
        """Binary-search the index for query_value; return its row index or -1.

        Returns -1 immediately for an empty index (end_index == 0) or when
        the value is not present in the value column.
        """
        if end_index == 0:
            return -1

        line_length = index_parser.get_stat(".ll")
        file_handle = index_parser.get_file_handle("")
        value_coords = index_parser._parse_data_coords([0])[0]
        position_coords = index_parser._parse_data_coords([1])[0]

        match = IndexHelper._binary_identifier_search(
            index_parser, line_length, value_coords, file_handle, query_value,
            0, end_index)

        # Not found in the value column.
        if match == -1:
            return -1

        # Translate the matching position into the stored row index.
        raw_index = index_parser._parse_row_value(match, position_coords,
                                                  line_length, file_handle)
        return fastnumbers.fast_int(raw_index)
def test_fast_int_given_invalid_string_raises_ValueError_if_raise_on_invalid_is_True(x):
    """Invalid strings raise ValueError when raise_on_invalid is set.

    FIX: each call gets its own raises() context — the original put both
    calls in one block, so the second was unreachable once the first raised.
    """
    assume(not a_number(x))
    with raises(ValueError):
        fastnumbers.fast_int(x, raise_on_invalid=True)
    with raises(ValueError):
        fastnumbers.fast_int(x, None, True)
def test_fast_int_given_invalid_string_returns_string_as_is(x):
    """Without raise_on_invalid, the exact input object comes back."""
    assume(not a_number(x))
    for result in (fastnumbers.fast_int(x), fastnumbers.fast_int(x, base=10)):
        assert result is x
def test_fast_int_given_unicode_non_numeral_returns_as_is(x):
    """Non-numeral unicode characters are returned unchanged."""
    assert x == fastnumbers.fast_int(x)
def test_fast_int_given_unicode_digit_returns_int(x):
    """A unicode digit converts to its numeric value, padded or not."""
    expected = unicodedata.digit(x)
    assert fastnumbers.fast_int(x) == expected
    assert isinstance(fastnumbers.fast_int(x), (int, long))
    # Surrounding whitespace is ignored.
    assert fastnumbers.fast_int(u'   ' + x + u'   ') == expected
def test_fast_int_returns_transformed_input_if_invalid_and_key_is_given(x):
    """On invalid input, the key function's result is returned.

    FIX: the original comparison was a bare expression whose result was
    discarded; it is now actually asserted.
    """
    assume(not a_number(x))
    assert fastnumbers.fast_int(x, key=len) == len(x)
def test_fast_int_returns_raises_ValueError_if_raise_on_invalid_is_True_and_default_is_given(x):
    """raise_on_invalid supersedes the default for invalid input.

    FIX: each call gets its own raises() block; previously the second call
    was unreachable because the first had already raised.
    """
    assume(not a_number(x))
    with raises(ValueError):
        fastnumbers.fast_int(x, default=90, raise_on_invalid=True)
    with raises(ValueError):
        fastnumbers.fast_int(x, 90, True)
def test_fast_int_given_float_string_raises_ValueError_if_raise_on_invalid_is_True(x):
    """A non-integral float string raises when raise_on_invalid is set.

    FIX: split into one raises() block per call; the second call used to
    be dead code behind the first raise.
    """
    assume(not x.is_integer())
    y = repr(x)
    with raises(ValueError):
        fastnumbers.fast_int(y, None, True)
    with raises(ValueError):
        fastnumbers.fast_int(y, raise_on_invalid=True)
def test_fast_int_given_float_intlike_string_returns_string_as_is(x):
    """Even '3.0'-style strings are not converted: the input is returned."""
    assume(not math.isnan(x))
    assume(x.is_integer())
    rep = repr(x)
    assert fastnumbers.fast_int(rep) is rep
Example #27
0
def archive_item(request, viewitem):
    """
    Serve the gallery items

    Renders one item from inside an archive: resolves filesystem and
    thumbnail paths, builds the listing of the archive's contents,
    creates the current item's thumbnail on demand, and paginates one
    item per page.
    """
    context = {}
    paths = {}
    context["mobile"] = detect_mobile(request)
    request, context = sort_order(request, context)
    # "a_item" is 1-based in the query string; convert to a 0-based index.
    paths["archive_item"] = fastnumbers.fast_int(get_option_value(request,
        "a_item", 1))-1
    paths["item_fs"] = configdata["locations"]["albums_path"]\
        + urllib.unquote(request.path.replace("/",
                                              os.sep))
    paths["item_path"], paths["item_name"] = os.path.split(
        paths["item_fs"].lower())
    # Thumbnails live in a parallel tree: .../albums/... -> .../thumbnails/...
    paths["thumb_path"] = paths["item_path"].replace("%salbums" % os.sep,
                                                     "%sthumbnails" % os.sep)
#    tnails = thumbnail.Thumbnails()
    paths["web_path"] = paths["item_path"].replace(
        configdata["locations"]["albums_path"].lower(), "")
    paths["web_thumbpath"] = paths["web_path"].replace("/albums",
                                                       "/thumbnails")+r"/"
    global_listings = read_from_cdl(paths["item_path"],
                                    sort_by=context["sort_order"])
    archive_index = return_cdl_index(global_listings, paths["item_name"])
    tools.assure_path_exists(paths["thumb_path"] + os.sep + paths["item_name"])
    listings = []
    archive_file = archives.id_cfile_by_sig(paths["item_fs"])
#    tnails = thumbnail.Thumbnails()
    for count, filename in enumerate(global_listings[archive_index][1].
                                     archive_file.listings):
        #               0,          1,          ,2
        #   Listings = filename, zip fqfn, web thumbnail path (Med & Large),

        #       3,                              4
        #   thumbnail fs path (med & large), background color

        listings.append((filename,
                         global_listings[archive_index][1].fq_filename,
                         (paths["web_thumbpath"] + paths["item_name"] + "/" +
                          THUMBNAIL.make_tnail_name(filename=filename)["medium"],
                          paths["web_thumbpath"] + paths["item_name"] + "/" +
                          THUMBNAIL.make_tnail_name(filename=filename)["large"]),
                         (THUMBNAIL.make_tnail_fsname(
                             paths["thumb_path"] + "%s%s%s%s" % (
                                 os.sep, paths["item_name"],
                                 os.sep, filename))["medium"],
                          THUMBNAIL.make_tnail_fsname(
                              paths["thumb_path"] + "%s%s%s%s" % (
                                  os.sep, paths["item_name"],
                                  os.sep, filename))["large"]),
                         thumbnail.THUMBNAIL_DB.get(
                                     global_listings[archive_index][1].
                                     file_extension, "#FFFFFF"),
                         count+1))
    # Only generate a thumbnail when the file extension is one we know
    # how to thumbnail.
    if os.path.splitext(listings[paths["archive_item"]][0])[1][1:].lower()\
            in thumbnail.THUMBNAIL_DB:
        file_data = archive_file.extract_mem_file(\
            listings[paths["archive_item"]][0])
        #print listings[paths["archive_item"]]
        if file_data is not None:
            # Index 1-(mobile is True) picks the large (0) or medium (1)
            # thumbnail target depending on the client.
            THUMBNAIL.create_thumbnail_from_memory(memory_image=file_data,
                t_filename=listings[paths["archive_item"]][3][1-(context["mobile"] is True)],\
                t_size=configdata["configuration"][SIZES[1-(context["mobile"] is True)]])

    # One archive entry per page.
    context["current_page"] = request.GET.get("a_item")
    chk_list = Paginator(listings, 1)
    context["page_cnt"] = range(1, chk_list.num_pages+1)
    context["up_uri"] = "/".join(request.get_raw_uri().split("/")[0:-1])
    context["gallery_name"] = os.path.split(request.path_info)[-1]
    try:
        context["pagelist"] = chk_list.page(context["current_page"])
    except PageNotAnInteger:
        context["pagelist"] = chk_list.page(1)
    except EmptyPage:
        context["pagelist"] = chk_list.page(chk_list.num_pages)
    context["all_listings"] = global_listings

    context["prev_uri"], context["next_uri"] = return_prev_next(
        paths["item_path"], paths["web_path"], context["sort_order"])
    context["webpath"] = paths["web_path"] + "/%s" % paths["item_name"]
    # Wait for any background thumbnail jobs before rendering.
    thumbnail.pool.wait()
    template = loader.get_template('frontend/archive_item.html')
#        thumbnail.pool.shutdown()
    return HttpResponse(template.render(context, request))
def test_fast_int_given_invalid_type_raises_TypeError(x):
    """Inputs that are neither strings nor numbers raise TypeError."""
    with raises(TypeError):
        fastnumbers.fast_int(x)
def test_fast_int_given_int_returns_int(x):
    """An int passes through unchanged and stays an integer type."""
    result = fastnumbers.fast_int(x)
    assert result == x
    assert isinstance(result, (int, long))
def test_fast_int_returns_default_value_if_given_invalid_string(x):
    """Invalid input falls back to the default (keyword or positional)."""
    assume(not a_number(x))
    for result in (fastnumbers.fast_int(x, default=90),
                   fastnumbers.fast_int(x, 90)):
        assert result == 90
Example #31
0
def galleryitem(request, viewitem):
    """
    Serve the gallery items

    Renders a single gallery item: resolves filesystem paths, optionally
    serves a direct download, builds the listing of sibling items from
    the cached directory listing, paginates one item per page, and
    generates thumbnails for the items around the current page.
    """
    context = {}
    paths = {}
    context["mobile"] = detect_mobile(request)
    request, context = sort_order(request, context)
    paths["item_fs"] = configdata["locations"]["albums_path"]\
        + urllib.unquote(request.path.replace("/", os.sep))
    paths["item_path"], paths["item_name"] = os.path.split(paths["item_fs"].lower())

    # Direct download of the requested file when no page is specified.
    if "download" in request.GET and "page" not in request.GET:
        return serve(request, os.path.basename(paths["item_fs"]),
                     paths["item_path"])
    paths["web_path"] = paths["item_path"].replace(
        configdata["locations"]["albums_path"].lower(), "")
    paths["web_thumbpath"] = paths["web_path"].replace("/albums",
                                                       "/thumbnails")+r"/"
    if not os.path.exists(paths["item_fs"]):
        #
        #   Albums doesn't exist
        return HttpResponseNotFound('<h1>Page not found</h1>')

    # Refresh the cached directory listing, directories first.
    CDL.smart_read(paths["item_path"].lower().strip())
    cached_files, cached_dirs = CDL.return_sorted(
        scan_directory=paths["item_path"],
        sort_by=context["sort_order"], reverse=False)

    listings = []
    for count, dcache in enumerate(cached_dirs + cached_files):
        #               0,          1,          ,2                  , 3
        #   Listings = filename, dcache entry, web tnail path, tnail fs path
        #
        #   4
        #  web path to original
        listings.append((dcache[0].split("/")[0], dcache[1],
                         (paths["web_thumbpath"] +
                          THUMBNAIL.make_tnail_name(filename=dcache[0])["medium"],
                          paths["web_thumbpath"] +
                          THUMBNAIL.make_tnail_name(filename=dcache[0])["large"]),
                         (THUMBNAIL.make_tnail_fsname(
                             dcache[1].fq_filename)["medium"],
                          THUMBNAIL.make_tnail_fsname(
                              dcache[1].fq_filename)["large"]),
                         thumbnail.THUMBNAIL_DB.get(
                             dcache[1].file_extension, "#FFFFFF")))
    # One item per page.
    chk_list = Paginator(listings, 1)
    template = loader.get_template('frontend/gallery_item.html')
    context["gallery_name"] = os.path.split(request.path_info)[-1]
    try:
        context["pagelist"] = chk_list.page(request.GET.get("page"))
        context["page"] = request.GET.get("page")
    except PageNotAnInteger:
        # No usable page number: locate the requested file in the listing.
        # NOTE(review): if no cached file matches item_name, context["page"]
        # is never assigned and the chk_list.page(...) call below raises
        # KeyError — confirm whether that can happen in practice.
        for count, fentry in enumerate(cached_files):
            if fentry[1].filename.lower() == paths["item_name"].lower():
                context["page"] = 1+count+len(cached_dirs)
            else:
                context["pagelist"] = chk_list.page(1)
        context["pagelist"] = chk_list.page(context["page"])
    except EmptyPage:
        context["pagelist"] = chk_list.page(chk_list.num_pages)
    # Download of the item shown on a specific page.
    if "download" in request.GET and "page" in request.GET:
        return serve(request,
                     os.path.basename(
                         context["pagelist"].object_list[0][1].fq_filename),
                     os.path.dirname(
                         context["pagelist"].object_list[0][1].fq_filename))

    context["all_listings"] = listings
    context["current_page"] = context["page"]
    context["up_uri"] = "/".join(request.get_raw_uri().split("/")[0:-1])
    # Pre-generate thumbnails for the current item plus its immediate
    # previous/next neighbours, sized for the client (mobile or not).
    for entry in listings[fastnumbers.fast_int(context["page"])-1-(context["pagelist"].has_previous() is True):
            fastnumbers.fast_int(context["page"])+(context["pagelist"].has_next() is True)]:
            # context["pagelist"]:
        create_validate_thumb(src_file=entry[1],
                              t_file=entry[3][1-(context["mobile"] is True)],
                              t_size=configdata["configuration"][SIZES[2-(context["mobile"] is True)]])
    # Wait for background thumbnail jobs before rendering.
    thumbnail.pool.wait()
    return HttpResponse(template.render(context, request))
def test_fast_int_returns_input_as_is_if_valid_and_key_is_given(x):
    """key is ignored for valid input; the converted value is returned.

    FIX: the original comparisons were bare expressions whose results were
    discarded; they are now asserted.
    """
    assert fastnumbers.fast_int(x, key=len) == x
    assert fastnumbers.fast_int(str(x), key=len) == x
def test_fast_int_given_padded_int_string_returns_int(x, y, z):
    """Leading/trailing spaces around an int string are ignored."""
    padded = ' ' * y + repr(x) + ' ' * z
    assert fastnumbers.fast_int(padded) == x
    assert isinstance(fastnumbers.fast_int(padded), (int, long))
def test_fast_forceint_given_unicode_of_more_than_one_char_returns_as_is(x):
    """Multi-character non-numeric unicode input is returned unchanged."""
    assume(not a_number(x))
    assert x == fastnumbers.fast_int(x)
def test_fast_int():
    """Exhaustive spot-checks of fast_int over floats, strings, bases,
    defaults, key functions, and unicode input (numbered cases below)."""
    # 1. float number
    assert fastnumbers.fast_int(-367.3268) == -367
    assert fastnumbers.fast_int(-367.3268, raise_on_invalid=True) == -367
    # 2. signed float string
    assert fastnumbers.fast_int("+367.3268") == "+367.3268"
    with raises(ValueError):
        assert fastnumbers.fast_int("+367.3268", None, True)
    # 3. float string with exponents
    assert fastnumbers.fast_int("-367.3268e207") == "-367.3268e207"
    # 4. float string with padded whitespace
    assert fastnumbers.fast_int("   -367.04   ") == "   -367.04   "
    # 5. int number
    assert fastnumbers.fast_int(499) == 499
    # 6. signed int string
    assert fastnumbers.fast_int("-499") == -499
    # 7. int string with padded whitespace
    assert fastnumbers.fast_int("   +3001   ") == 3001
    # 8. long number
    assert fastnumbers.fast_int(35892482945872302493) == 35892482945872302493
    # 9. long string (the 'L'/'l' suffix is only valid on Python 2)
    if python_version_tuple()[0] == "2":
        assert fastnumbers.fast_int("35892482945872302493L") == 35892482945872302493
        assert fastnumbers.fast_int("35892482945872302493l") == 35892482945872302493
    assert fastnumbers.fast_int("35892482945872302493") == 35892482945872302493
    # 10. return type
    assert isinstance(fastnumbers.fast_int(4029.00), int)
    # 11. TypeError for invalid input
    with raises(TypeError):
        fastnumbers.fast_int(["hey"])
    # 12. Invalid input string
    assert fastnumbers.fast_int("not_a_number") == "not_a_number"
    with raises(ValueError):
        assert fastnumbers.fast_int("not_a_number", raise_on_invalid=True)
    # 13. Invalid input string with numbers
    assert fastnumbers.fast_int("26.8 lb") == "26.8 lb"
    with raises(ValueError):
        assert fastnumbers.fast_int("26.8 lb", None, True)
    # 14. Infinity
    assert fastnumbers.fast_int("inf") == "inf"
    # 15. NaN
    assert fastnumbers.fast_int("nan") == "nan"
    # 16. Sign/'e'/'.' only
    assert fastnumbers.fast_int("+") == "+"
    assert fastnumbers.fast_int("-") == "-"
    assert fastnumbers.fast_int("e") == "e"
    assert fastnumbers.fast_int(".") == "."
    # 17. Default on invalid... 'raise_on_invalid' supersedes
    assert fastnumbers.fast_int("invalid", default=90) == 90
    assert fastnumbers.fast_int("invalid", default=None) is None
    with raises(ValueError):
        assert fastnumbers.fast_int("invalid", 90, True)
    # 18. Unicode numbers
    assert fastnumbers.fast_int(u"⑦") == 7
    assert fastnumbers.fast_int(u"⁸") == 8
    assert fastnumbers.fast_int(u"⁸", base=10) == u"⁸"
    assert fastnumbers.fast_int(u"⅔") == u"⅔"
    assert fastnumbers.fast_int(u"Ⅴ") == u"Ⅴ"
    # 19. Key function
    assert fastnumbers.fast_int(76, key=len) == 76
    assert fastnumbers.fast_int("76", key=len) == 76
    assert fastnumbers.fast_int("invalid", key=len) == 7