Example #1
from flask import request, jsonify

def spots_index():
    """Handle the root spots collection."""

    if request.method == 'POST':
        # Parse the data and make sure that it has a year and a
        # non-negative amount of spots.
        try:
            body = request.get_json(force=True) or {}
        except Exception as e:
            return _make_error(f'Invalid JSON: {e}'), 400

        year = body.get('year')
        spots = body.get('spots')

        if year is None or spots is None:
            return _make_error('both year and spots must be provided'), 400

        try:
            # These conversions might fail if they aren't integer
            # strings.
            year = int(year)
            spots = int(spots)
        except ValueError:
            return _make_error(
                'year and spots must be integers.'
            ), 400

        if spots < 0:
            return _make_error('spots must be non-negative'), 400

        # Catch an error if the year isn't unique.
        try:
            row = csv_parser.append_data(year, spots)
        except ValueError as e:
            return _make_error(e.args[0]), 400
        else:
            return jsonify(row)
    elif request.method == 'GET':
        # Return the sunspots data over a range or offset.
        start = request.args.get('start')
        end = request.args.get('end')
        limit = request.args.get('limit')
        offset = request.args.get('offset')

        is_range_case = start is not None or end is not None
        is_offset_case = limit is not None or offset is not None

        if is_range_case and is_offset_case:
            return _make_error(
                'limit and/or offset cannot be combined with start and/or end'
            ), 400
        elif is_range_case:
            return _handle_range_case(start, end)
        elif is_offset_case:
            return _handle_offset_case(limit, offset)
        else:
            data = csv_parser.read_data()
            return jsonify(data)
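The handler above leans on several helpers defined elsewhere in the project: _make_error, csv_parser, _handle_range_case and _handle_offset_case. As a rough sketch only (the route path and the body of _make_error are assumptions, not the project's actual code), the surrounding wiring could look like this:

# Hypothetical wiring for spots_index(); _make_error here is a stand-in for
# whatever error helper the project actually defines.
from flask import Flask, jsonify

app = Flask(__name__)

def _make_error(message):
    # Wrap an error message in a JSON body; callers attach the status code.
    return jsonify({'error': message})

# Register the handler for the two methods it checks explicitly.
app.add_url_rule('/spots', view_func=spots_index, methods=['GET', 'POST'])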
Example #2
from os import walk
from os.path import join

def validate(path):
    """Validate every CSV file under path, recording each result in the global log."""
    global log
    for root, dirs, files in walk(path):
        for name in files:
            if name.endswith('.csv'):
                # walk() already descends into subdirectories, so every CSV in
                # the tree is visited in this single pass.
                data = read_data(join(root, name))
                log[join(root, name)] = validate_data(data)
    return log
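validate() walks the whole tree under path and records one result per CSV file in the module-level log dictionary. A minimal driver might look like the sketch below; read_data and validate_data here are hypothetical stand-ins for the project's own CSV reader and validator:

import csv

log = {}

def read_data(filepath):
    # Assumed behaviour: load a CSV file into a list of row dictionaries.
    with open(filepath, newline='') as handle:
        return list(csv.DictReader(handle))

def validate_data(data):
    # Assumed behaviour: every row must carry the expected columns.
    return all({'year', 'spots'} <= set(row) for row in data)

results = validate('data')
for filepath, ok in results.items():
    print(filepath, 'OK' if ok else 'INVALID')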
def test_read_data_values():
    data = csv_parser.read_data()

    # Check the first and last rows.
    assert data[0]['id'] == 0
    assert data[0]['year'] == 1770
    assert data[0]['spots'] == 101
    assert data[-1]['id'] == 99
    assert data[-1]['year'] == 1869
    assert data[-1]['spots'] == 74


def test_read_data_types():
    data = csv_parser.read_data()

    # This should give a list.
    assert type(data) is list

    # Each element should be a dictionary with three keys, each mapping to an
    # integer value.
    for row in data:
        assert len(row.keys()) == 3

        for key in ('id', 'year', 'spots'):
            assert type(row[key]) is int


def test_read_data_count():
    data = csv_parser.read_data()

    # 100 elements should be returned.
    assert len(data) == 100
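Together the tests pin down the contract of csv_parser.read_data(): a list of exactly 100 dictionaries with integer id, year and spots values, running from 1770 to 1869; they can be run with pytest from the project root. Purely as an illustration of one way that contract could be satisfied (the file name and column layout below are assumptions, not the project's actual data file):

import csv

DATA_FILE = 'sunspots.csv'  # hypothetical path to a two-column (year, spots) file

def read_data():
    # Build one dictionary per CSV row, assigning sequential ids from 0.
    rows = []
    with open(DATA_FILE, newline='') as handle:
        for row_id, (year, spots) in enumerate(csv.reader(handle)):
            rows.append({'id': row_id, 'year': int(year), 'spots': int(spots)})
    return rows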