Exemplo n.º 1
0
def named_trails(dataset_id):
    ''' Show the named-trails page for the given dataset, or 404. '''
    store = make_datastore(app.config['DATASTORE'])
    found = get_dataset(store, dataset_id)
    if not found:
        return make_response("No Dataset Found", 404)
    return render_template('dataset-04-named-trails.html', dataset=found)
Exemplo n.º 2
0
def name_trails(dataset_id):
    '''
    Build opentrails named_trails.csv from the transformed segments.

    Downloads the transformed segments zip from the datastore, derives
    (name, segment ids) trail groupings, writes them as CSV, uploads the
    result, and redirects to the named-trails page. 404s when the
    dataset does not exist.
    '''
    datastore = make_datastore(app.config['DATASTORE'])
    dataset = get_dataset(datastore, dataset_id)
    if not dataset:
        return make_response("No Dataset Found", 404)

    # Download the transformed segments file
    transformed_segments_path = '{0}/opentrails/segments.geojson.zip'.format(dataset.id)
    transformed_segments_zip = datastore.read(transformed_segments_path)

    # Unzip it
    segments_path = unzip(transformed_segments_zip, '.geojson', [])
    transformed_segments = json.load(open(segments_path))

    # Generate a list of (name, ids) tuples
    named_trails = make_named_trails(transformed_segments['features'])

    # Serialize trail rows to an in-memory CSV.
    # Renamed from "file", which shadowed the builtin.
    csv_buffer = StringIO()
    cols = 'id', 'name', 'segment_ids', 'description', 'part_of'
    writer = csv.writer(csv_buffer)
    writer.writerow(cols)
    for row in named_trails:
        # Missing values become empty strings; encode for byte-safe CSV output.
        writer.writerow([(row[c] or '').encode('utf8') for c in cols])

    named_trails_path = '{0}/opentrails/named_trails.csv'.format(dataset.id)
    datastore.write(named_trails_path, csv_buffer)

    return redirect('/datasets/' + dataset.id + '/named-trails', code=303)
Exemplo n.º 3
0
def name_trails(dataset_id):
    ''' Derive named_trails.csv from the transformed segments and store it. '''
    store = make_datastore(app.config['DATASTORE'])
    dataset = get_dataset(store, dataset_id)
    if not dataset:
        return make_response("No Dataset Found", 404)

    # Fetch the zipped, transformed segments from the datastore.
    segments_zip = store.read(
        '{0}/opentrails/segments.geojson.zip'.format(dataset.id))

    # Extract the geojson and parse it.
    geojson_path = unzip(segments_zip, '.geojson', [])
    segments = json.load(open(geojson_path))

    # Group segments into (name, ids) trail records.
    trails = make_named_trails(segments['features'])

    # Serialize the trail rows into an in-memory CSV buffer.
    columns = ('id', 'name', 'segment_ids', 'description', 'part_of')
    buff = StringIO()
    out = csv.writer(buff)
    out.writerow(columns)
    for trail in trails:
        out.writerow([(trail[col] or '').encode('utf8') for col in columns])

    store.write('{0}/opentrails/named_trails.csv'.format(dataset.id), buff)

    return redirect('/datasets/' + dataset.id + '/named-trails', code=303)
Exemplo n.º 4
0
def named_trails(dataset_id):
    ''' Render the named-trails step for an existing dataset. '''
    datastore = make_datastore(app.config['DATASTORE'])
    dataset = get_dataset(datastore, dataset_id)
    if dataset:
        return render_template('dataset-04-named-trails.html', dataset=dataset)
    return make_response("No Dataset Found", 404)
Exemplo n.º 5
0
def upload(dataset_id):
    '''
    Accept a zipped shapefile upload, storing both the original zip and
    a zipped geojson conversion in the datastore.
    '''
    store = make_datastore(app.config['DATASTORE'])

    # Reject anything that is not an allowed .zip upload.
    posted = request.files['file']
    if not posted or not allowed_file(posted.filename):
        return make_response("Only .zip files allowed", 403)

    # Keep the original archive in the datastore.
    original = StringIO(posted.read())
    base_path = '{0}/uploads/trail-segments'.format(dataset_id)
    store.write(base_path + '.zip', original)

    # Convert the contained shapefile to geojson.
    extracted_path = unzip(original)
    geojson = shapefile2geojson(extracted_path)

    # Zip the serialized geojson into a second buffer.
    packed = StringIO()
    zip_file(packed, json.dumps(geojson), 'trail-segments.geojson')

    # Store the zipped geojson alongside the original upload.
    store.write(base_path + '.geojson.zip', packed)

    # Send the user on to a sample of their data.
    return redirect('/datasets/' + dataset_id + "/sample-segment")
Exemplo n.º 6
0
def upload(dataset_id):
    '''
    Store an uploaded shapefile zip plus a geojson rendition of it.
    '''
    datastore = make_datastore(app.config['DATASTORE'])

    # Only allowed .zip uploads get past this point.
    incoming = request.files['file']
    if not incoming or not allowed_file(incoming.filename):
        return make_response("Only .zip files allowed", 403)

    # Save the untouched upload first.
    raw_zip = StringIO(incoming.read())
    prefix = '{0}/uploads/trail-segments'.format(dataset_id)
    datastore.write(prefix + '.zip', raw_zip)

    # Turn the shapefile inside the archive into geojson.
    shp_path = unzip(raw_zip)
    converted = shapefile2geojson(shp_path)

    # Re-zip the geojson text for storage.
    zipped_geojson = StringIO()
    zip_file(zipped_geojson, json.dumps(converted), 'trail-segments.geojson')

    # Upload the converted copy as well.
    datastore.write(prefix + '.geojson.zip', zipped_geojson)

    # Continue to the sample view of the uploaded data.
    return redirect('/datasets/' + dataset_id + "/sample-segment")
Exemplo n.º 7
0
def view_stewards(dataset_id):
    ''' Show the stewards page for a dataset; 404 if it is unknown. '''
    store = make_datastore(app.config['DATASTORE'])
    found = get_dataset(store, dataset_id)
    if not found:
        return make_response("No Dataset Found", 404)
    return render_template('dataset-05-stewards.html', dataset=found)
Exemplo n.º 8
0
def transform_trailheads(dataset_id):
    '''
    Convert the uploaded trailheads file to OpenTrails format.

    Fetches the uploaded geojson zip from the datastore, runs the
    trailheads transform, saves the transform messages, and uploads the
    zipped result before redirecting to the comparison page.
    '''
    store = make_datastore(app.config['DATASTORE'])
    dataset = get_dataset(store, dataset_id)
    if not dataset:
        return make_response("No Dataset Found", 404)

    # Fetch the uploaded trailheads archive.
    source_name = '{0}/uploads/trail-trailheads.geojson.zip'.format(dataset.id)
    source_zip = store.read(source_name)

    # Extract, parse, and transform the uploaded geojson.
    source_path = unzip(source_zip, '.geojson', [])
    uploaded = json.load(open(source_path))
    messages, transformed = trailheads_transform(uploaded, dataset)

    # Persist the transform messages for the results page.
    store.write(dataset.id + "/opentrails/trailheads-messages.json",
                StringIO(json.dumps(messages)))

    # Zip the transformed trailheads into an in-memory buffer.
    packed = StringIO()
    zip_file(packed, json.dumps(transformed, sort_keys=True),
             'trailheads.geojson')

    # Upload the zipped result.
    store.write('{0}/opentrails/trailheads.geojson.zip'.format(dataset.id),
                packed)

    return redirect('/datasets/' + dataset.id + '/transformed-trailheads', code=303)
Exemplo n.º 9
0
def view_stewards(dataset_id):
    ''' Render the stewards step of the workflow for a dataset. '''
    datastore = make_datastore(app.config['DATASTORE'])
    dataset = get_dataset(datastore, dataset_id)
    if dataset:
        return render_template('dataset-05-stewards.html', dataset=dataset)
    return make_response("No Dataset Found", 404)
Exemplo n.º 10
0
def datasets():
    ''' Show every dataset that has used opentrails so far. '''
    store = make_datastore(app.config['DATASTORE'])
    return render_template('datasets_list.html',
                           datasets_list=store.datasets(),
                           server_url=request.url_root)
Exemplo n.º 11
0
def datasets():
    ''' List all datasets known to the datastore. '''
    known = make_datastore(app.config['DATASTORE']).datasets()
    return render_template('datasets_list.html', datasets_list=known,
                           server_url=request.url_root)
Exemplo n.º 12
0
def existing_validation(id):
    ''' Show the OpenTrails upload/validation page for a dataset. '''
    store = make_datastore(app.config['DATASTORE'])
    found = get_dataset(store, id)
    if not found:
        return make_response("No dataset Found", 404)
    return render_template('check-01-upload-opentrails.html', dataset=found)
Exemplo n.º 13
0
def download_opentrails_data(dataset_id):
    '''
    Send the packaged OpenTrails archive for a dataset as a zip download.

    Returns 404 when the dataset does not exist.
    '''
    datastore = make_datastore(app.config['DATASTORE'])
    dataset = get_dataset(datastore, dataset_id)
    if not dataset:
        return make_response("No Dataset Found", 404)

    # Renamed from "buffer", which shadowed the (Python 2) builtin.
    archive_buffer = package_opentrails_archive(dataset)

    return send_file(archive_buffer, 'application/zip')
Exemplo n.º 14
0
def download_opentrails_data(dataset_id):
    ''' Serve the dataset's OpenTrails archive as a zip file, or 404. '''
    store = make_datastore(app.config['DATASTORE'])
    found = get_dataset(store, dataset_id)
    if not found:
        return make_response("No Dataset Found", 404)
    return send_file(package_opentrails_archive(found), 'application/zip')
Exemplo n.º 15
0
def existing_validation(id):
    ''' Render the check-upload page for an existing dataset. '''
    datastore = make_datastore(app.config['DATASTORE'])
    dataset = get_dataset(datastore, id)
    if dataset:
        return render_template('check-01-upload-opentrails.html', dataset=dataset)
    return make_response("No dataset Found", 404)
Exemplo n.º 16
0
def validated_results(dataset_id):
    ''' Show the validation messages recorded for a dataset. '''
    store = make_datastore(app.config['DATASTORE'])
    found = get_dataset(store, dataset_id)
    if not found:
        return make_response("No dataset Found", 404)

    # Stored messages are JSON arrays; the template expects tuples.
    stored = json.load(store.read(
        '{0}/opentrails/validate-messages.json'.format(found.id)))
    results = [tuple(item) for item in stored]

    return render_template('check-02-validated-opentrails.html', messages=results)
Exemplo n.º 17
0
def validated_results(dataset_id):
    ''' Render the recorded OpenTrails validation messages. '''
    datastore = make_datastore(app.config['DATASTORE'])
    dataset = get_dataset(datastore, dataset_id)
    if not dataset:
        return make_response("No dataset Found", 404)

    # Re-hydrate each stored message list as a tuple for the template.
    messages_path = '{0}/opentrails/validate-messages.json'.format(dataset.id)
    raw_messages = json.load(datastore.read(messages_path))
    messages = [tuple(entry) for entry in raw_messages]

    return render_template('check-02-validated-opentrails.html',
                           messages=messages)
Exemplo n.º 18
0
def show_sample_segment(dataset_id):
    ''' Render one example segment row from the uploaded data. '''
    store = make_datastore(app.config['DATASTORE'])
    found = get_dataset(store, dataset_id)
    if not found:
        return make_response("No dataset Found", 404)

    sample = get_sample_segment_features(found)
    sample_keys = sorted(sample[0]['properties'].keys())

    return render_template("dataset-02-show-sample-segment.html",
                           dataset=found,
                           uploaded_features=sample,
                           uploaded_keys=sample_keys)
Exemplo n.º 19
0
def new_dataset():
    '''
    Create a unique url for this dataset to work under
    Create a folder on S3 using this url
    '''
    # Make a new dataset object.
    # Renamed from "id", which shadowed the builtin.
    dataset_id = str(uuid.uuid4())
    dataset = Dataset(dataset_id)
    dataset.datastore = make_datastore(app.config['DATASTORE'])

    # Write a verifying file to prove we created these folders
    validname = '{0}/uploads/.valid'.format(dataset.id)
    dataset.datastore.write(validname, StringIO(dataset.id))

    return redirect('/datasets/' + dataset.id)
Exemplo n.º 20
0
def existing_dataset(id):
    '''
    Reads available files on S3 to figure out how far a dataset has gotten in the process
    '''
    store = make_datastore(app.config['DATASTORE'])
    found = get_dataset(store, id)
    if not found:
        return make_response("No dataset Found", 404)
    return render_template('dataset-01-upload-segments.html', dataset=found)
Exemplo n.º 21
0
def existing_dataset(id):
    '''
    Inspect the datastore to resume a dataset's progress through the workflow.
    '''
    datastore = make_datastore(app.config['DATASTORE'])
    dataset = get_dataset(datastore, id)
    if dataset:
        return render_template('dataset-01-upload-segments.html', dataset=dataset)
    return make_response("No dataset Found", 404)
Exemplo n.º 22
0
def new_dataset():
    '''
    Mint a fresh dataset id, create its folder on S3, and redirect to it.
    '''
    # Build the dataset around a random unique identifier.
    dataset = Dataset(str(uuid.uuid4()))
    dataset.datastore = make_datastore(app.config['DATASTORE'])

    # Drop a marker file so the folder provably exists.
    marker = '{0}/uploads/.valid'.format(dataset.id)
    dataset.datastore.write(marker, StringIO(dataset.id))

    return redirect('/datasets/' + dataset.id)
Exemplo n.º 23
0
def show_sample_trailhead(dataset_id):
    ''' Render one example trailhead row from the uploaded data. '''
    store = make_datastore(app.config['DATASTORE'])
    found = get_dataset(store, dataset_id)
    if not found:
        return make_response("No dataset Found", 404)

    sample = get_sample_trailhead_features(found)
    sample_keys = sorted(sample[0]['properties'].keys())

    return render_template("dataset-07-show-sample-trailhead.html",
                           dataset=found,
                           uploaded_features=sample,
                           uploaded_keys=sample_keys)
Exemplo n.º 24
0
def validate_upload(dataset_id):
    '''
    Validate an uploaded OpenTrails zip and record the results.

    Stores the original upload, extracts the recognized OpenTrails files
    to a temp directory, runs check_open_trails over them, writes the
    resulting messages to the datastore, and redirects to the results
    page. Rejects non-.zip uploads with a 403.
    '''
    datastore = make_datastore(app.config['DATASTORE'])

    # Check that they uploaded a .zip file
    if not request.files['file'] or not allowed_file(
            request.files['file'].filename):
        return make_response("Only .zip files allowed", 403)

    # Read zip data to buffer
    zipfile_data = StringIO()
    zipfile_path = '{0}/uploads/open-trails.zip'.format(dataset_id)
    request.files['file'].save(zipfile_data)

    # Upload original file data to S3
    datastore.write(zipfile_path, zipfile_data)

    names = [
        'trail_segments.geojson', 'named_trails.csv', 'trailheads.geojson',
        'stewards.csv', 'areas.geojson'
    ]

    # Validate data locally; try/finally guarantees the temp directory is
    # removed even when extraction or validation raises (it leaked before).
    local_dir = mkdtemp(prefix='validate-')
    try:
        # "with" closes the archive handle (previously left open).
        with zipfile.ZipFile(zipfile_data, 'r') as zf:
            for name in sorted(zf.namelist()):
                base = os.path.basename(name)
                if base in names:
                    with open(os.path.join(local_dir, base), 'w') as out_file:
                        out_file.write(zf.open(name).read())

        args = [os.path.join(local_dir, base) for base in names]
        # The success flag is unused; messages drive the results page.
        messages, _ = check_open_trails(*args)
    finally:
        # Clean up after ourselves.
        shutil.rmtree(local_dir)

    path = '{0}/opentrails/validate-messages.json'.format(dataset_id)
    datastore.write(path, StringIO(json.dumps(messages)))

    # Show sample data from original file
    return redirect('/checks/' + dataset_id + "/results", code=303)
Exemplo n.º 25
0
def validate_upload(dataset_id):
    ''' Store an uploaded OpenTrails zip, validate it, record the messages. '''
    store = make_datastore(app.config['DATASTORE'])

    # Refuse anything that is not an allowed .zip upload.
    posted = request.files['file']
    if not posted or not allowed_file(posted.filename):
        return make_response("Only .zip files allowed", 403)

    # Buffer the upload in memory.
    upload_buffer = StringIO()
    posted.save(upload_buffer)

    # Keep the original archive in the datastore.
    store.write('{0}/uploads/open-trails.zip'.format(dataset_id), upload_buffer)

    # Extract the recognized OpenTrails files to a scratch directory.
    archive = zipfile.ZipFile(upload_buffer, 'r')
    scratch = mkdtemp(prefix='validate-')

    expected = ['trail_segments.geojson', 'named_trails.csv',
                'trailheads.geojson', 'stewards.csv', 'areas.geojson']

    for member in sorted(archive.namelist()):
        short = os.path.basename(member)
        if short in expected:
            with open(os.path.join(scratch, short), 'w') as copy:
                copy.write(archive.open(member).read())

    # Run the validator over the expected file paths.
    paths = [os.path.join(scratch, short) for short in expected]
    messages, succeeded = check_open_trails(*paths)

    # Remove the scratch directory.
    shutil.rmtree(scratch)

    # Record the validation messages for the results page.
    store.write('{0}/opentrails/validate-messages.json'.format(dataset_id),
                StringIO(json.dumps(messages)))

    # Continue to the results view.
    return redirect('/checks/' + dataset_id + "/results", code=303)
Exemplo n.º 26
0
def status():
    '''
    Report service health as JSON.

    Returns 200 with status "ok" when the S3 datastore is reachable,
    otherwise 403 with a reason in the status field.
    '''
    response = {}
    response['status'] = 'ok'
    response["updated"] = int(time.time())
    response["dependencies"] = ["S3"]

    # Connect to S3.
    # Re-indented to the file's four-space convention (was two spaces).
    try:
        datastore = make_datastore(app.config['DATASTORE'])
    except AttributeError:
        response['status'] = 'Can\'t parse S3 auth'
        response = make_response(json.dumps(response), 403)
        return response

    if not datastore.bucket:
        response['status'] = 'Can\'t connect to S3'
        response = make_response(json.dumps(response), 403)
        return response

    response = make_response(json.dumps(response), 200)
    return response
Exemplo n.º 27
0
def status():
    ''' Health check: verify the S3 datastore is reachable. '''
    payload = {}
    payload['status'] = 'ok'
    payload["updated"] = int(time.time())
    payload["dependencies"] = ["S3"]

    # Connect to S3
    try:
        datastore = make_datastore(app.config['DATASTORE'])
    except AttributeError:
        payload['status'] = 'Can\'t parse S3 auth'
        return make_response(json.dumps(payload), 403)

    if not datastore.bucket:
        payload['status'] = 'Can\'t connect to S3'
        return make_response(json.dumps(payload), 403)

    return make_response(json.dumps(payload), 200)
Exemplo n.º 28
0
def create_steward(dataset_id):
    '''
    Create stewards.csv from the submitted form and store it.

    Builds a single steward row from the form fields, forcing id to "0"
    and publisher to "no", writes the CSV to the datastore, and
    redirects to the stewards page. 404s when the dataset is unknown.
    '''
    datastore = make_datastore(app.config['DATASTORE'])
    dataset = get_dataset(datastore, dataset_id)
    if not dataset:
        return make_response("No Dataset Found", 404)

    steward_fields = 'name', 'id', 'url', 'phone', 'address', 'publisher', 'license'
    steward_values = [request.form.get(f, None) for f in steward_fields]

    steward_values[steward_fields.index('id')] = '0' # This is assigned in segments_transform()
    steward_values[steward_fields.index('publisher')] = 'no' # Better safe than sorry

    # Removed an unused "cols" tuple copied from name_trails();
    # renamed "file" to avoid shadowing the builtin.
    csv_buffer = StringIO()
    writer = csv.writer(csv_buffer)
    writer.writerow(steward_fields)
    writer.writerow([(v or '').encode('utf8') for v in steward_values])

    stewards_path = '{0}/opentrails/stewards.csv'.format(dataset.id)
    datastore.write(stewards_path, csv_buffer)

    return redirect('/datasets/' + dataset.id + '/stewards', code=303)
Exemplo n.º 29
0
def transform_trailheads(dataset_id):
    '''
    Transform the uploaded trailheads into OpenTrails and upload the result.
    '''
    datastore = make_datastore(app.config['DATASTORE'])
    dataset = get_dataset(datastore, dataset_id)
    if not dataset:
        return make_response("No Dataset Found", 404)

    # Pull the uploaded trailheads archive out of the datastore.
    uploaded_zip = datastore.read(
        '{0}/uploads/trail-trailheads.geojson.zip'.format(dataset.id))

    # Unpack, parse, and run the trailheads transform.
    uploaded_path = unzip(uploaded_zip, '.geojson', [])
    uploaded_geojson = json.load(open(uploaded_path))
    messages, opentrails_geojson = trailheads_transform(uploaded_geojson, dataset)

    # Save the transform messages for later display.
    datastore.write(dataset.id + "/opentrails/trailheads-messages.json",
                    StringIO(json.dumps(messages)))

    # Zip the transformed trailheads.
    result_zip = StringIO()
    result_raw = json.dumps(opentrails_geojson, sort_keys=True)
    zip_file(result_zip, result_raw, 'trailheads.geojson')

    # Upload the zipped result.
    datastore.write('{0}/opentrails/trailheads.geojson.zip'.format(dataset.id),
                    result_zip)

    return redirect('/datasets/' + dataset.id + '/transformed-trailheads',
                    code=303)
Exemplo n.º 30
0
def transformed_trailheads(dataset_id):
    '''
    Compare uploaded trailhead properties with their transformed versions.

    Loads sample features from both the uploaded and transformed
    trailheads files plus the stored transform messages, then renders
    the comparison page. 404s when the dataset does not exist.
    '''
    datastore = make_datastore(app.config['DATASTORE'])
    dataset = get_dataset(datastore, dataset_id)
    if not dataset:
        return make_response("No Dataset Found", 404)

    # Download the original trailheads file
    uploaded_features = get_sample_trailhead_features(dataset)
    uploaded_keys = list(sorted(uploaded_features[0]['properties'].keys()))

    # Download the transformed trailheads file
    transformed_features = get_sample_transformed_trailhead_features(dataset)
    transformed_keys = list(
        sorted(transformed_features[0]['properties'].keys()))

    # Download the transformed trailheads messages file
    messages_path = '{0}/opentrails/trailheads-messages.json'.format(
        dataset.id)
    data = json.load(datastore.read(messages_path))

    # Renamed loop targets; the originals shadowed builtins type() and id().
    try:
        messages = [(msg_type, msg_id, words)
                    for (msg_type, msg_id, words) in data]
    except ValueError:
        # Old stored format had no id column.
        messages = [(msg_type, None, words) for (msg_type, words) in data]

    message_types = [message[0] for message in messages]

    # Renamed from "vars", which shadowed the builtin.
    template_args = dict(dataset=dataset,
                         messages=messages,
                         uploaded_keys=uploaded_keys,
                         uploaded_features=uploaded_features,
                         transformed_features=transformed_features,
                         transformed_keys=transformed_keys,
                         transform_succeeded=bool('error' not in message_types))

    return render_template('dataset-08-transformed-trailheads.html', **template_args)
Exemplo n.º 31
0
def transformed_trailheads(dataset_id):
    ''' Show uploaded vs. transformed trailhead samples with messages. '''
    store = make_datastore(app.config['DATASTORE'])
    found = get_dataset(store, dataset_id)
    if not found:
        return make_response("No Dataset Found", 404)

    # Sample the uploaded trailheads.
    before = get_sample_trailhead_features(found)
    before_keys = list(sorted(before[0]['properties'].keys()))

    # Sample the transformed trailheads.
    after = get_sample_transformed_trailhead_features(found)
    after_keys = list(sorted(after[0]['properties'].keys()))

    # Load the transform messages recorded earlier.
    raw = json.load(store.read(
        '{0}/opentrails/trailheads-messages.json'.format(found.id)))

    try:
        notes = [(kind, ident, words) for (kind, ident, words) in raw]
    except ValueError:
        # Fall back to the old two-column message format.
        notes = [(kind, None, words) for (kind, words) in raw]

    kinds = [note[0] for note in notes]

    return render_template('dataset-08-transformed-trailheads.html',
                           dataset=found,
                           messages=notes,
                           uploaded_keys=before_keys,
                           uploaded_features=before,
                           transformed_features=after,
                           transformed_keys=after_keys,
                           transform_succeeded=bool('error' not in kinds))
Exemplo n.º 32
0
def create_steward(dataset_id):
    '''
    Write stewards.csv for a dataset from the submitted form fields.

    The id column is hard-coded to "0" (reassigned later in
    segments_transform) and publisher is forced to "no". Redirects to
    the stewards page when done; 404s for an unknown dataset.
    '''
    datastore = make_datastore(app.config['DATASTORE'])
    dataset = get_dataset(datastore, dataset_id)
    if not dataset:
        return make_response("No Dataset Found", 404)

    steward_fields = 'name', 'id', 'url', 'phone', 'address', 'publisher', 'license'
    steward_values = [request.form.get(f, None) for f in steward_fields]

    steward_values[steward_fields.index(
        'id')] = '0'  # This is assigned in segments_transform()
    steward_values[steward_fields.index(
        'publisher')] = 'no'  # Better safe than sorry

    # Dropped the unused "cols" tuple (copy-paste from name_trails) and
    # renamed "file" to avoid shadowing the builtin.
    csv_buffer = StringIO()
    writer = csv.writer(csv_buffer)
    writer.writerow(steward_fields)
    writer.writerow([(v or '').encode('utf8') for v in steward_values])

    stewards_path = '{0}/opentrails/stewards.csv'.format(dataset.id)
    datastore.write(stewards_path, csv_buffer)

    return redirect('/datasets/' + dataset.id + '/stewards', code=303)