Beispiel #1
0
def upload(dataset_id):
    '''
    Upload a zip of one shapefile to datastore.

    Stores the original archive, converts the contained shapefile to
    GeoJSON, stores a zipped copy of that GeoJSON, then redirects to the
    sample-segment view. Returns a 403 response for non-.zip uploads.
    '''
    datastore = make_datastore(app.config['DATASTORE'])

    # Check that they uploaded a .zip file. Use .get() so a request with
    # no 'file' part returns the error response instead of raising KeyError.
    upload_file = request.files.get('file')
    if not upload_file or not allowed_file(upload_file.filename):
        return make_response("Only .zip files allowed", 403)

    # Upload original file to S3
    zip_buff = StringIO(upload_file.read())
    zip_base = '{0}/uploads/trail-segments'.format(dataset_id)
    datastore.write(zip_base + '.zip', zip_buff)

    # Get geojson data from shapefile
    shapefile_path = unzip(zip_buff)
    geojson_obj = shapefile2geojson(shapefile_path)

    # Compress geojson file
    geojson_zip = StringIO()
    geojson_raw = json.dumps(geojson_obj)
    zip_file(geojson_zip, geojson_raw, 'trail-segments.geojson')

    # Upload .geojson.zip file to datastore
    datastore.write(zip_base + '.geojson.zip', geojson_zip)

    # Show sample data from original file
    return redirect('/datasets/' + dataset_id + "/sample-segment")
Beispiel #2
0
def name_trails(dataset_id):
    '''
    Build a named_trails.csv from a dataset's transformed segments.

    Reads the transformed segments GeoJSON from the datastore, derives
    named trails from its features, writes them as CSV back to the
    datastore, and redirects (303) to the named-trails view. Returns a
    404 response when the dataset does not exist.
    '''
    datastore = make_datastore(app.config['DATASTORE'])
    dataset = get_dataset(datastore, dataset_id)
    if not dataset:
        return make_response("No Dataset Found", 404)

    # Download the transformed segments file
    transformed_segments_path = '{0}/opentrails/segments.geojson.zip'.format(dataset.id)
    transformed_segments_zip = datastore.read(transformed_segments_path)

    # Unzip it; use a context manager so the extracted file handle is
    # closed instead of being leaked via json.load(open(...)).
    segments_path = unzip(transformed_segments_zip, '.geojson', [])
    with open(segments_path) as segments_file:
        transformed_segments = json.load(segments_file)

    # Generate a list of (name, ids) tuples
    named_trails = make_named_trails(transformed_segments['features'])

    # Serialize to CSV in memory ('csv_buffer' avoids shadowing the
    # builtin name 'file' used by the original).
    csv_buffer = StringIO()
    cols = 'id', 'name', 'segment_ids', 'description', 'part_of'
    writer = csv.writer(csv_buffer)
    writer.writerow(cols)
    for row in named_trails:
        # Missing values become empty strings; encode for byte output.
        writer.writerow([(row[c] or '').encode('utf8') for c in cols])

    named_trails_path = '{0}/opentrails/named_trails.csv'.format(dataset.id)
    datastore.write(named_trails_path, csv_buffer)

    return redirect('/datasets/' + dataset.id + '/named-trails', code=303)
Beispiel #3
0
def transform_trailheads(dataset_id):
    '''
    Grab a zip file off of datastore, unzip it, transform the contained
    trailheads GeoJSON into OpenTrails form, and upload the results.

    Writes both the transformation messages and the zipped transformed
    trailheads back to the datastore, then redirects (303) to the
    transformed-trailheads view. Returns 404 when the dataset is missing.
    '''
    datastore = make_datastore(app.config['DATASTORE'])
    dataset = get_dataset(datastore, dataset_id)
    if not dataset:
        return make_response("No Dataset Found", 404)

    # Download the original trailheads file
    up_trailheads_name = '{0}/uploads/trail-trailheads.geojson.zip'.format(dataset.id)
    up_trailheads_zip = datastore.read(up_trailheads_name)

    # Unzip it; use a context manager so the extracted file handle is
    # closed instead of being leaked via json.load(open(...)).
    up_trailheads_path = unzip(up_trailheads_zip, '.geojson', [])
    with open(up_trailheads_path) as trailheads_file:
        up_trailheads = json.load(trailheads_file)
    messages, ot_trailheads = trailheads_transform(up_trailheads, dataset)

    # Save messages for output
    transform_messages_path = dataset.id + "/opentrails/trailheads-messages.json"
    datastore.write(transform_messages_path, StringIO(json.dumps(messages)))

    # Make a zip from transformed trailheads; sort_keys keeps the
    # serialized output deterministic across runs.
    ot_trailheads_zip = StringIO()
    ot_trailheads_raw = json.dumps(ot_trailheads, sort_keys=True)
    zip_file(ot_trailheads_zip, ot_trailheads_raw, 'trailheads.geojson')

    # Upload transformed trailheads and messages
    zip_path = '{0}/opentrails/trailheads.geojson.zip'.format(dataset.id)
    datastore.write(zip_path, ot_trailheads_zip)

    return redirect('/datasets/' + dataset.id + '/transformed-trailheads', code=303)
Beispiel #4
0
def upload(dataset_id):
    '''
    Upload a zip of one shapefile to datastore.

    Stores the original archive, converts the contained shapefile to
    GeoJSON, stores a zipped copy of that GeoJSON, then redirects to the
    sample-segment view. Returns a 403 response for non-.zip uploads.
    '''
    datastore = make_datastore(app.config['DATASTORE'])

    # Check that they uploaded a .zip file. Use .get() so a request with
    # no 'file' part returns the error response instead of raising KeyError.
    upload_file = request.files.get('file')
    if not upload_file or not allowed_file(upload_file.filename):
        return make_response("Only .zip files allowed", 403)

    # Upload original file to S3
    zip_buff = StringIO(upload_file.read())
    zip_base = '{0}/uploads/trail-segments'.format(dataset_id)
    datastore.write(zip_base + '.zip', zip_buff)

    # Get geojson data from shapefile
    shapefile_path = unzip(zip_buff)
    geojson_obj = shapefile2geojson(shapefile_path)

    # Compress geojson file
    geojson_zip = StringIO()
    geojson_raw = json.dumps(geojson_obj)
    zip_file(geojson_zip, geojson_raw, 'trail-segments.geojson')

    # Upload .geojson.zip file to datastore
    datastore.write(zip_base + '.geojson.zip', geojson_zip)

    # Show sample data from original file
    return redirect('/datasets/' + dataset_id + "/sample-segment")
Beispiel #5
0
def name_trails(dataset_id):
    '''
    Build a named_trails.csv from a dataset's transformed segments.

    Reads the transformed segments GeoJSON from the datastore, derives
    named trails from its features, writes them as CSV back to the
    datastore, and redirects (303) to the named-trails view. Returns a
    404 response when the dataset does not exist.
    '''
    datastore = make_datastore(app.config['DATASTORE'])
    dataset = get_dataset(datastore, dataset_id)
    if not dataset:
        return make_response("No Dataset Found", 404)

    # Download the transformed segments file
    transformed_segments_path = '{0}/opentrails/segments.geojson.zip'.format(
        dataset.id)
    transformed_segments_zip = datastore.read(transformed_segments_path)

    # Unzip it; use a context manager so the extracted file handle is
    # closed instead of being leaked via json.load(open(...)).
    segments_path = unzip(transformed_segments_zip, '.geojson', [])
    with open(segments_path) as segments_file:
        transformed_segments = json.load(segments_file)

    # Generate a list of (name, ids) tuples
    named_trails = make_named_trails(transformed_segments['features'])

    # Serialize to CSV in memory ('csv_buffer' avoids shadowing the
    # builtin name 'file' used by the original).
    csv_buffer = StringIO()
    cols = 'id', 'name', 'segment_ids', 'description', 'part_of'
    writer = csv.writer(csv_buffer)
    writer.writerow(cols)
    for row in named_trails:
        # Missing values become empty strings; encode for byte output.
        writer.writerow([(row[c] or '').encode('utf8') for c in cols])

    named_trails_path = '{0}/opentrails/named_trails.csv'.format(dataset.id)
    datastore.write(named_trails_path, csv_buffer)

    return redirect('/datasets/' + dataset.id + '/named-trails', code=303)
def merger(files, env_to_merge, dossier_work, separateurcsv=','):
    '''
    Merge per-environment CSV exports into one file per dataset.

    For each (name, fields) entry in *files*, reads the matching
    "<name>.txt" from every environment directory under *dossier_work*,
    remaps columns to the declared field order, optionally prefixes marked
    fields with the environment name, and writes the merged rows to
    dossier_work/new/<name>.txt.

    files         -- list of (filename, fields); fields is a list of
                     (field_name, prefix_flag) pairs where prefix_flag == 1
                     means prepend "<env>:" to the value.
    env_to_merge  -- environment names; "<env>.zip" archives are expected
                     in dossier_work, extracted, then deleted.
    dossier_work  -- working directory (a trailing slash is assumed by the
                     path concatenation -- TODO confirm with callers).
    separateurcsv -- delimiter of the source CSV files.
    '''
    # Create the output directory for merged files.
    if not os.path.exists(dossier_work + 'new/'):
        os.makedirs(dossier_work + 'new/')

    # Extract each environment archive into its own directory, then delete
    # the downloaded zip.
    for environement in env_to_merge:
        if not os.path.exists(dossier_work + environement + '/'):
            os.makedirs(dossier_work + environement + '/')
        unzip(dossier_work + environement + '.zip', dossier_work + environement + '/')
        os.remove(dossier_work + environement + '.zip')

    for fichier in files:
        # BUG FIX: the original printed `environement` here, which is the
        # stale value left over from the extraction loop above (and a
        # NameError when env_to_merge is empty); log only the file name.
        print(fichier[0])
        # BUG FIX: the output handle was never closed; close it even if a
        # source file raises mid-merge.
        output = codecs.open(dossier_work + 'new/' + fichier[0] + '.txt', 'w', 'utf-8')
        try:
            # Write the header line from the declared field names.
            header = [champs[0] for champs in fichier[1]]
            output.write(csv_list_to_raw_str(header))
            for environement in env_to_merge:
                source_path = dossier_work + environement + '/' + fichier[0] + '.txt'
                if not os.path.isfile(source_path):
                    print("FILE " + environement + " - " + fichier[0] + " NOT EXIST")
                    continue
                marker = environement.replace('-', '_') + ':'
                # 'source' avoids shadowing the builtin 'file'; the handle
                # was also leaked in the original.
                source = codecs.open(source_path, 'r', 'utf-8')
                try:
                    reader = csv.reader(source, delimiter=separateurcsv,
                                        quoting=csv.QUOTE_MINIMAL)
                    positions = []
                    for count, row in enumerate(reader):
                        if count == 0:
                            # Header row: locate each declared field's column
                            # (findposition returns the 999 sentinel when the
                            # field is absent -- TODO confirm in findposition).
                            positions = [findposition(champs[0], row)
                                         for champs in fichier[1]]
                        else:
                            row_to_write = []
                            for i, champs in enumerate(fichier[1]):
                                if positions[i] != 999:
                                    value = row[positions[i]]
                                    if champs[1] == 1:
                                        # Tag this field with its environment.
                                        value = marker + value
                                else:
                                    # Field missing from this export.
                                    value = ''
                                row_to_write.append(value)
                            output.write(csv_list_to_raw_str(row_to_write))
                finally:
                    source.close()
        finally:
            output.close()
Beispiel #7
0
def transform_trailheads(dataset_id):
    '''
    Grab a zip file off of datastore, unzip it, transform the contained
    trailheads GeoJSON into OpenTrails form, and upload the results.

    Writes both the transformation messages and the zipped transformed
    trailheads back to the datastore, then redirects (303) to the
    transformed-trailheads view. Returns 404 when the dataset is missing.
    '''
    datastore = make_datastore(app.config['DATASTORE'])
    dataset = get_dataset(datastore, dataset_id)
    if not dataset:
        return make_response("No Dataset Found", 404)

    # Download the original trailheads file
    up_trailheads_name = '{0}/uploads/trail-trailheads.geojson.zip'.format(
        dataset.id)
    up_trailheads_zip = datastore.read(up_trailheads_name)

    # Unzip it; use a context manager so the extracted file handle is
    # closed instead of being leaked via json.load(open(...)).
    up_trailheads_path = unzip(up_trailheads_zip, '.geojson', [])
    with open(up_trailheads_path) as trailheads_file:
        up_trailheads = json.load(trailheads_file)
    messages, ot_trailheads = trailheads_transform(up_trailheads, dataset)

    # Save messages for output
    transform_messages_path = dataset.id + "/opentrails/trailheads-messages.json"
    datastore.write(transform_messages_path, StringIO(json.dumps(messages)))

    # Make a zip from transformed trailheads; sort_keys keeps the
    # serialized output deterministic across runs.
    ot_trailheads_zip = StringIO()
    ot_trailheads_raw = json.dumps(ot_trailheads, sort_keys=True)
    zip_file(ot_trailheads_zip, ot_trailheads_raw, 'trailheads.geojson')

    # Upload transformed trailheads and messages
    zip_path = '{0}/opentrails/trailheads.geojson.zip'.format(dataset.id)
    datastore.write(zip_path, ot_trailheads_zip)

    return redirect('/datasets/' + dataset.id + '/transformed-trailheads',
                    code=303)
Beispiel #8
0
    # func.ShowImage(result_of_h1_pool, PixelsToBeRedH1pool,'Final_image_No1_0.20 .png', 0.2, RowNumber=4, ColNumber=8)

    PixelsToBeRedConv1 = func.unpooling(PixelsToBeRedH1pool, conv_1_result)
    # func.ShowImage(conv_1_result, PixelsToBeRedConv1,'Final_image_No1_0.20 .png', 0.2, RowNumber=4, ColNumber=8)

    PixelsToBeRedImage = func.Deconvolution(PixelsToBeRedConv1, Conv1Kernel,
                                            image)
    PixelsToBeRedImage = func.CollectRedValues(PixelsToBeRedImage, image)

    func.ShowImage(image, PixelsToBeRedImage, TrueNum, threshold, correct,
                   probabilities[0, TestNumber], ShowNotSave)

    return correct, probabilities[0, TestNumber]


# Unpack the gzipped ubyte archives in the working directory
# (func is a project helper module imported elsewhere in this file).
func.unzip('ubyte.gz')

# NOTE(review): these empty-array placeholders are immediately overwritten
# by the LoadTrainData call below; they look like dead initializations.
X_data = np.array([[]])
y_data = np.array([[]])
X_data, y_data = func.LoadTrainData('./', 'train')
X_test_cand, y_test_cand = func.LoadTrainData('./', 't10k')

# Split the loaded data into 55,000 training rows and the remainder for
# validation.
X_train, y_train = X_data[:55000, :], y_data[:55000]
X_valid, y_valid = X_data[55000:, :], y_data[55000:]

# Sort the test candidates by label so samples of the same class become
# contiguous; presumably the loop that follows records where each label's
# run starts -- verify against the rest of the script.
X_test_cand = X_test_cand[np.argsort(y_test_cand)]
y_test_cand = y_test_cand[np.argsort(y_test_cand)]

# Accumulators for the label-boundary scan that follows.
tmp = 0
StartIndex = np.array([0])
for index, i in enumerate(y_test_cand):