Example #1
def update_instance_data(args):
    azure_pricing = get_pricing()
    jsonfp = dumpjson(azure_pricing)
    if args.upload:
        upload('azure_instance_data.json', jsonfp)
    else:
        write_go('azure', jsonfp, dumpjson({}))
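
The instance-data examples (#1, #2, #4, #9, #12, and #13) all follow the same shape: build a pricing dictionary, serialize it with dumpjson, then either upload the result to object storage or hand it to write_go. Those helpers are not shown on this page; the sketch below is one plausible reading of dumpjson and upload, assuming an in-memory JSON buffer and an S3 upload via boto3 (the bucket name is invented for illustration).

# Hedged sketch of the dumpjson/upload helpers these examples appear to assume.
# boto3 and the bucket name are assumptions, not the projects' actual code.
import io
import json

import boto3


def dumpjson(data):
    """Serialize data to JSON and return a file-like object rewound to the start."""
    fp = io.BytesIO(json.dumps(data, indent=2, sort_keys=True).encode('utf-8'))
    fp.seek(0)
    return fp


def upload(key, fileobj, bucket='example-instance-data'):
    """Upload a file-like object to S3 under the given key."""
    boto3.client('s3').upload_fileobj(fileobj, bucket, key)
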
Example #2
def update_instance_data(args):
    raw_data = get_raw_instance_data()
    simple_instance_data = get_instance_data(raw_data)
    jsonfp = dumpjson(simple_instance_data)
    if args.upload:
        upload('aws_instance_data.json', jsonfp)
    else:
        write_go('aws', jsonfp)
Example #3
def test_upload_file(self):
    with tempfile.TemporaryDirectory() as dirname:
        filepath = os.path.join(dirname, 'test.txt')
        with open(filepath, 'w') as f:
            f.write('upload')
        r = upload(self.url, file=filepath)
        assert r.status_code == HTTPStatus.OK
        r = upload(self.url, file=filepath, filename='test.txt')
        assert r.status_code == HTTPStatus.OK
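
This test (and the io variant in Example #15) exercises a keyword-style signature: upload(url, file=..., filename=...). The helper itself is not listed here; below is a minimal sketch of an implementation that would satisfy both tests, assuming the requests library and a server that accepts multipart/form-data under the field name 'file' (both assumptions, not the tested project's code).

# Minimal sketch compatible with the calls in the tests above; the 'file'
# form field and the requests dependency are assumptions.
import os

import requests


def upload(url, file, filename=None):
    """POST a path or file-like object to url as multipart form data."""
    if isinstance(file, (str, os.PathLike)):
        name = filename or os.path.basename(file)
        with open(file, 'rb') as f:
            return requests.post(url, files={'file': (name, f)})
    return requests.post(url, files={'file': (filename or 'upload', file)})
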
Example #4
def update_instance_data(args):
    print('loading raw instance data')
    raw_data = get_raw_instance_data()
    print('getting instance data')
    simple_instance_data = get_instance_data(raw_data)
    jsonfp = dumpjson(simple_instance_data)
    if args.upload:
        upload('aws_instance_data.json', jsonfp)
    else:
        write_go('aws', jsonfp, dumpjson({}))
Example #5
def add_player():
    # if not session.get('logged_in'):
    #     redirect(url_for('main.login'))
    first_name = request.form['first_name'].strip().title()
    last_name = request.form['last_name'].strip().title()
    sex = {
        '0':'F',
        '1':'M',
        '2':'N'}[request.form['sex']]
    birth = request.form['bday']
    email = request.form['email'].strip().lower()
    entered = str(datetime.now())
    activity = {
        '0':'laser',
        '1':'learnToSkate'
    }[request.form['activity']]
    visit_time = "0000-00-00" #***ALERT***
    field = "1" #***ALERT***
    customer = [
        first_name,
        last_name,
        sex,
        birth,
        email,
        entered,
        visit_time,
        field
    ]
    if upload(customer,activity):
        flash('New entry was successfully posted')
    else:
        flash('IntegrityError')
    return redirect(url_for('customer.register'))
Example #6
def process():
	for item in get():
		try:
			item['uri'] = 'http://home.umonkey.net/public/Biopsyhoz_Dihanie_part1_2007.zip'
			zipname = download(item['uri'])
			try:
				lpath = processZipFile(zipname, realname=item['uri'].split('/')[-1], owner=item['owner'])
				print 'GOT:', lpath
				if lpath is not None:
					upload(lpath)
					submit(item['id'], settings['s3base'] + os.path.split(lpath)[-1] + '/album.xml')
				os.remove(zipname)
			except:
				os.remove(zipname)
				raise
		except Exception, e:
			print "    ERROR: " + str(e)
			traceback.print_exc()
Example #7
def create_input_files(tile_id):

    print "Getting extent of", tile_id
    xmin, ymin, xmax, ymax = uu.coords(tile_id)

    # # Soil tiles are already processed, so there's no need to include them here.
    # # Below is the old code for tile-izing the histosole soil raster.
    # # Leaving this in case I ever add in soil processing again.
    # print "clip soil"
    # extra_param = ['-tr', '.00025', '.00025', '-dstnodata', '0']
    # clip_soil_tile = util.clip('hwsd_oc_final.tif', '{}_soil.tif'.format(tile_id), xmin, ymin, xmax, ymax, extra_param)
    #
    # print "removing no data flag from soil"
    # cmd = ['gdal_edit.py', '-unsetnodata', clip_soil_tile]
    # subprocess.check_call(cmd)
    #
    # print "uploading soil tile to s3"
    # util.upload(clip_soil_tile, cn.soil_C_processed_dir)

    print "Rasterizing ecozone"
    rasterized_eco_zone_tile = util.rasterize(
        'fao_ecozones_bor_tem_tro.shp',
        "{}_fao_ecozones_bor_tem_tro.tif".format(tile_id), xmin, ymin, xmax,
        ymax, '.008', 'Byte', 'recode', '0')

    print "Resampling eco zone"
    resampled_ecozone = util.resample(
        rasterized_eco_zone_tile,
        "{0}_{1}.tif".format(tile_id, cn.pattern_fao_ecozone_processed))

    print "Uploading processed ecozone"
    util.upload(resampled_ecozone, cn.fao_ecozone_processed_dir)

    print "Clipping srtm"
    tile_srtm = util.clip('srtm.vrt', '{}_srtm.tif'.format(tile_id), xmin,
                          ymin, xmax, ymax)

    print "Resampling srtm"
    tile_res_srtm = util.resample(
        tile_srtm, '{0}_{1}.tif'.format(tile_id, cn.pattern_srtm))

    print "Uploading processed srtm"
    util.upload(tile_res_srtm, cn.srtm_processed_dir)

    print "Clipping precipitation"
    clipped_precip_tile = util.clip('add_30s_precip.tif',
                                    '{}_clip_precip.tif'.format(tile_id), xmin,
                                    ymin, xmax, ymax)

    print "Resampling precipitation"
    resample_precip_tile = util.resample(
        clipped_precip_tile, '{0}_{1}.tif'.format(tile_id, cn.pattern_precip))

    print "Uploading processed precipitation"
    util.upload(resample_precip_tile, cn.precip_processed_dir)
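
Example #7 hands each processed tile to util.upload together with an S3 destination prefix from the cn constants module. The helper is not reproduced here; a common pattern for this kind of pipeline is to shell out to the AWS CLI, and the sketch below assumes exactly that (it is a guess, not the project's actual util module).

# Hypothetical util.upload(): copy a local raster to an S3 prefix via the AWS CLI.
import subprocess


def upload(local_file, s3_dir):
    """Copy local_file to the given s3:// destination prefix."""
    subprocess.check_call(['aws', 's3', 'cp', local_file, s3_dir])
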
Example #8
def main(dirname, graph_name, username, password):
    server_root = urlparse.urlparse(graph_name)
    server_root = urlparse.urlunparse(server_root[:2] + ('/','','',''))

    graph = rdflib.ConjunctiveGraph()

    for root, dirs, files in os.walk(dirname):
        for filename in files:
            if filename.startswith('.'):
                continue
            filename = os.path.join(root, filename)
            uri = server_root + filename[len(dirname):]
            uri, format = uri.rsplit('.', 1)
            if uri.endswith('/index'):
                uri = uri[:-5]
            format = formats[format]

            graph.parse(open(filename, 'r'), uri, format)

    contents = StringIO.StringIO( graph.serialize(format='n3'))
    response = upload(contents, graph_name+'.n3', username, password)
    print response.code
    print response.read()
Example #9
        for family in families_available:
            if family not in machine_families_for_custom_vm_sizes:
                continue
            gpus = get_available_gpus(zone, family, supported_gpus)
            data = make_custom_instance_data(zone, family, pricing, gpus)
            if data:
                zone_custom_vm_data.append(data)
        instance_data[zone] = zone_data
        custom_instance_data[zone] = zone_custom_vm_data
    return instance_data, custom_instance_data


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('--project_id',
                        help='Google Cloud project ID.',
                        default='elotl-kip')
    parser.add_argument('--upload', action="store_true", default=False)
    args = parser.parse_args()
    instance_data, custom_instance_data = get_instance_data(args.project_id)
    if args.upload:
        jsonfp = dumpjson(instance_data)
        upload('gce_instance_data.json', jsonfp)
        jsonfp = dumpjson(custom_instance_data)
        upload('gce_custom_instance_data.json', jsonfp)
    else:
        write_go('gce', dumpjson(instance_data),
                 dumpjson(custom_instance_data))
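
write_go is called in these scripts with one cloud name ('gce', 'aws', or 'azure') plus one or two JSON buffers, which suggests it renders the data into a generated Go source file. The real helper is not shown; the sketch below is only one way it could look, with the template, output file name, and default empty custom payload all assumed.

# Hypothetical write_go(): embed serialized JSON in a generated Go source file.
def write_go(cloud, instance_json_fp, custom_json_fp=None):
    """Write <cloud>_instance_data.go with the JSON embedded as raw string constants."""
    def _read(fp):
        if fp is None:
            return '{}'
        data = fp.read()
        return data.decode('utf-8') if isinstance(data, bytes) else data

    with open('{}_instance_data.go'.format(cloud), 'w') as out:
        out.write('package {}\n\n'.format(cloud))
        out.write('const instanceData = `{}`\n\n'.format(_read(instance_json_fp)))
        out.write('const customInstanceData = `{}`\n'.format(_read(custom_json_fp)))
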
Example #10
File: course.py Project: kq2/Ricin
    def upload(self):
        util.upload(self.folder)

        assets_folder = self.folder.rpartition('/')[0] + '/assets'
        util.upload(assets_folder)
Example #11
    util.generate_build(branch='master',
                        number=dotnet_commit,
                        timestamp=timestamp,
                        type='rolling',
                        repository='https://github.com/dotnet/performance')

    logger.info("Generating measurement json file")
    util.generate_measurement_csv(
        datafile=path.join(reports_dir, 'benchview.csv'),
        metric='Average Throughput',
        unit='Mbps',
        ascending=False,
    )

    logger.info("Generating submission json file")
    config = {
        'Transport': 'HTTP',
        'Region': 'US West',
        'Number of Files': '50',
        'File Size': '100 MB'
    }
    util.generate_submission(group="Network",
                             type="rolling",
                             config_name="standard",
                             config=config,
                             arch=platform.machine(),
                             machinepool='perfsnake')

    logger.info("Uploading submission to BenchView")
    util.upload(container="dotnetcli")
Example #12
def update_storage_data(args):
    pricing = get_storage_pricing()
    jsonfp = dumpjson(pricing)
    if args.upload:
        upload('aws_storage_data.json', jsonfp)
Example #13
def update_network_data(args):
    elb_pricing = get_elb_pricing()
    jsonfp = dumpjson(elb_pricing)
    if args.upload:
        upload('aws_network_data.json', jsonfp)
Example #14
import urllib2, base64, sys

from util import upload

if __name__ == '__main__':
    args = sys.argv[1:]
    args[0] = open(args[0], 'r')

    response = upload(*args)

    print response.code
    print response.read()

Example #15
def test_upload_io(self):
    f = io.StringIO()
    f.write('upload')
    f.seek(0)
    r = upload(self.url, file=f, filename='test.txt')
    assert r.status_code == HTTPStatus.OK