示例#1
0
def build_managed_osm(conn, body):
    """Build an OSM extract for a managed request and publish completion.

    Downloads the request's GTFS feeds, generates an OSM extract, uploads
    it to the OSM S3 bucket, and publishes an "osm_extract_done" message.
    When the request asks for it, a "build_managed" message is also
    published to trigger a full graph rebuild.

    Args:
        conn: messaging connection providing a Producer factory
            (kombu-style) — presumably; verify against caller.
        body: dict with keys 'id' and 'feeds', plus optional
            'trigger_rebuild' and 'otp_version'.
    """
    try:
        print("build_managed_osm")

        req_name = "managed_%s" % get_req_name(body['id'])
        directory = init_directory(req_name)

        feeds = body['feeds']

        download_managed_gtfs(directory, feeds)

        builder.generate_osm_extract(directory)

        # Upload the generated extract to S3 under "<id>.osm".
        key = Key(osm_bucket())
        osm_key = "%s.osm" % body['id']
        key.key = osm_key
        key.set_contents_from_filename(os.path.join(directory, 'extract.osm'))

        print('uploaded osm')

        publisher = conn.Producer(routing_key="osm_extract_done",
                                  exchange=exchange)
        publisher.publish({'id': body['id'], 'osm_key': osm_key})

        print('published extract_osm_done')

        # Only an explicit True triggers the rebuild (get() avoids the
        # separate membership test of the original).
        if body.get('trigger_rebuild') is True:
            publisher = conn.Producer(routing_key="build_managed",
                                      exchange=exchange)
            publisher.publish({
                'id': body['id'],
                'osm_key': osm_key,
                'feeds': feeds,
                'otp_version': body['otp_version']
            })

    except Exception:
        # Was a bare except, which also swallowed SystemExit and
        # KeyboardInterrupt; Exception keeps the worker alive without
        # hiding interpreter-exit signals. Log to a timestamped error
        # file (now closed explicitly instead of leaked) and to stderr.
        now = datetime.now()
        errfile = "/var/otp/gb_err_%s_%s" % (body['id'], now.strftime("%F-%T"))
        with open(errfile, "a") as f:
            traceback.print_exc(file=f)
        traceback.print_exc()
示例#2
0
def build_managed_osm(conn, body):
    """Produce an OSM extract for a managed build request.

    Fetches the request's GTFS feeds, runs the OSM extract generator,
    pushes the resulting extract to S3, and announces completion on the
    message bus. If the request carries trigger_rebuild=True, a follow-up
    "build_managed" message is published to start a full rebuild.
    """
    try:
        print("build_managed_osm")

        request_name = "managed_%s" % get_req_name(body['id'])
        work_dir = init_directory(request_name)

        feed_list = body['feeds']
        download_managed_gtfs(work_dir, feed_list)

        builder.generate_osm_extract(work_dir)

        # Ship the extract to the OSM bucket as "<id>.osm".
        osm_key = "%s.osm" % body['id']
        upload = Key(osm_bucket())
        upload.key = osm_key
        upload.set_contents_from_filename(
            os.path.join(work_dir, 'extract.osm'))

        print('uploaded osm')

        done_msg = {'id': body['id'], 'osm_key': osm_key}
        conn.Producer(routing_key="osm_extract_done",
                      exchange=exchange).publish(done_msg)

        print('published extract_osm_done')

        # A rebuild is triggered only when explicitly requested with True.
        if 'trigger_rebuild' in body and body['trigger_rebuild'] is True:
            rebuild_msg = {
                'id': body['id'],
                'osm_key': osm_key,
                'feeds': feed_list,
                'otp_version': body['otp_version'],
            }
            conn.Producer(routing_key="build_managed",
                          exchange=exchange).publish(rebuild_msg)

    except:
        # Dump the traceback to a per-request error file and to stderr.
        stamp = datetime.now().strftime("%F-%T")
        err_path = "/var/otp/gb_err_%s_%s" % (body['id'], stamp)
        traceback.print_exc(file=open(err_path, "a"))
        traceback.print_exc()
示例#3
0
def create_instance(conn, body):
    """Build an OTP graph for a request and publish the result.

    Downloads the request's GTFS files from S3, generates an OSM extract
    and graph-builder config, runs the graph builder, uploads the graph
    (on success) and a tarball of the working directory to S3, publishes
    a "graph_done" message, and stores the builder output on S3.

    Args:
        conn: messaging connection providing a Producer factory
            (kombu-style) — presumably; verify against caller.
        body: dict with keys 'request_id', 'files', 'extra_properties'
            and 'fare_factory'.
    """
    try:
        req_name = get_req_name(body['request_id'])
        directory = init_directory(req_name)

        # Download each GTFS file into <directory>/gtfs, recording its
        # extra properties keyed by basename. NOTE: the counter advances
        # only for non-None entries, so None placeholders in 'files' must
        # NOT have matching entries in 'extra_properties' — do not replace
        # this with enumerate(files).
        os.makedirs(os.path.join(directory, 'gtfs'))
        files = body['files']
        extra_props_list = body['extra_properties']
        extra_props_dict = {}
        i = 0
        for s3_id in files:
            if s3_id is None:
                continue

            print("id: " + s3_id)

            bucket = gtfs_bucket()
            key = Key(bucket)
            key.key = s3_id

            basename = os.path.basename(s3_id)
            path = os.path.join(directory, 'gtfs', basename)

            key.get_contents_to_filename(path)

            extra_props_dict[basename] = extra_props_list[i]
            i += 1

        # Prepare and run the graph builder.
        builder.generate_osm_extract(directory)
        builder.generate_graph_config(directory, body['fare_factory'],
                                      extra_props_dict)
        gbresults = builder.run_graph_builder(directory)

        print("finished gb: %s" % gbresults['success'])

        msgparams = {
            'request_id': body['request_id'],
            'success': gbresults['success'],
        }

        bucket = graph_bucket()

        if gbresults['success']:
            # Upload the graph under a timestamped key and make it public.
            key = Key(bucket)
            key.key = "uploads/%s/Graph_%s.obj" % (
                body['request_id'],
                datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S"))
            graph_file = os.path.join(directory, 'Graph.obj')
            key.set_contents_from_filename(graph_file)
            key.set_acl('public-read')
            msgparams['key'] = key.key
            # Drop the local copy once uploaded; os.remove replaces the
            # subprocess 'rm' fork of the original.
            os.remove(graph_file)

            msgparams['otp_version'] = get_otp_version()

        # Create a data tarball of the working directory, upload it to S3.
        tarball = os.path.join('/mnt', '%s.tar.gz' % req_name)
        subprocess.call(['tar', 'czf', tarball, directory])

        key = Key(bucket)
        data_key = "data/%s.tar.gz" % req_name
        key.key = data_key
        key.set_contents_from_filename(tarball)
        msgparams['data_key'] = data_key

        # Publish graph_done message.
        publisher = conn.Producer(routing_key="graph_done", exchange=exchange)
        publisher.publish(msgparams)

        print('published graph_done')

        # Write gb output to a file on S3.
        write_output_to_s3("output/%s_output.txt" % req_name,
                           gbresults['output'])

    except Exception:
        # Was a bare except; Exception avoids swallowing SystemExit /
        # KeyboardInterrupt. The error file is closed explicitly (the
        # original leaked the handle), and the traceback is echoed to
        # stderr for consistency with build_managed_osm.
        now = datetime.now()
        errfile = "/var/otp/gb_err_%s_%s" % (body['request_id'],
                                             now.strftime("%F-%T"))
        with open(errfile, "a") as f:
            traceback.print_exc(file=f)
        traceback.print_exc()
示例#4
0
def create_instance(conn, body):
    """Run a full graph build for a request and report the results.

    Pulls the request's GTFS files down from S3, generates the OSM
    extract and builder config, runs the graph builder, ships the graph
    (on success) and a tarball of the working directory back to S3, then
    publishes a "graph_done" message and stores the builder output.
    """
    try:
        req_name = get_req_name(body['request_id'])
        work_dir = init_directory(req_name)

        # Fetch every GTFS file into <work_dir>/gtfs; the running index
        # into extra_properties advances only for downloaded entries.
        os.makedirs(os.path.join(work_dir, 'gtfs'))
        props = body['extra_properties']
        props_by_name = {}
        out = []
        prop_idx = 0
        for s3_id in body['files']:
            if s3_id is None:
                continue

            print("id: " + s3_id)

            gtfs_key = Key(gtfs_bucket())
            gtfs_key.key = s3_id

            fname = os.path.basename(s3_id)
            dest = os.path.join(work_dir, 'gtfs', fname)

            gtfs_key.get_contents_to_filename(dest)

            props_by_name[fname] = props[prop_idx]
            prop_idx += 1

        # Configure and run the graph builder.
        builder.generate_osm_extract(work_dir)
        builder.generate_graph_config(work_dir, body['fare_factory'],
                                      props_by_name)
        results = builder.run_graph_builder(work_dir)

        print("finished gb: %s" % results['success'])

        message = {'request_id': body['request_id'],
                   'success': results['success']}

        graphs = graph_bucket()

        if results['success']:
            # Successful build: upload Graph.obj under a timestamped key,
            # make it world-readable, then delete the local copy.
            stamp = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S")
            graph_upload = Key(graphs)
            graph_upload.key = "uploads/%s/Graph_%s.obj" % (
                body['request_id'], stamp)
            graph_path = os.path.join(work_dir, 'Graph.obj')
            graph_upload.set_contents_from_filename(graph_path)
            graph_upload.set_acl('public-read')
            message['key'] = graph_upload.key
            subprocess.call(['rm', graph_path])

            message['otp_version'] = get_otp_version()

        # Tar up the working directory and push it to S3 as well.
        archive = os.path.join('/mnt', '%s.tar.gz' % req_name)
        subprocess.call(['tar', 'czf', archive, work_dir])

        data_key = "data/%s.tar.gz" % req_name
        data_upload = Key(graphs)
        data_upload.key = data_key
        data_upload.set_contents_from_filename(archive)
        message['data_key'] = data_key

        # Announce completion on the bus.
        conn.Producer(routing_key="graph_done",
                      exchange=exchange).publish(message)

        print('published graph_done')

        # Persist the builder's console output alongside the artifacts.
        write_output_to_s3("output/%s_output.txt" % req_name,
                           results['output'])

    except:
        # Record the traceback in a timestamped per-request error file.
        now = datetime.now()
        errfile = "/var/otp/gb_err_%s_%s" % (body['request_id'],
                                             now.strftime("%F-%T"))
        traceback.print_exc(file=open(errfile, "a"))