Example #1
def rebuild_graph(conn, body):

    try:
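        # recover the request name from the tarball key: strip the 'data/'
        # prefix (5 chars) and the '.tar.gz' suffix (7 chars); this matches
        # the data_key format written by create_instance (Example #4)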
        req_name = body['data_key'][5:][:-7]

        directory = os.path.join('/mnt', req_name)

        # download and extract data tarball
        bucket = graph_bucket()
        key = Key(bucket)
        key.key = body['data_key']
        tarball = '/mnt/data.tar.gz'
        key.get_contents_to_filename(tarball)
        subprocess.call(['tar', 'xvf', tarball, '-C', '/'])

        # run graph builder
        gbresults = builder.run_graph_builder(directory)

        msgparams = {}
        msgparams['request_id'] = body['request_id']
        msgparams['success'] = gbresults['success']

        if gbresults['success']:
            #upload graph to s3
            key = Key(bucket)
            key.key = "uploads/%s/Graph_%s.obj" % (
                body['request_id'],
                datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S"))
            graph_file = os.path.join(directory, 'Graph.obj')
            key.set_contents_from_filename(graph_file)
            key.set_acl('public-read')
            msgparams['key'] = key.key
            subprocess.call(['rm', graph_file])

            msgparams['otp_version'] = get_otp_version()

        publisher = conn.Producer(routing_key="rebuild_graph_done",
                                  exchange=exchange)
        publisher.publish(msgparams)

        print 'published rebuild_graph_done'

        # write gb output to file to s3
        write_output_to_s3(req_name, gbresults['output'])

    except:
        now = datetime.now()
        errfile = "/var/otp/gb_err_%s_%s" % (body['request_id'],
                                             now.strftime("%F-%T"))
        traceback.print_exc(file=open(errfile, "a"))
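
For context, handlers like rebuild_graph receive conn (a messaging connection) and body (the decoded message) from a consumer loop that is not shown in these listings. A minimal sketch of such a loop, assuming kombu (which matches the conn.Producer(...)/publish(...) calls above); the exchange name, queue name, and broker URL are made-up placeholders:

# Hypothetical consumer wiring for rebuild_graph; only the handler signature
# comes from the listing above, everything else here is an assumption.
from kombu import Connection, Exchange, Queue

exchange = Exchange('otp', type='direct')                   # assumed exchange name
rebuild_queue = Queue('rebuild_graph', exchange,
                      routing_key='rebuild_graph')          # assumed queue name

with Connection('amqp://guest:guest@localhost//') as conn:  # assumed broker URL
    def handle(msg_body, message):
        rebuild_graph(conn, msg_body)
        message.ack()

    with conn.Consumer(rebuild_queue, callbacks=[handle]):
        while True:
            conn.drain_events()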
Example #2
File: handlers.py  Project: MiaoS/OTPSetup
def rebuild_graph(conn, body):

    try:        
        req_name = body['data_key'][5:][:-7]

        directory = os.path.join('/mnt', req_name)

        # download and extract data tarball
        bucket = graph_bucket()
        key = Key(bucket)
        key.key = body['data_key']
        tarball = '/mnt/data.tar.gz'
        key.get_contents_to_filename(tarball)
        subprocess.call(['tar', 'xvf', tarball, '-C', '/'])
        
        # run graph builder
        gbresults = builder.run_graph_builder(directory)
                
        msgparams = { }
        msgparams['request_id'] = body['request_id']
        msgparams['success'] = gbresults['success']

        if gbresults['success']:
            #upload graph to s3            
            key = Key(bucket)
            key.key = "uploads/%s/Graph_%s.obj" % (body['request_id'], datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S"))
            graph_file = os.path.join(directory,'Graph.obj')
            key.set_contents_from_filename(graph_file)
            key.set_acl('public-read')
            msgparams['key'] = key.key
            subprocess.call(['rm', graph_file])
            
            msgparams['otp_version'] = get_otp_version()
            
        
        publisher = conn.Producer(routing_key="rebuild_graph_done", exchange=exchange)
        publisher.publish(msgparams)
        
        print 'published rebuild_graph_done'
        
        # write gb output to file to s3
        write_output_to_s3(req_name, gbresults['output'])


    except:
        now = datetime.now()
        errfile = "/var/otp/gb_err_%s_%s" % (body['request_id'], now.strftime("%F-%T"))
        traceback.print_exc(file=open(errfile,"a"))
Example #3
File: handlers.py  Project: MiaoS/OTPSetup
def build_managed(conn, body):

    try:        
        print "build_managed"

        print "osm_key=%s" % body['osm_key']

        feeds = body['feeds']

        if body['osm_key'] is None or body['osm_key'] == "":
            print "no osm key"
            publisher = conn.Producer(routing_key="build_managed_osm", exchange=exchange)
            publisher.publish({ 'id' : body['id'], 'feeds' : feeds, 'trigger_rebuild' : True, 'otp_version' : body['otp_version'] })
            return

        
        print "key exists, building"

        #config = json.loads(body['config'])

        # download jar for specified OTP version
        download_jar(body['otp_version'])

        # set up working directory
        req_name = "managed_%s" % get_req_name(body['id']);
        directory = init_directory(req_name);
        download_managed_gtfs(directory, feeds)


        # download osm extract
        bucket = osm_bucket()
        key = Key(bucket)
        key.key = body['osm_key']
        path = os.path.join(directory, 'extract.osm')
        key.get_contents_to_filename(path)


        # run graph builder
        builder.generate_graph_config_managed(directory, feeds) 
        gbresults = builder.run_graph_builder(directory)

        graph_key = None

        timestamp = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S")

        # upload graph to S3
        if gbresults['success']:
            key = Key(graph_bucket())
            graph_key = "managed/%s/Graph_%s.obj" % (str(body['id']).zfill(6), timestamp)
            key.key = graph_key
            graph_file = os.path.join(directory,'Graph.obj')
            key.set_contents_from_filename(graph_file)

        # write gb output to file to s3
        output_key = "managed/%s/output_%s.txt" % (str(body['id']).zfill(6), timestamp)
        write_output_to_s3(output_key, gbresults['output'])

        # publish managed_graph_done
        publisher = conn.Producer(routing_key="managed_graph_done", exchange=exchange)
        publisher.publish({
            'id': body['id'],
            'success': gbresults['success'],
            'graph_key': graph_key,
            'output_key': output_key,
            'otp_version': get_otp_version()
        })

    except:
        now = datetime.now()
        errfile = "/var/otp/gb_err_%s_%s" % (body['id'], now.strftime("%F-%T"))
        traceback.print_exc(file=open(errfile,"a"))
        traceback.print_exc()
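
For reference, build_managed reads only a handful of fields from the incoming message. A sketch of the expected payload, where the key names are taken from the handler above and every value is a made-up placeholder:

# Placeholder payload for build_managed; only the key names come from the code above.
body = {
    'id': 42,                          # request id, zero-padded to 6 digits in the S3 keys
    'osm_key': 'osm/extract_42.osm',   # assumed key; None or "" re-routes to build_managed_osm
    'feeds': ['gtfs/feed_a.zip'],      # assumed feed list handed to download_managed_gtfs
    'otp_version': '1.0.0',            # assumed version string handed to download_jar
}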
Example #4
File: handlers.py  Project: MiaoS/OTPSetup
def create_instance(conn, body):

    try:
        req_name = get_req_name(body['request_id'])
        directory = init_directory(req_name)
        
        # extract gtfs files
        os.makedirs(os.path.join(directory, 'gtfs'))
        files = body['files']
        extra_props_list = body['extra_properties']
        extra_props_dict = { }
        out = []
        i = 0
        for s3_id in files:
            if s3_id is None:
                continue

            print "id: " + s3_id
        
            bucket = gtfs_bucket()
            key = Key(bucket)
            key.key = s3_id

            basename = os.path.basename(s3_id)
            path = os.path.join(directory, 'gtfs', basename)
            
            key.get_contents_to_filename(path)        

            extra_props_dict[basename] = extra_props_list[i]
            i += 1
       
        # prepare and run graph builder
        builder.generate_osm_extract(directory)
        builder.generate_graph_config(directory, body['fare_factory'], extra_props_dict)
        gbresults = builder.run_graph_builder(directory)
                
        print "finished gb: %s" % gbresults['success']

        msgparams = { }
        msgparams['request_id'] = body['request_id']
        msgparams['success'] = gbresults['success']

        bucket = graph_bucket()

        if gbresults['success']:
            key = Key(bucket)
            key.key = "uploads/%s/Graph_%s.obj" % (body['request_id'], datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S"))
            graph_file = os.path.join(directory,'Graph.obj')
            key.set_contents_from_filename(graph_file)
            key.set_acl('public-read')
            msgparams['key'] = key.key
            subprocess.call(['rm', graph_file])
            
            msgparams['otp_version'] = get_otp_version()
            
        # create data tarball and upload to s3
        tarball = os.path.join('/mnt', ('%s.tar.gz' % req_name))
        subprocess.call(['tar', 'czf', tarball, directory])

        key = Key(bucket)
        data_key = "data/%s.tar.gz" % req_name
        key.key = data_key
        key.set_contents_from_filename(tarball)
        msgparams['data_key'] = data_key

        # publish graph_done message        
        publisher = conn.Producer(routing_key="graph_done", exchange=exchange)
        publisher.publish(msgparams)
        
        print 'published graph_done'
        
        # write gb output to file to s3
        write_output_to_s3("output/%s_output.txt" % req_name, gbresults['output'])

    except:
        now = datetime.now()
        errfile = "/var/otp/gb_err_%s_%s" % (body['request_id'], now.strftime("%F-%T"))
        traceback.print_exc(file=open(errfile,"a"))
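
Taken together, a successful create_instance run leaves three objects in S3. The key patterns below are read directly off the code above, with placeholder values for the request id, request name, and timestamp:

# Key patterns produced by create_instance (placeholder values, patterns from the code above):
graph_key  = "uploads/1234/Graph_2013-01-01T00:00:00.obj"   # public-read graph object
data_key   = "data/example_req.tar.gz"                      # data tarball, later consumed by rebuild_graph
output_key = "output/example_req_output.txt"                # graph builder log written via write_output_to_s3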
Example #5
def build_managed(conn, body):

    try:
        print "build_managed"

        print "osm_key=%s" % body['osm_key']

        feeds = body['feeds']

        if body['osm_key'] is None or body['osm_key'] == "":
            print "no osm key"
            publisher = conn.Producer(routing_key="build_managed_osm",
                                      exchange=exchange)
            publisher.publish({
                'id': body['id'],
                'feeds': feeds,
                'trigger_rebuild': True,
                'otp_version': body['otp_version']
            })
            return

        print "key exists, building"

        #config = json.loads(body['config'])

        # download jar for specified OTP version
        download_jar(body['otp_version'])

        # set up working directory
        req_name = "managed_%s" % get_req_name(body['id'])
        directory = init_directory(req_name)
        download_managed_gtfs(directory, feeds)

        # download osm extract
        bucket = osm_bucket()
        key = Key(bucket)
        key.key = body['osm_key']
        path = os.path.join(directory, 'extract.osm')
        key.get_contents_to_filename(path)

        # run graph builder
        builder.generate_graph_config_managed(directory, feeds)
        gbresults = builder.run_graph_builder(directory)

        graph_key = None

        timestamp = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S")

        # upload graph to S3
        if gbresults['success']:
            key = Key(graph_bucket())
            graph_key = "managed/%s/Graph_%s.obj" % (str(
                body['id']).zfill(6), timestamp)
            key.key = graph_key
            graph_file = os.path.join(directory, 'Graph.obj')
            key.set_contents_from_filename(graph_file)

        # write gb output to file to s3
        output_key = "managed/%s/output_%s.txt" % (str(
            body['id']).zfill(6), timestamp)
        write_output_to_s3(output_key, gbresults['output'])

        # publish managed_graph_done
        publisher = conn.Producer(routing_key="managed_graph_done",
                                  exchange=exchange)
        publisher.publish({
            'id': body['id'],
            'success': gbresults['success'],
            'graph_key': graph_key,
            'output_key': output_key,
            'otp_version': get_otp_version()
        })

    except:
        now = datetime.now()
        errfile = "/var/otp/gb_err_%s_%s" % (body['id'], now.strftime("%F-%T"))
        traceback.print_exc(file=open(errfile, "a"))
        traceback.print_exc()
Example #6
def create_instance(conn, body):

    try:
        req_name = get_req_name(body['request_id'])
        directory = init_directory(req_name)

        # extract gtfs files
        os.makedirs(os.path.join(directory, 'gtfs'))
        files = body['files']
        extra_props_list = body['extra_properties']
        extra_props_dict = {}
        out = []
        i = 0
        for s3_id in files:
            if s3_id is None:
                continue

            print "id: " + s3_id

            bucket = gtfs_bucket()
            key = Key(bucket)
            key.key = s3_id

            basename = os.path.basename(s3_id)
            path = os.path.join(directory, 'gtfs', basename)

            key.get_contents_to_filename(path)

            extra_props_dict[basename] = extra_props_list[i]
            i += 1

        # prepare and run graph builder
        builder.generate_osm_extract(directory)
        builder.generate_graph_config(directory, body['fare_factory'],
                                      extra_props_dict)
        gbresults = builder.run_graph_builder(directory)

        print "finished gb: %s" % gbresults['success']

        msgparams = {}
        msgparams['request_id'] = body['request_id']
        msgparams['success'] = gbresults['success']

        bucket = graph_bucket()

        if gbresults['success']:
            key = Key(bucket)
            key.key = "uploads/%s/Graph_%s.obj" % (
                body['request_id'],
                datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S"))
            graph_file = os.path.join(directory, 'Graph.obj')
            key.set_contents_from_filename(graph_file)
            key.set_acl('public-read')
            msgparams['key'] = key.key
            subprocess.call(['rm', graph_file])

            msgparams['otp_version'] = get_otp_version()

        # create data tarball and upload to s3
        tarball = os.path.join('/mnt', ('%s.tar.gz' % req_name))
        subprocess.call(['tar', 'czf', tarball, directory])

        key = Key(bucket)
        data_key = "data/%s.tar.gz" % req_name
        key.key = data_key
        key.set_contents_from_filename(tarball)
        msgparams['data_key'] = data_key

        # publish graph_done message
        publisher = conn.Producer(routing_key="graph_done", exchange=exchange)
        publisher.publish(msgparams)

        print 'published graph_done'

        # write gb output to file to s3
        write_output_to_s3("output/%s_output.txt" % req_name,
                           gbresults['output'])

    except:
        now = datetime.now()
        errfile = "/var/otp/gb_err_%s_%s" % (body['request_id'],
                                             now.strftime("%F-%T"))
        traceback.print_exc(file=open(errfile, "a"))
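
The data/<req_name>.tar.gz key uploaded by create_instance is what rebuild_graph (Examples #1 and #2) later receives as body['data_key']; the slicing in that handler simply reverses this naming. A quick check with a placeholder request name:

req_name = 'example_req'                 # placeholder; real names come from get_req_name()
data_key = "data/%s.tar.gz" % req_name   # key format used by create_instance above
assert data_key[5:][:-7] == req_name     # 'data/' is 5 chars, '.tar.gz' is 7 chars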