def accept_instance_request(modeladmin, request, queryset):
    """Admin action: accept instance requests and queue graph builds.

    For each selected request whose state is not 'approved', publish a
    create_instance message carrying its GTFS S3 keys, extra properties,
    and fare factory. All selected requests are then marked accepted,
    and a graph-builder EC2 instance is started if needed.
    """
    exchange = Exchange("amq.direct", type="direct", durable=True)
    connection = DjangoBrokerConnection()
    producer = connection.Producer(routing_key="create_instance",
                                   exchange=exchange)
    for irequest in queryset:
        # NOTE(review): this publishes for every state EXCEPT 'approved';
        # confirm '!=' (rather than '==') is the intended guard.
        if irequest.state != 'approved':
            keys = [f.s3_key for f in irequest.gtfsfile_set.all()]
            props = [f.extra_properties for f in irequest.gtfsfile_set.all()]
            producer.publish({"files": keys,
                              "extra_properties": props,
                              "request_id": irequest.id,
                              "fare_factory": irequest.fare_factory})
    # Real querysets support bulk update(); plain iterables do not.
    if hasattr(queryset, 'update'):
        queryset.update(state='accepted', decision_date=datetime.now())
    else:
        for irequest in queryset:
            irequest.state = "accepted"
            irequest.decision_date = datetime.now()
            irequest.save()
    # Launch a graph builder EC2 instance.
    check_for_running_instance(settings.GRAPH_BUILDER_AMI_ID)
def process_transload(conn, body, message): #attempt to download the URL url = body['transload'] if not url.startswith("http://") and not url.startswith("https://"): url = "http://%s" % url #make sure this still makes sense gtfs = GtfsFile.objects.get(id=body['gtfs_file_id']) irequest = gtfs.instance_request if irequest.state != "pre_transload": #this request has already been cancelled or is otherwise irrelevant print "wrong state", irequest.state, body message.ack() return try: resp = urlopen(url) except urllib2.HTTPError: print "failed" #this is a permanent failure, so we want to alert the user. notify_user_of_failure(url) irequest.state = "failed" irequest.save() message.ack() return tmpfile = TemporaryFile() copyfileobj(resp, tmpfile) bucket = s3_bucket() key = s3_key(bucket, gtfs) tmpfile.seek(0) key.set_contents_from_file(tmpfile) gtfs.s3_key = key.key gtfs.save() allDownloaded = True s3_keys = [] for gtfs in irequest.gtfsfile_set.all(): if not gtfs.s3_key: allDownloaded = False break s3_keys.append(gtfs.s3_key) if allDownloaded: publisher = conn.Producer(routing_key="validate_request", exchange=exchange) publisher.publish({"files": s3_keys, "request_id": irequest.id}) # start validator instance, if needed check_for_running_instance(settings.VALIDATOR_AMI_ID) irequest.state = "submitted" irequest.save() message.ack()
def process_transload(conn, body, message): #attempt to download the URL url = body['transload'] if not url.startswith("http://") and not url.startswith("https://"): url = "http://%s" % url #make sure this still makes sense gtfs = GtfsFile.objects.get(id=body['gtfs_file_id']) irequest = gtfs.instance_request if irequest.state != "pre_transload": #this request has already been cancelled or is otherwise irrelevant print "wrong state", irequest.state, body message.ack() return try: resp = urlopen(url) except urllib2.HTTPError: print "failed" #this is a permanent failure, so we want to alert the user. notify_user_of_failure(url) irequest.state = "failed" irequest.save() message.ack() return tmpfile = TemporaryFile() copyfileobj(resp, tmpfile) bucket = s3_bucket() key = s3_key(bucket, gtfs) tmpfile.seek(0) key.set_contents_from_file(tmpfile) gtfs.s3_key = key.key gtfs.save() allDownloaded = True s3_keys = [] for gtfs in irequest.gtfsfile_set.all(): if not gtfs.s3_key: allDownloaded = False break s3_keys.append(gtfs.s3_key) if allDownloaded: publisher = conn.Producer(routing_key="validate_request", exchange=exchange) publisher.publish({"files" : s3_keys, "request_id" : irequest.id}) # start validator instance, if needed check_for_running_instance(settings.VALIDATOR_AMI_ID) irequest.state = "submitted" irequest.save() message.ack()
def finalize_request(request):
    """Submit a completed instance request for validation.

    If any GTFS file on the request still carries a transload URL, queue
    one transload message per such file and move the request to
    'pre_transload'; otherwise publish a validate_request message with
    the uploaded S3 keys and move the request to 'submitted'. Only the
    requesting user may finalize their own request.
    """
    request_id = request.POST['request_id']
    irequest = InstanceRequest.objects.get(id=request_id)
    if irequest.user != request.user:
        # Not the owner; bounce to the front page.
        return redirect("/")

    exchange = Exchange("amq.direct", type="direct", durable=True)
    conn = DjangoBrokerConnection()
    publisher = conn.Producer(routing_key="transload", exchange=exchange)
    try:
        transloading = False
        s3_keys = []
        to_transload = []
        for gtfs_file in irequest.gtfsfile_set.all():
            if gtfs_file.transload_url:
                transloading = True
                to_transload.append(gtfs_file)
            else:
                s3_keys.append(gtfs_file.s3_key)

        if transloading:
            irequest.state = 'pre_transload'
            irequest.save()
            for gtfs_file in to_transload:
                publisher.publish({"transload": gtfs_file.transload_url,
                                   "gtfs_file_id": gtfs_file.id})
        else:
            irequest.state = 'submitted'
            irequest.save()
            # Use a distinct producer instead of rebinding `publisher`:
            # the original rebind leaked the transload producer unclosed.
            validate_publisher = conn.Producer(routing_key="validate_request",
                                               exchange=exchange)
            try:
                validate_publisher.publish({"files": s3_keys,
                                            "request_id": irequest.id})
            finally:
                validate_publisher.close()
            # Start validator instance, if needed.
            check_for_running_instance(settings.VALIDATOR_AMI_ID)
    finally:
        # Always release the producer channel, even on error.
        publisher.close()

    return render_to_response(request, 'request_submitted.html', locals())
def rebuild_instance_request(modeladmin, request, queryset):
    """Admin action: queue graph rebuilds for the selected requests.

    Publishes a rebuild_graph message per request, marks each request
    accepted, and ensures a graph-builder EC2 instance is running.
    """
    exchange = Exchange("amq.direct", type="direct", durable=True)
    broker = DjangoBrokerConnection()
    producer = broker.Producer(routing_key="rebuild_graph", exchange=exchange)
    for irequest in queryset:
        producer.publish({"request_id": irequest.id,
                          "data_key": irequest.data_key})
    # Bulk-update when the backend supports it, per-row otherwise.
    if hasattr(queryset, 'update'):
        queryset.update(state='accepted', decision_date=datetime.now())
    else:
        for irequest in queryset:
            irequest.state = "accepted"
            irequest.decision_date = datetime.now()
            irequest.save()
    # Launch a graph builder EC2 instance.
    check_for_running_instance(settings.GRAPH_BUILDER_AMI_ID)
def finalize_request(request):
    """Finalize an instance request: transload remote files or submit it.

    Files that still carry a transload URL get one queued transload
    message each and the request enters 'pre_transload'; otherwise the
    request goes straight to validation with its uploaded S3 keys.
    Only the owner may finalize a request.
    """
    request_id = request.POST['request_id']
    irequest = InstanceRequest.objects.get(id=request_id)

    # Owners only.
    if irequest.user != request.user:
        return redirect("/")

    exchange = Exchange("amq.direct", type="direct", durable=True)
    conn = DjangoBrokerConnection()
    publisher = conn.Producer(routing_key="transload", exchange=exchange)

    # Split the request's files into "needs transloading" vs "already in S3".
    s3_keys = []
    to_transload = []
    for gtfs_file in irequest.gtfsfile_set.all():
        if gtfs_file.transload_url:
            to_transload.append(gtfs_file)
        else:
            s3_keys.append(gtfs_file.s3_key)
    transloading = bool(to_transload)

    if transloading:
        irequest.state = 'pre_transload'
        irequest.save()
        for gtfs_file in to_transload:
            publisher.publish({"transload": gtfs_file.transload_url,
                               "gtfs_file_id": gtfs_file.id})
    else:
        irequest.state = 'submitted'
        irequest.save()
        publisher = conn.Producer(routing_key="validate_request",
                                  exchange=exchange)
        publisher.publish({"files": s3_keys, "request_id": irequest.id})
        # Start validator instance, if needed.
        check_for_running_instance(settings.VALIDATOR_AMI_ID)

    publisher.close()
    # NOTE: locals() is handed to the template, so local names above are
    # part of the template's context.
    return render_to_response(request, 'request_submitted.html', locals())
def rebuild_instance_request(modeladmin, request, queryset):
    """Admin action: republish selected requests for a graph rebuild.

    Sends a rebuild_graph message (request id + data key) per request,
    flags each request as accepted, and makes sure a graph-builder EC2
    instance is up.
    """
    exchange = Exchange("amq.direct", type="direct", durable=True)
    conn = DjangoBrokerConnection()
    publisher = conn.Producer(routing_key="rebuild_graph", exchange=exchange)
    for item in queryset:
        payload = {"request_id": item.id, "data_key": item.data_key}
        publisher.publish(payload)
    if hasattr(queryset, 'update'):
        # QuerySet path: one UPDATE for the whole selection.
        queryset.update(state='accepted', decision_date=datetime.now())
    else:
        # Fallback for plain iterables: save each object individually.
        for item in queryset:
            item.state = "accepted"
            item.decision_date = datetime.now()
            item.save()
    # Launch a graph builder EC2 instance.
    check_for_running_instance(settings.GRAPH_BUILDER_AMI_ID)
def accept_instance_request(modeladmin, request, queryset):
    """Admin action: accept requests and enqueue instance creation.

    Publishes a create_instance message (file keys, extra properties,
    request id, fare factory) for each selected request whose state is
    not 'approved', then marks every selected request accepted and
    ensures a graph-builder EC2 instance is running.
    """
    exchange = Exchange("amq.direct", type="direct", durable=True)
    conn = DjangoBrokerConnection()
    publisher = conn.Producer(routing_key="create_instance",
                              exchange=exchange)
    for irequest in queryset:
        if irequest.state == 'approved':
            # NOTE(review): already-'approved' requests are skipped here;
            # verify that this inverted-looking guard is intentional.
            continue
        files = []
        for gtfsfile in irequest.gtfsfile_set.all():
            files.append(gtfsfile.s3_key)
        extra_props = []
        for gtfsfile in irequest.gtfsfile_set.all():
            extra_props.append(gtfsfile.extra_properties)
        publisher.publish({"files": files,
                           "extra_properties": extra_props,
                           "request_id": irequest.id,
                           "fare_factory": irequest.fare_factory})
    if hasattr(queryset, 'update'):
        queryset.update(state='accepted', decision_date=datetime.now())
    else:
        for irequest in queryset:
            irequest.state = "accepted"
            irequest.decision_date = datetime.now()
            irequest.save()
    # Launch a graph builder EC2 instance.
    check_for_running_instance(settings.GRAPH_BUILDER_AMI_ID)