def upload(request):
    """Render the direct-to-S3 upload form for an existing InstanceRequest.

    Reads ``request_id`` from the query string (KeyError -> 500 if absent),
    checks the request belongs to the logged-in user, and builds a signed
    S3 POST policy for the template.

    NOTE(review): the template context is ``locals()``, so every local name
    in this function is part of the template contract -- do not rename them.
    """
    #todo: prevent too many uploads by the same IP address
    # Raises KeyError / InstanceRequest.DoesNotExist on bad input -- the
    # browser sees a 500 rather than a redirect. TODO confirm that's intended.
    request_id = request.GET['request_id']
    irequest = InstanceRequest.objects.get(id=request_id)
    # Ownership check: only the user who created the request may upload to it.
    if irequest.user != request.user:
        return redirect("/")
    # Count of files already attached, displayed by the template.
    uploaded = irequest.gtfsfile_set.count()
    files = [ ]
    # NOTE(review): 'file' shadows the (py2) builtin of the same name.
    for file in irequest.gtfsfile_set.all():
        # Show the S3 key for directly-uploaded files, otherwise the URL the
        # file will be transloaded from.
        file_name = file.s3_key if (file.s3_key is not None) else file.transload_url
        file_obj = { 'name' : file_name, 'id' : file.id }
        files.append(file_obj)
    # Unique S3 key prefix; "${filename}"-style substitution presumably
    # happens on the S3 side -- TODO confirm what consumes "$(unknown)".
    base_filename = "uploads/%s/%s_" % (request_id, str(uuid.uuid4()))
    upload_filename = base_filename + "$(unknown)"
    aws_access_key_id = settings.AWS_ACCESS_KEY_ID
    # S3 redirects the browser here after a successful POST.
    after_upload_url = request.build_absolute_uri("/done_upload")
    policy = make_s3_policy(base_filename, after_upload_url)
    base64_policy = base64.b64encode(policy)
    signature = s3_sign(policy, settings.AWS_SECRET_KEY)
    s3_bucket = settings.S3_BUCKET
    # locals() exposes everything defined above to the template.
    return render_to_response(request, 'upload.html', locals())
def upload(request):
    """Render the direct-to-S3 upload form for an existing InstanceRequest.

    Looks up the InstanceRequest named by the ``request_id`` query parameter,
    verifies it belongs to the logged-in user, and prepares a signed S3 POST
    policy for the template.

    Fix: a missing ``request_id`` or an unknown id previously raised
    KeyError / InstanceRequest.DoesNotExist (HTTP 500); both now redirect to
    "/", matching the existing ownership-failure path.

    The template context is ``locals()``, so every local name below is part
    of the template contract -- do not rename them.
    """
    #todo: prevent too many uploads by the same IP address
    request_id = request.GET.get('request_id')
    if request_id is None:
        return redirect("/")
    try:
        irequest = InstanceRequest.objects.get(id=request_id)
    except InstanceRequest.DoesNotExist:
        return redirect("/")
    # Ownership check: only the user who created the request may upload to it.
    if irequest.user != request.user:
        return redirect("/")
    # Count of files already attached, displayed by the template.
    uploaded = irequest.gtfsfile_set.count()
    files = []
    for gtfs_file in irequest.gtfsfile_set.all():
        # Show the S3 key for directly-uploaded files, otherwise the URL the
        # file will be transloaded from.
        file_name = gtfs_file.s3_key if (gtfs_file.s3_key is not None) else gtfs_file.transload_url
        files.append({'name': file_name, 'id': gtfs_file.id})
    # Unique S3 key prefix; the uploaded file's name is appended on the S3
    # side -- "$(unknown)" is a placeholder suffix. TODO confirm its consumer.
    base_filename = "uploads/%s/%s_" % (request_id, str(uuid.uuid4()))
    upload_filename = base_filename + "$(unknown)"
    aws_access_key_id = settings.AWS_ACCESS_KEY_ID
    # S3 redirects the browser here after a successful POST.
    after_upload_url = request.build_absolute_uri("/done_upload")
    policy = make_s3_policy(base_filename, after_upload_url)
    base64_policy = base64.b64encode(policy)
    signature = s3_sign(policy, settings.AWS_SECRET_KEY)
    s3_bucket = settings.S3_BUCKET
    # locals() exposes everything defined above to the template.
    return render_to_response(request, 'upload.html', locals())
def create_request(request):
    """Display the instance-request form (GET) or create a request (POST).

    On a valid submission the new InstanceRequest is stamped with the
    requesting user, client IP, and the initial 'building' state, then the
    browser is redirected to the upload page for that request.

    Fix: the form was previously saved without validation, so invalid input
    raised ValueError (HTTP 500); the form page is now re-rendered instead
    (``form`` is available in the template context via ``locals()``).
    """
    if request.method == "GET":
        return render_to_response(request, 'create_request.html', locals())
    form = InstanceRequestForm(request.REQUEST)
    if not form.is_valid():
        # Invalid input: redisplay the form rather than 500-ing.
        return render_to_response(request, 'create_request.html', locals())
    irequest = form.save(commit=False)
    irequest.user = request.user
    irequest.ip = request.META['REMOTE_ADDR']
    irequest.state = 'building'
    irequest.save()
    return redirect("/upload?request_id=%s" % irequest.id)
def index(request):
    """List running EC2 instances paired with their local AMI records.

    Renders manage_ec2/index.html with ``running_instances`` (dicts of
    ``image``/``instance``) and ``images`` via ``locals()``.

    Fixes: previously issued one AmazonMachineImage query per instance (N+1)
    and raised an uncaught DoesNotExist when an instance ran an AMI missing
    from the local table, 500-ing the whole page.
    """
    conn = connect_ec2(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_KEY)
    reservations = conn.get_all_instances()
    images = AmazonMachineImage.objects.all()
    # Single query, then O(1) lookups instead of a .get() per instance.
    images_by_ami = dict((image.ami_id, image) for image in images)
    running_instances = []
    for reservation in reservations:
        for instance in reservation.instances:
            # image is None when the AMI has no local record (previously this
            # raised DoesNotExist); the instance is still listed.
            image = images_by_ami.get(instance.image_id)
            running_instances.append(dict(image=image, instance=instance))
    return render_to_response(request, 'manage_ec2/index.html', locals())
def finalize_request(request):
    """Submit an InstanceRequest for processing via the message broker.

    Files with a transload URL are queued on the "transload" route first;
    only when every file is already in S3 is the request queued for
    validation.  NOTE(review): the transload path presumably re-enters this
    flow once downloads finish -- verify against the transload worker.

    The template context is ``locals()``, so local names here are part of
    the template contract -- do not rename them.
    """
    request_id = request.POST['request_id']
    irequest = InstanceRequest.objects.get(id=request_id)
    # Ownership check: only the request's creator may finalize it.
    if irequest.user != request.user:
        return redirect("/")
    exchange = Exchange("amq.direct", type="direct", durable=True)
    conn = DjangoBrokerConnection()
    publisher = conn.Producer(routing_key="transload", exchange=exchange)
    transloading = False
    s3_keys = []
    to_transload = []
    # Split the request's files: already-uploaded S3 keys vs. URLs that a
    # worker must fetch ("transload") before validation can run.
    for gtfs_file in irequest.gtfsfile_set.all():
        if gtfs_file.transload_url:
            transloading = True
            to_transload.append(gtfs_file)
        else:
            s3_keys.append(gtfs_file.s3_key)
    if transloading:
        # State is persisted before publishing so workers observing the
        # messages see a consistent request state.
        irequest.state = 'pre_transload'
        irequest.save()
        for gtfs_file in to_transload:
            publisher.publish({ "transload": gtfs_file.transload_url, "gtfs_file_id": gtfs_file.id })
    else:
        irequest.state = 'submitted'
        irequest.save()
        # NOTE(review): the "transload" producer above is replaced here
        # without being closed, and conn is never closed -- possible leak.
        publisher = conn.Producer(routing_key="validate_request", exchange=exchange)
        publisher.publish({"files": s3_keys, "request_id": irequest.id})
        # start validator instance, if needed
        check_for_running_instance(settings.VALIDATOR_AMI_ID)
    publisher.close()
    return render_to_response(request, 'request_submitted.html', locals())
def finalize_request(request):
    """Submit an InstanceRequest for processing via the message broker.

    Files with a transload URL are queued on the "transload" route first;
    only when every file is already in S3 is the request queued for
    validation (and a validator instance started if needed).

    Fixes: the broker connection was never closed, the first producer was
    replaced without being closed on the validation path, and nothing was
    closed if publishing raised.  Cleanup now happens in a ``finally``.

    The template context is ``locals()``, so local names here are part of
    the template contract -- do not rename them.
    """
    request_id = request.POST['request_id']
    irequest = InstanceRequest.objects.get(id=request_id)
    # Ownership check: only the request's creator may finalize it.
    if irequest.user != request.user:
        return redirect("/")
    exchange = Exchange("amq.direct", type="direct", durable=True)
    conn = DjangoBrokerConnection()
    publisher = conn.Producer(routing_key="transload", exchange=exchange)
    try:
        transloading = False
        s3_keys = []
        to_transload = []
        # Split the request's files: already-uploaded S3 keys vs. URLs a
        # worker must fetch ("transload") before validation can run.
        for gtfs_file in irequest.gtfsfile_set.all():
            if gtfs_file.transload_url:
                transloading = True
                to_transload.append(gtfs_file)
            else:
                s3_keys.append(gtfs_file.s3_key)
        if transloading:
            # Persist state before publishing so workers observing the
            # messages see a consistent request state.
            irequest.state = 'pre_transload'
            irequest.save()
            for gtfs_file in to_transload:
                publisher.publish({"transload": gtfs_file.transload_url, "gtfs_file_id" : gtfs_file.id})
        else:
            irequest.state = 'submitted'
            irequest.save()
            # Close the unused transload producer before replacing it.
            publisher.close()
            publisher = conn.Producer(routing_key="validate_request", exchange=exchange)
            publisher.publish({"files" : s3_keys, "request_id" : irequest.id})
            # start validator instance, if needed
            check_for_running_instance(settings.VALIDATOR_AMI_ID)
    finally:
        # Always release the producer and the broker connection.
        publisher.close()
        conn.close()
    return render_to_response(request, 'request_submitted.html', locals())
def index(request):
    """Render the static landing page."""
    template_name = 'index.html'
    return render_to_response(request, template_name)
def transload(request):
    """Render the transload page for an InstanceRequest.

    NOTE(review): unlike the other views, there is no ownership check here --
    any user who knows a request_id can view this page. Confirm intentional.

    The template context is ``locals()``, so local names here are part of
    the template contract -- do not rename them.
    """
    # request.REQUEST accepts the id from either GET or POST (old Django).
    request_id = request.REQUEST['request_id']
    irequest = InstanceRequest.objects.get(id=request_id)
    # Count of files already attached, displayed by the template.
    uploaded = irequest.gtfsfile_set.count()
    return render_to_response(request, 'transload.html', locals())