Example #1
def download_datafiles(request):

    if (len(request.POST.getlist('datafile')) == 0 \
        and len(request.POST.getlist('dataset')) == 0):

        response = HttpResponseNotFound()
        response.write('<p>No files selected!</p>\n')
        return response

    from string import atoi
    par = models.ExperimentParameter.objects.filter(parameterset__experiment=
                                                    atoi(request.POST.get('expid')))

    epn = par.get(name__name='EPN').string_value

    datafiles = request.POST.getlist('datafile')
    datasets = request.POST.getlist('dataset')

    file_string = ""
    for dsid in datasets:
        if has_dataset_access(request, dsid):
            for datafile in models.Dataset_File.objects.filter(dataset=dsid):
                absolute_filename = datafile.url.partition('://')[2]
                file_string += absolute_filename + "\\r\\nTARDIS\\r\\n"

    for dfid in datafiles:
        if has_datafile_access(request, dfid):
            datafile = models.Dataset_File.objects.get(pk=dfid)
            if not datafile.dataset.id in datasets:
                absolute_filename = datafile.url.partition('://')[2]
                file_string += absolute_filename + "\\r\\nTARDIS\\r\\n"

    download = VBLDownload(request)
    return download.download(epn, file_string)
Example #2
def get_question(request, question_id=0):
    try:
        q = Question.objects.get(pk=question_id)  # identical to id=1
    except Exception, ex:
        r = HttpResponseNotFound()
        r.write("Error, reason: '%s'" % ex)
        return r
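Example #2 returns a response only on the error path; when the lookup succeeds the view falls through and returns None, which Django rejects. Below is a minimal sketch of an equivalent view built on Django's get_object_or_404 shortcut; the success-path body is an illustrative assumption rather than part of the original example.

from django.http import HttpResponse
from django.shortcuts import get_object_or_404


def get_question_sketch(request, question_id=0):
    # get_object_or_404 raises Http404 when no row matches, which Django
    # renders as a 404 response, so no manual HttpResponseNotFound is needed.
    question = get_object_or_404(Question, pk=question_id)  # Question as in Example #2
    # Illustrative success path (not part of the original example).
    return HttpResponse(str(question))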
Example #3
def handle_single_event_request(request):
    """
	Handles single event POST requests.
	"""
    eventData = request.POST

    # Parameters that are always passed with each event
    email = eventData.get("email", None)
    event = eventData.get("event", None)

    message_id = eventData.get("message_id", None)
    if message_id:
        try:
            emailMessage = EmailMessage.objects.get(message_id=message_id)
        except EmailMessage.DoesNotExist:
            msg = "EmailMessage with message_id {m} does not exist"
            logger.exception(msg.format(m=message_id))

            response = HttpResponseNotFound()
            response.write(msg.format(m=message_id) + "\n")
        else:
            eventObj = Event.objects.create(
                email_message=emailMessage, email=email, type=EventType.objects.get(name=event.upper())
            )

            response = HttpResponse()
    else:
        msg = "Expected 'message_id' was not found in event data"
        logger.exception(msg)

        response = HttpResponseBadRequest()
        response.write(msg + "\n")

    return response
Example #4
def handle_single_event_request(request):
    """
	Handles single event POST requests.
	"""
    eventData = request.POST

    # Parameters that are always passed with each event
    email = eventData.get("email", None)
    event = eventData.get("event", None)

    message_id = eventData.get("message_id", None)
    if message_id:
        try:
            emailMessage = EmailMessage.objects.get(message_id=message_id)
        except EmailMessage.DoesNotExist:
            msg = "EmailMessage with message_id {m} does not exist"
            logger.exception(msg.format(m=message_id))

            response = HttpResponseNotFound()
            response.write(msg.format(m=message_id) + "\n")
        else:
            eventObj = Event.objects.create(
                email_message=emailMessage,
                email=email,
                type=EventType.objects.get(name=event.upper()),
            )

            response = HttpResponse()
    else:
        msg = "Expected 'message_id' was not found in event data"
        logger.exception(msg)

        response = HttpResponseBadRequest()
        response.write(msg + "\n")

    return response
Example #5
def deleteShareFromContent(request):
	#print request.user
	#print request.body
	#print request.POST
	if (request.user.is_authenticated()):
		if request.method == 'POST':
			#typeOf = request.POST['typeOf']
			shareId = request.POST['shareId']
			toDeleteId = request.POST['toDeleteId']
			#uniqueId = request.POST['unique']
			#print uniqueId
		
			#Delete an item that the user was sharing with other users.
			try:
				shared_content = Share.objects.get(unique=shareId)
				shared_from = shared_content.shared_from
				if (shared_from.email == request.user.email):	#If the user is the owner
					try:
						sync = Sync.objects.get(unique=toDeleteId)
						#print "sync deleted"
						shared_content.shared.remove(sync)
					except Sync.DoesNotExist:
						response = HttpResponseNotFound()
						response.write("Not Found")
						return response
					checkIfEmpty(shared_content)
					#t = shared_content.shared.all()
					#print "Length of shared.all()"
					#print len(t)
					#if (len(t) is 0):
					#	print "deleted"
						#If it is 0, it means, there is nothing to share:
					#	shared_content.delete()
					#for h in t:
					#	print h.title
					#print "sync deleted 2"
					response = HttpResponse()
					response.write("OK")
				else:
					response = HttpResponseNotAllowed('You are not the owner')						
			except Share.DoesNotExist:
				response = HttpResponseNotFound()
				response.write("Not Found")			
		else:
			response = HttpResponseNotAllowed("Only POST allowed")
	else:
		response = HttpResponse()
		response.status_code = 401
		response['error'] = 'Unauthorized'
		response.content = 'Unauthorized'
	#response= HttpResponse()
	return response
Example #6
def process_request(self, request):
    # Find the admin file and serve it up, if it exists and is readable.
    if not request.path.startswith(self.media_url):
        return None
    relative_url = re.sub(r'/+', '/', request.path)[len(self.media_url):]
    file_path = os.path.join(self.media_dir, relative_url)
    if not os.path.exists(file_path):
        resp = HttpResponseNotFound()
        resp.write("Not Found")
    else:
        try:
            fp = open(file_path, 'rb')
            resp = HttpResponse(
                content_type=mimetypes.guess_type(file_path)[0])
            resp.write(fp.read())
            fp.close()
        except IOError:
            resp = HttpResponseForbidden()
            resp.write("Forbidden")
    return resp
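Example #6 is a middleware method that expects media_url and media_dir attributes on the instance. A sketch of how the surrounding old-style middleware class might look is shown below; the class name and the MEDIA_SERVE_URL / MEDIA_SERVE_DIR setting names are assumptions made for the sketch, not taken from the example.

import mimetypes
import os
import re

from django.conf import settings
from django.http import (HttpResponse, HttpResponseForbidden,
                         HttpResponseNotFound)


class AdminMediaMiddleware(object):
    """Hypothetical wrapper class for the process_request method above."""

    def __init__(self):
        # Attribute names match those used by process_request in Example #6;
        # the settings they are read from are assumed for this sketch.
        self.media_url = getattr(settings, 'MEDIA_SERVE_URL', '/media/')
        self.media_dir = getattr(settings, 'MEDIA_SERVE_DIR', '/var/www/media')

    def process_request(self, request):
        # Same logic as Example #6: serve the file if it exists and is
        # readable, 404 if missing, 403 if unreadable.
        if not request.path.startswith(self.media_url):
            return None
        relative_url = re.sub(r'/+', '/', request.path)[len(self.media_url):]
        file_path = os.path.join(self.media_dir, relative_url)
        if not os.path.exists(file_path):
            resp = HttpResponseNotFound()
            resp.write("Not Found")
        else:
            try:
                fp = open(file_path, 'rb')
                resp = HttpResponse(
                    content_type=mimetypes.guess_type(file_path)[0])
                resp.write(fp.read())
                fp.close()
            except IOError:
                resp = HttpResponseForbidden()
                resp.write("Forbidden")
        return resp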
Example #7
def download_datafiles(request):

    # Create the HttpResponse object with the appropriate headers.
    # TODO: handle no datafile, invalid filename, all http links
    # (tarfile count?)
    expid = request.POST['expid']
    fileString = ''

    comptype = "zip"
    if 'comptype' in request.POST:
        comptype = request.POST['comptype']

    if 'datafile' in request.POST:
        if len(request.POST.getlist('datafile')) > 500:
            comptype = "tar"

    if 'dataset' in request.POST:
        comptype = "tar" #todo quickfix, calc how many files

    # the following protocols can be handled by this module
    protocols = ['', 'file', 'tardis']
    known_protocols = len(protocols)
    if 'datafile' in request.POST or 'dataset' in request.POST:
        if (len(request.POST.getlist('datafile')) > 0
                or len(request.POST.getlist('dataset')) > 0):

            datasets = request.POST.getlist('dataset')
            datafiles = request.POST.getlist('datafile')

            for dsid in datasets:
                for datafile in Dataset_File.objects.filter(dataset=dsid):
                    if has_datafile_access(request=request,
                                            dataset_file_id=datafile.id):
                        p = datafile.protocol
                        if not p in protocols:
                            protocols += [p]
                        absolute_filename = datafile.url.partition('//')[2]
                        if(datafile.url.partition('//')[0] == 'tardis:'):
                            #temp fix for old data
                            filepath = '%s/%s/%s' %\
                            (expid, str(datafile.dataset.id),
                                absolute_filename)

                            print filepath + "######"

                            try:
                                wrapper = FileWrapper(file(
                                    datafile.get_absolute_filepath()))\
                                #exists test. os.exists broken
                            except IOError:
                                print "OLD FILE DETECTED"
                                filepath = '%s/%s' % (expid, absolute_filename)

                            fileString += ('\"' + filepath + '\" ')
                            print fileString
                        else:
                            fileString += '\"%s/%s\" ' %\
                            (expid, absolute_filename)


            for dfid in datafiles:
                datafile = Dataset_File.objects.get(pk=dfid)
                if datafile.dataset.id in datasets:
                    continue
                if has_datafile_access(request=request,
                        dataset_file_id=datafile.id):
                    p = datafile.protocol
                    if not p in protocols:
                        protocols += [p]
                    absolute_filename = datafile.url.partition('//')[2]
                    if(datafile.url.partition('//')[0] == 'tardis:'):
                        #temp fix for old data
                        filepath = '\"%s/%s/%s\" ' %\
                        (expid, str(datafile.dataset.id),
                            absolute_filename)

                        print filepath + "######"

                        try:
                            wrapper = FileWrapper(file(
                                datafile.get_absolute_filepath()))\
                            #exists test. os.exists broken
                        except IOError:
                            print "OLD FILE DETECTED"
                            filepath = '\"%s/%s\" ' %\
                                       (expid, absolute_filename)

                        fileString += filepath
                        print fileString
                    else:
                        fileString += '\"%s/%s\" ' % (expid, absolute_filename)
        else:
            return return_response_not_found(request)

    elif 'url' in request.POST:
        if not len(request.POST.getlist('url')) == 0:
            comptype = "tar" #todo quickfix for zip error
            fileString = ""
            for url in request.POST.getlist('url'):
                url = urllib.unquote(url)
                raw_path = url.partition('//')[2]
                experiment_id = request.POST['expid']
                datafile = Dataset_File.objects.filter(url__endswith=raw_path,
                    dataset__experiment__id=experiment_id)[0]
                if has_datafile_access(request=request,
                                       dataset_file_id=datafile.id):
                    p = datafile.protocol
                    if not p in protocols:
                        protocols += [p]
                    absolute_filename = datafile.url.partition('//')[2]
                    if(datafile.url.partition('//')[0] == 'tardis:'):
                        # expects tardis: formatted stuff
                        # to not include dataset id

                        #temp fix for old data
                        filepath = '\"%s/%s/%s\" ' %\
                            (expid, str(datafile.dataset.id),
                            absolute_filename)

                        print filepath + "######"

                        try:
                            wrapper = FileWrapper(file(
                                datafile.get_absolute_filepath()))\
                            #exists test. os.exists broken
                        except IOError:
                            print "OLD FILE DETECTED"
                            filepath = '\"%s/%s\" ' %\
                                       (expid, absolute_filename)

                        fileString += ('\"' + filepath + '\" ')
                        print fileString
                    else:
                        fileString += '\"%s/%s\" ' % (expid, absolute_filename)
        else:
            return return_response_not_found(request)
    else:
        return return_response_not_found(request)

    # more than one external download location?
    if len(protocols) > known_protocols + 2:
        response = HttpResponseNotFound()
        response.write('<p>Different locations selected!</p>\n')
        response.write('Please limit your selection and try again.\n')
        return response

    # redirect request if another (external) download protocol was found
    elif len(protocols) == known_protocols + 1:
        from django.core.urlresolvers import reverse, resolve
        try:
            for module in settings.DOWNLOAD_PROVIDERS:
                if module[0] == protocols[3]:
                    url = reverse('%s.download_datafiles' % module[1])
                    view, args, kwargs = resolve(url)
                    kwargs['request'] = request
                    return view(*args, **kwargs)
        except:
            return return_response_not_found(request)

    else:
        # tarfile class doesn't work on large files being added and
        # streamed on the fly, so going command-line-o
        if not fileString:
            return return_response_error(request)

        if comptype == "tar":
            cmd = 'tar -C %s -c %s' % (settings.FILE_STORE_PATH,
                                       fileString)

            # logger.info(cmd)
            response = \
                HttpResponse(FileWrapper(subprocess.Popen(
                                                    cmd,
                                                    stdout=subprocess.PIPE,
                                                    stderr=open(devnull, 'w'),
                                                    shell=True).stdout),
                             mimetype='application/x-tar')
            response['Content-Disposition'] = \
                    'attachment; filename="experiment%s-selection.tar"' % expid
            return response
        else:
            cmd = 'cd %s; zip -r - %s' % (settings.FILE_STORE_PATH,
                                       fileString)
            # logger.info(cmd)
            response = \
                HttpResponse(FileWrapper(subprocess.Popen(
                                                    cmd,
                                                    stdout=subprocess.PIPE,
                                                    stderr=open(devnull, 'w'),
                                                    shell=True).stdout),
                             mimetype='application/zip')
            response['Content-Disposition'] = \
                    'attachment; filename="experiment%s-selection.zip"' % expid
            return response
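Example #7 (and the later variants of download_datafiles) runs tar and zip through the shell with a command string assembled from request data (fileString), which leaves it open to shell injection via crafted file names. A hedged sketch of the tar branch using an argument list instead is shown below; it assumes the selected paths are collected into a plain Python list (filenames), and it imports FileWrapper from the standard library rather than the old Django alias the examples rely on.

import subprocess
from wsgiref.util import FileWrapper  # stdlib equivalent of the old Django alias

from django.http import HttpResponse


def stream_tar_selection(filenames, store_path, expid):
    # With an argument list and the default shell=False, user-supplied file
    # names are never parsed by a shell.
    proc = subprocess.Popen(['tar', '-C', store_path, '-c'] + filenames,
                            stdout=subprocess.PIPE)
    response = HttpResponse(FileWrapper(proc.stdout),
                            mimetype='application/x-tar')
    response['Content-Disposition'] = \
        'attachment; filename="experiment%s-selection.tar"' % expid
    return response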
Example #8
def view_404(request):
    response = HttpResponseNotFound()
    response.write("The path is not found")
    return response
def download_datafiles(request):

    # Create the HttpResponse object with the appropriate headers.
    # TODO: handle no datafile, invalid filename, all http links
    # (tarfile count?)
    expid = request.POST['expid']
    protocols = []
    fileString = ''
    fileSize = 0

    # the following protocols can be handled by this module
    protocols = ['', 'file', 'tardis']

    if 'datafile' in request.POST or 'dataset' in request.POST:

        if (len(request.POST.getlist('datafile')) > 0
                or len(request.POST.getlist('dataset')) > 0):

            datasets = request.POST.getlist('dataset')
            datafiles = request.POST.getlist('datafile')

            for dsid in datasets:
                for datafile in Dataset_File.objects.filter(dataset=dsid):
                    if has_datafile_access(request=request,
                                           dataset_file_id=datafile.id):
                        p = datafile.protocol
                        if not p in protocols:
                            protocols += [p]

                        absolute_filename = datafile.url.partition('//')[2]
                        fileString += '%s/%s ' % (expid, absolute_filename)
                        fileSize += long(datafile.size)

            for dfid in datafiles:
                datafile = Dataset_File.objects.get(pk=dfid)
                if datafile.dataset.id in datasets:
                    continue
                if has_datafile_access(request=request,
                                        dataset_file_id=datafile.id):
                    p = datafile.protocol
                    if not p in protocols:
                        protocols += [p]
                    absolute_filename = datafile.url.partition('//')[2]
                    fileString += '%s/%s ' % (expid, absolute_filename)
                    fileSize += long(datafile.size)

        else:
            return return_response_not_found(request)

    # TODO: check if we really still need this method
    elif 'url' in request.POST:

        if not len(request.POST.getlist('url')) == 0:
            for url in request.POST.getlist('url'):
                datafile = \
                    Dataset_File.objects.get(url=urllib.unquote(url),
                        dataset__experiment__id=request.POST['expid'])
                if has_datafile_access(request=request,
                                       dataset_file_id=datafile.id):
                    p = datafile.protocol
                    if not p in protocols:
                        protocols += [p]
                    absolute_filename = datafile.url.partition('//')[2]
                    fileString += '%s/%s ' % (expid, absolute_filename)
                    fileSize += long(datafile.size)

        else:
            return return_response_not_found(request)
    else:
        return return_response_not_found(request)

    # more than one external download location?
    if len(protocols) > 4:
        response = HttpResponseNotFound()
        response.write('<p>Different locations selected!</p>\n')
        response.write('Please limit your selection and try again.\n')
        return response

    # redirect request if another (external) download protocol was found
    elif len(protocols) == 4:
        from django.core.urlresolvers import resolve
        view, args, kwargs = resolve('/%s%s' % (protocols[3],
                                                request.path))
        kwargs['request'] = request
        return view(*args, **kwargs)

    else:
        # tarfile class doesn't work on large files being added and
        # streamed on the fly, so going command-line-o
        if not fileString:
            return return_response_error(request)

        cmd = 'tar -C %s -c %s' % (settings.FILE_STORE_PATH,
                                   fileString)

        # logger.info(cmd)
        response = \
            HttpResponse(FileWrapper(subprocess.Popen(cmd,
                                                      stdout=subprocess.PIPE,
                                                      shell=True).stdout),
                         mimetype='application/x-tar')
        response['Content-Disposition'] = \
                'attachment; filename="experiment%s.tar"' % expid
        response['Content-Length'] = fileSize + 5120
        return response
Example #10
def download_datafiles(request):

    # Create the HttpResponse object with the appropriate headers.
    # TODO: handle no datafile, invalid filename, all http links
    # (tarfile count?)
    expid = request.POST['expid']
    fileString = ''

    comptype = "zip"
    if 'comptype' in request.POST:
        comptype = request.POST['comptype']

    if 'datafile' in request.POST:
        if len(request.POST.getlist('datafile')) > 500:
            comptype = "tar"

    if 'dataset' in request.POST:
        comptype = "tar"  #todo quickfix, calc how many files

    # the following protocols can be handled by this module
    protocols = ['', 'file', 'tardis']
    known_protocols = len(protocols)
    if 'datafile' in request.POST or 'dataset' in request.POST:
        if (len(request.POST.getlist('datafile')) > 0
                or len(request.POST.getlist('dataset')) > 0):

            datasets = request.POST.getlist('dataset')
            datafiles = request.POST.getlist('datafile')

            for dsid in datasets:
                for datafile in Dataset_File.objects.filter(dataset=dsid):
                    if has_datafile_access(request=request,
                                           dataset_file_id=datafile.id):
                        p = datafile.protocol
                        if not p in protocols:
                            protocols += [p]
                        absolute_filename = datafile.url.partition('//')[2]
                        if (datafile.url.partition('//')[0] == 'tardis:'):
                            #temp fix for old data
                            filepath = '%s/%s/%s' %\
                            (expid, str(datafile.dataset.id),
                                absolute_filename)

                            print filepath + "######"

                            try:
                                wrapper = FileWrapper(file(
                                    datafile.get_absolute_filepath()))\
                                #exists test. os.exists broken

                            except IOError:
                                print "OLD FILE DETECTED"
                                filepath = '%s/%s' % (expid, absolute_filename)

                            fileString += ('\"' + filepath + '\" ')
                            print fileString
                        else:
                            fileString += '\"%s/%s\" ' %\
                            (expid, absolute_filename)

            for dfid in datafiles:
                datafile = Dataset_File.objects.get(pk=dfid)
                if datafile.dataset.id in datasets:
                    continue
                if has_datafile_access(request=request,
                                       dataset_file_id=datafile.id):
                    p = datafile.protocol
                    if not p in protocols:
                        protocols += [p]
                    absolute_filename = datafile.url.partition('//')[2]
                    if (datafile.url.partition('//')[0] == 'tardis:'):
                        #temp fix for old data
                        filepath = '\"%s/%s/%s\" ' %\
                        (expid, str(datafile.dataset.id),
                            absolute_filename)

                        print filepath + "######"

                        try:
                            wrapper = FileWrapper(file(
                                datafile.get_absolute_filepath()))\
                            #exists test. os.exists broken

                        except IOError:
                            print "OLD FILE DETECTED"
                            filepath = '\"%s/%s\" ' %\
                                       (expid, absolute_filename)

                        fileString += filepath
                        print fileString
                    else:
                        fileString += '\"%s/%s\" ' % (expid, absolute_filename)
        else:
            return return_response_not_found(request)

    elif 'url' in request.POST:
        if not len(request.POST.getlist('url')) == 0:
            comptype = "tar"  #todo quickfix for zip error
            fileString = ""
            for url in request.POST.getlist('url'):
                url = urllib.unquote(url)
                raw_path = url.partition('//')[2]
                experiment_id = request.POST['expid']
                datafile = Dataset_File.objects.filter(
                    url__endswith=raw_path,
                    dataset__experiment__id=experiment_id)[0]
                if has_datafile_access(request=request,
                                       dataset_file_id=datafile.id):
                    p = datafile.protocol
                    if not p in protocols:
                        protocols += [p]
                    absolute_filename = datafile.url.partition('//')[2]
                    if (datafile.url.partition('//')[0] == 'tardis:'):
                        # expects tardis: formatted stuff
                        # to not include dataset id

                        #temp fix for old data
                        filepath = '\"%s/%s/%s\" ' %\
                            (expid, str(datafile.dataset.id),
                            absolute_filename)

                        print filepath + "######"

                        try:
                            wrapper = FileWrapper(file(
                                datafile.get_absolute_filepath()))\
                            #exists test. os.exists broken

                        except IOError:
                            print "OLD FILE DETECTED"
                            filepath = '\"%s/%s\" ' %\
                                       (expid, absolute_filename)

                        fileString += ('\"' + filepath + '\" ')
                        print fileString
                    else:
                        fileString += '\"%s/%s\" ' % (expid, absolute_filename)
        else:
            return return_response_not_found(request)
    else:
        return return_response_not_found(request)

    # more than one external download location?
    if len(protocols) > known_protocols + 2:
        response = HttpResponseNotFound()
        response.write('<p>Different locations selected!</p>\n')
        response.write('Please limit your selection and try again.\n')
        return response

    # redirect request if another (external) download protocol was found
    elif len(protocols) == known_protocols + 1:
        from django.core.urlresolvers import reverse, resolve
        try:
            for module in settings.DOWNLOAD_PROVIDERS:
                if module[0] == protocols[3]:
                    url = reverse('%s.download_datafiles' % module[1])
                    view, args, kwargs = resolve(url)
                    kwargs['request'] = request
                    return view(*args, **kwargs)
        except:
            return return_response_not_found(request)

    else:
        # tarfile class doesn't work on large files being added and
        # streamed on the fly, so going command-line-o
        if not fileString:
            return return_response_error(request)

        if comptype == "tar":
            cmd = 'tar -C %s -c %s' % (settings.FILE_STORE_PATH, fileString)

            # logger.info(cmd)
            response = \
                HttpResponse(FileWrapper(subprocess.Popen(
                                                    cmd,
                                                    stdout=subprocess.PIPE,
                                                    shell=True).stdout),
                             mimetype='application/x-tar')
            response['Content-Disposition'] = \
                    'attachment; filename="experiment%s-selection.tar"' % expid
            return response
        else:
            cmd = 'cd %s; zip -r - %s' % (settings.FILE_STORE_PATH, fileString)
            # logger.info(cmd)
            response = \
                HttpResponse(FileWrapper(subprocess.Popen(
                                                    cmd,
                                                    stdout=subprocess.PIPE,
                                                    shell=True).stdout),
                             mimetype='application/zip')
            response['Content-Disposition'] = \
                    'attachment; filename="experiment%s-selection.zip"' % expid
            return response
def download_datafiles(request):

    # Create the HttpResponse object with the appropriate headers.
    # TODO: handle no datafile, invalid filename, all http links
    # (tarfile count?)
    expid = request.POST["expid"]
    fileString = ""
    fileSize = 0

    comptype = "zip"
    if "comtype" in request.POST:
        comptype = request.POST["comptype"]

    # the following protocols can be handled by this module
    protocols = ["", "file", "tardis"]
    known_protocols = len(protocols)

    if "datafile" or "dataset" in request.POST:
        if (len(request.POST.getlist("datafile")) > 0 or len(request.POST.getlist("dataset"))) > 0:

            datasets = request.POST.getlist("dataset")
            datafiles = request.POST.getlist("datafile")

            for dsid in datasets:
                for datafile in Dataset_File.objects.filter(dataset=dsid):
                    if has_datafile_access(request=request, dataset_file_id=datafile.id):
                        p = datafile.protocol
                        if not p in protocols:
                            protocols += [p]
                        absolute_filename = datafile.url.partition("//")[2]
                        if datafile.url.partition("//")[0] == "tardis:":
                            fileString += "%s/%s/%s " % (expid, str(datafile.dataset.id), absolute_filename)
                        else:
                            fileString += "%s/%s " % (expid, absolute_filename)
                        fileSize += long(datafile.size)

            for dfid in datafiles:
                datafile = Dataset_File.objects.get(pk=dfid)
                if datafile.dataset.id in datasets:
                    continue
                if has_datafile_access(request=request, dataset_file_id=datafile.id):
                    p = datafile.protocol
                    if not p in protocols:
                        protocols += [p]
                    absolute_filename = datafile.url.partition("//")[2]
                    if datafile.url.partition("//")[0] == "tardis:":
                        fileString += "%s/%s/%s " % (expid, str(datafile.dataset.id), absolute_filename)
                    else:
                        fileString += "%s/%s " % (expid, absolute_filename)
                    fileSize += long(datafile.size)
        else:
            return return_response_not_found(request)

    elif "url" in request.POST:
        if not len(request.POST.getlist("url")) == 0:
            fileString = ""
            fileSize = 0
            for url in request.POST.getlist("url"):
                url = urllib.unquote(url)
                raw_path = url.partition("//")[2]
                experiment_id = request.POST["expid"]
                datafile = Dataset_File.objects.filter(url__endswith=raw_path, dataset__experiment__id=experiment_id)[0]
                if has_datafile_access(request=request, dataset_file_id=datafile.id):
                    p = datafile.protocol
                    if not p in protocols:
                        protocols += [p]
                    absolute_filename = datafile.url.partition("//")[2]
                    if datafile.url.partition("//")[0] == "tardis:":
                        # expects tardis: formatted stuff to not include dataset id
                        fileString += "%s/%s/%s " % (expid, str(datafile.dataset.id), absolute_filename)
                    else:
                        fileString += "%s/%s " % (expid, absolute_filename)
                    fileSize += long(datafile.size)
        else:
            return return_response_not_found(request)
    else:
        return return_response_not_found(request)

    # more than one external download location?
    if len(protocols) > known_protocols + 2:
        response = HttpResponseNotFound()
        response.write("<p>Different locations selected!</p>\n")
        response.write("Please limit your selection and try again.\n")
        return response

    # redirect request if another (external) download protocol was found
    elif len(protocols) == known_protocols + 1:
        from django.core.urlresolvers import reverse, resolve

        try:
            for module in settings.DOWNLOAD_PROVIDERS:
                if module[0] == protocols[3]:
                    url = reverse("%s.download_datafiles" % module[1])
                    view, args, kwargs = resolve(url)
                    kwargs["request"] = request
                    return view(*args, **kwargs)
        except:
            return return_response_not_found(request)

    else:
        # tarfile class doesn't work on large files being added and
        # streamed on the fly, so going command-line-o
        if not fileString:
            return return_response_error(request)

        if comptype == "tar":
            cmd = "tar -C %s -c %s" % (settings.FILE_STORE_PATH, fileString)

            # logger.info(cmd)
            response = HttpResponse(
                FileWrapper(subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True).stdout),
                mimetype="application/x-tar",
            )
            response["Content-Disposition"] = 'attachment; filename="experiment%s-selection.tar"' % expid
            response["Content-Length"] = fileSize + 5120
            return response
        else:
            cmd = "cd %s; zip -r - %s" % (settings.FILE_STORE_PATH, fileString)

            # logger.info(cmd)
            response = HttpResponse(
                FileWrapper(subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True).stdout),
                mimetype="application/zip",
            )
            response["Content-Disposition"] = 'attachment; filename="experiment%s-selection.zip"' % expid
            response["Content-Length"] = fileSize + 5120
            return response
def view_404(request):
    response = HttpResponseNotFound()
    template = loader.get_template('404.html')
    response.write(template.render(RequestContext(request)))
    return response
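The two view_404 views in this listing only take effect once Django is pointed at them. Below is a minimal, hypothetical sketch of wiring one up as the project-wide 404 handler for the old-style Django these examples target; the app label and URLconf contents are assumptions, and newer Django versions also pass an exception argument to the handler.

# urls.py (root URLconf) -- hypothetical; 'myapp' is an assumed app label.
from django.conf.urls import patterns

# Use view_404 (defined above) for "page not found" responses; Django only
# consults this handler when DEBUG is False.
handler404 = 'myapp.views.view_404'

urlpatterns = patterns('',
    # project URL patterns go here
)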