Esempio n. 1
0
        'markup': markup,
    }, RequestContext(request))


def fix_markup(markup):
    """Convert wiki-style ``{{{`` / ``}}}`` delimiters into HTML <pre> tags."""
    replacements = (("{{{", "<pre>"), ("}}}", "</pre>"))
    for token, tag in replacements:
        markup = markup.replace(token, tag)
    return markup


def serve_file(request, id):
    """Stream a built Mozilla package to the client as a file attachment.

    Looks up the MozillaPackageBuild row by primary key, opens the
    package file from BUILD_DIR and returns it with a guessed MIME type,
    Content-Length and an attachment Content-Disposition header.
    """

    moz_package = get_object_or_404(models.MozillaPackageBuild, id=id)
    filename = moz_package.build_package_name
    fullname = "%s/%s" % (BUILD_DIR, filename)
    try:
        # Python 2 builtin file(); binary mode so bytes pass through untouched.
        f = file(fullname, "rb")
    except Exception, e:
        # NOTE(review): the error only goes to stdout — consider logging.
        print e
        return page_not_found(request, template_name='404.html')
    try:
        wrapper = FileWrapper(f)
        response = HttpResponse(wrapper,
                                mimetype=mimetypes.guess_type(filename)[0])
        response['Content-Length'] = os.path.getsize(fullname)
        response['Content-Disposition'] = 'attachment; filename={0}'.format(
            filename)
        return response
    except Exception, e:
        # NOTE(review): renders the 500 template through page_not_found, so
        # the HTTP status is presumably 404 — confirm this is intentional.
        # Also, `f` is presumably left open on this path — verify cleanup.
        return page_not_found(request, template_name='500.html')
Esempio n. 2
0
def showresults(request, filename):
    """Serve the job file resolved from *filename* as a CSV attachment."""
    job_path = getJobfile(filename)
    wrapper = FileWrapper(open(job_path, "rb"))
    response = HttpResponse(wrapper, content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename="%s"' % filename
    return response
Esempio n. 3
0
def job_download(request, pk):
    """Serve the zipped URL list of a Job as an HTTP attachment.

    Missing-job and missing-file errors are reported as JSON through the
    Epoxy API-error wrapper instead of raising.
    """
    import os
    from mimetypes import guess_type
    from django.core.servers.basehttp import FileWrapper
    from django.http import HttpResponse

    epoxy = Epoxy(request)  # useful to handle errors
    try:
        j = Job.objects.get(pk=pk)
    except Job.DoesNotExist, e:
        return epoxy.throw_error(code=API_EXCEPTION_DOESNOTEXIST,
                                 error=e).json()

    # The downloadable archive lives at a fixed name inside the job's
    # working directory.
    filepath = os.path.join(j.get_path(), 'urls_to_zip.zip')

    if not os.path.exists(filepath):
        return epoxy.throw_error(
            code=API_EXCEPTION_DOESNOTEXIST,
            error='Job does not seem to have any downloadable file associated'
        ).json()

    # guess_type returns a (type, encoding) pair; only the type is used.
    content_type = guess_type(filepath)
    wrapper = FileWrapper(file(filepath))  # Python 2 builtin file(), text mode
    response = HttpResponse(wrapper, content_type=content_type[0])
    response['Content-Length'] = os.path.getsize(filepath)
    response[
        'Content-Disposition'] = 'attachment; filename=%s--%s[zup].zip' % (
            j.slug, j.date_created.strftime('%Y-%m-%d--%H-%M-%S'))
    return response
Esempio n. 4
0
File: chips.py Project: zjwang6/TS
def getChipPdf(request, path):
    """Download Report document in PDF format.

    Extracts InitLog.txt from the tar archive at *path*, parses the chip
    calibration log with a set of regular expressions, renders an HTML
    "Instrument Installation Report" into /tmp, converts it to PDF with
    wkhtmltopdf, and streams the PDF back as an attachment.  Both temp
    files are deleted before returning.
    """
    import re
    from django.core.servers.basehttp import FileWrapper

    # Thin wrapper around subprocess; stdout/stderr are captured and
    # discarded — only used here to run wkhtmltopdf synchronously.
    def runFromShell(cmd1):
        p1 = subprocess.Popen(cmd1,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)
        stdout, stderr = p1.communicate()
        return p1

    # Force the path to be absolute from the filesystem root.
    path = os.path.join("/", path)

    # File I/O setup
    tmpstem = os.path.basename(path).split(".")[0]
    tmphtml = os.path.join("/tmp", tmpstem + ".html")
    tmppdf = os.path.join("/tmp", tmpstem + ".pdf")
    tf = tarfile.open(path)
    ti = tf.extractfile("InitLog.txt")

    # regular expressions for the string parsing
    ph = re.compile(r"\d*\)\sW2\spH=\d*.\d*")
    phpass = re.compile(r"(W2\sCalibrate\sPassed\sPH=)(\d*.\d*)")
    adding = re.compile(r"(\d*\)\sAdding\s)(\d*.\d*)(\sml)")
    datefilter = re.compile(
        r"Sun|Mon|Tue|Wed|Thu|Fri|Sat"
    )  # Simple filter for the line with the date on it.
    namefilter = re.compile(r"(Name:\s*)([a-z][a-z0-9_]*)", re.IGNORECASE)
    serialfilter = re.compile(r"(Serial)(.*?)(:)(\s*)([a-z][a-z0-9_]*)",
                              re.IGNORECASE)
    # NOTE(review): the raw string makes this match a literal backslash
    # followed by "s+", not whitespace — presumably a bug; confirm against
    # real InitLog.txt lines.
    surfacefilter = re.compile(r"(surface)(=)((?:[a-z][a-z]+))(\\s+)",
                               re.IGNORECASE)
    rawtracefilter = re.compile(
        r"(RawTraces)(\s+)((?:[a-z][a-z]*[0-9]+[a-z0-9]*))(:)(\s+)([+-]?\d*\.\d+)(?![-+0-9\.])",
        re.IGNORECASE,
    )

    # initialize variables
    initialph = ""          # first pH reading seen
    finalph = ""            # pH at "Calibrate Passed"
    totalAdded = 0.0        # total ml of hydroxide added
    iterationBuffer = ""    # accumulated HTML rows for each iteration
    rawtraceBuffer = ""     # accumulated HTML rows for raw traces
    totalIterations = 0
    startdate = ""
    enddate = ""
    pgmname = ""
    serialnumber = ""
    calstatus = ""
    surface = ""
    rawtracestartdate = ""

    # Log file parsing
    for line in ti.readlines():
        test = namefilter.match(line)
        if test:
            pgmname = test.group(2)

        test = serialfilter.match(line)
        if test:
            serialnumber = test.group(5)

        test = ph.match(line)
        if test:
            # The first pH line encountered becomes the initial pH.
            if initialph == "":
                initialph = test.group().split("=")[1]
            iterationBuffer += "<tr><td>%s</td></tr>\n" % test.group()
            totalIterations += 1

        test = adding.match(line)
        if test:
            iterationBuffer += "<tr><td>%s</td></tr>\n" % test.group()
            totalAdded += float(test.group(2))

        test = phpass.match(line)
        if test:
            finalph = test.group(2)
            calstatus = "PASSED"

        if datefilter.match(line):
            # Keep only the first date line as the start date.
            if startdate == "":
                startdate = line.strip()

        test = surfacefilter.match(line)
        if test:
            surface = test.group(3)

        test = rawtracefilter.match(line)
        if test:
            rawtraceBuffer += "<tr><td>%s %s: %s</td></tr>\n" % (
                test.group(1),
                test.group(3),
                test.group(6),
            )

    # Find the end date of the Chip Calibration - we need multilines to identify the end date entry
    # We are assuming that line endings are always newline char.
    ti.seek(0)
    contents = ti.read()
    enddatefilter = re.compile(
        "^(W2 Calibrate Passed.*$\n)(Added.*$\n)([Sun|Mon|Tue|Wed|Thu|Fri|Sat].*$)",
        re.MULTILINE,
    )
    # NOTE(review): the second argument of search() is a start position,
    # so re.MULTILINE here is treated as an offset — verify intent.
    m = enddatefilter.search(contents, re.MULTILINE)
    if m:
        enddate = m.group(3)

    startrawfilter = re.compile(
        "([Sun|Mon|Tue|Wed|Thu|Fri|Sat].*$\n)(RawTraces.*$)", re.MULTILINE)
    m = startrawfilter.search(contents, re.MULTILINE)
    if m:
        rawtracestartdate = m.group(1)
        rawtraceBuffer = ("<tr><td>Raw Traces</td><td></td><td>%s</td></tr>" %
                          rawtracestartdate) + rawtraceBuffer

    tf.close()

    # Render the HTML report that wkhtmltopdf will convert.
    f = open(tmphtml, "w")
    f.write("<html>\n")
    f.write(
        "<img src='/var/www/site_media/images/logo_top_right_banner.png' alt='lifetechnologies, inc.'/>"
    )
    # If there are sufficient errors in parsing, display an error banner
    if calstatus == "" and finalph == "":
        f.write('<table width="100%">')
        f.write("<tr><td></td></tr>")
        f.write(
            "<tr><td align=center><hr /><font color=red><i><h2>* * * Error parsing InitLog.txt * * *</h2></i></font><hr /></td></tr>"
        )
        f.write("<tr><td></td></tr>")
        f.write("</table>")
    else:
        f.write('<table width="100%">')
        f.write("<tr><td></td></tr>")
        f.write(
            "<tr><td align=center><hr /><i><h2>Instrument Installation Report</h2></i><hr /></td></tr>"
        )
        f.write("<tr><td></td></tr>")
        f.write("</table>")

    # Summary table of everything parsed from the log.
    f.write('<table width="100%">')
    f.write("<tr><td>Instrument Name</td><td>%s</td></tr>\n" % (pgmname))
    f.write("<tr><td>Serial Number</td><td>%s</td></tr>\n" % (serialnumber))
    f.write("<tr><td>Chip Surface</td><td>%s</td></tr>\n" % (surface))
    f.write("<tr><td>Initial pH:</td><td>%s</td><td>%s</td></tr>\n" %
            (initialph, startdate))
    f.write("<tr><td></td></tr>\n")  # puts a small line space
    f.write(iterationBuffer)
    f.write("<tr><td></td></tr>\n")  # puts a small line space
    f.write("<tr><td>Total Hydroxide Added:</td><td>%0.2f ml</td></tr>\n" %
            totalAdded)
    f.write("<tr><td>Total Iterations:</td><td>%d</td></tr>\n" %
            totalIterations)
    f.write("<tr><td>Final pH:</td><td>%s</td><td>%s</td></tr>\n" %
            (finalph, enddate))
    f.write("<tr><td></td></tr>\n")  # puts a small line space
    f.write(rawtraceBuffer)
    f.write("<tr><td></td></tr>\n")  # puts a small line space
    f.write(
        '<tr><td>Instrument Installation Status:</td><td><i><font color="#00aa00">%s</font></i></td></tr>\n'
        % calstatus)
    f.write("</table>")

    # Signature block for customer / FSE acknowledgement.
    f.write("<br /><br />\n")
    f.write('<table frame="box">\n')
    f.write('<tr><th align="left">Acknowledged by:</th></tr>')
    f.write('<tr><td align="left"><br />\n')
    f.write(
        "__________________________________________________________<br />Customer Signature</td>"
    )
    f.write('<td align="left"><br />___________________<br />Date\n')
    f.write("</td></tr>")
    f.write('<tr><td align="left"><br />\n')
    f.write(
        "__________________________________________________________<br />Life Tech FSE Signature</td>"
    )
    f.write('<td align="left"><br />___________________<br />Date\n')
    f.write("</td></tr></table></html>\n")
    f.close()
    # Convert the HTML into a PDF with the bundled wkhtmltopdf binary.
    pdf_cmd = [
        "/opt/ion/iondb/bin/wkhtmltopdf-amd64",
        str(tmphtml),
        str(tmppdf)
    ]
    runFromShell(pdf_cmd)
    os.unlink(tmphtml)
    response = HttpResponse(FileWrapper(open(tmppdf)),
                            mimetype="application/pdf")
    response["Content-Disposition"] = "attachment; filename=%s" % (
        os.path.basename(tmppdf))
    # Can we delete the pdf file now?  Yes,on my dev box...
    os.unlink(tmppdf)

    return response
            logging.exception(
                'There was an error exporting course {0}. {1}'.format(
                    course_module.location, unicode(e)))
            return render_to_response(
                'export.html', {
                    'context_course': course_module,
                    'in_err': True,
                    'unit': None,
                    'raw_err_msg': str(e),
                    'course_home_url': location.url_reverse("course"),
                    'export_url': export_url
                })
        finally:
            shutil.rmtree(root_dir / name)

        wrapper = FileWrapper(export_file)
        response = HttpResponse(wrapper, content_type='application/x-tgz')
        response[
            'Content-Disposition'] = 'attachment; filename=%s' % os.path.basename(
                export_file.name)
        response['Content-Length'] = os.path.getsize(export_file.name)
        return response

    elif 'text/html' in requested_format:
        return render_to_response('export.html', {
            'context_course': course_module,
            'export_url': export_url
        })

    else:
        # Only HTML or x-tgz request formats are supported (no JSON).
Esempio n. 6
0
def download_datafile(request, datafile_id):
    """Serve a Dataset_File to the client, resolving its storage protocol.

    Resolution order: http/https/ftp URLs are redirected; 'tardis' URLs
    are streamed from FILE_STORE_PATH (with a legacy path fallback);
    ''/'file' protocols are streamed from the URL's local path; anything
    else is a 404.  Access is checked first via has_datafile_access.
    """

    # todo handle missing file, general error
    datafile = Dataset_File.objects.get(pk=datafile_id)
    expid = datafile.dataset.experiment.id

    if has_datafile_access(request=request, dataset_file_id=datafile.id):
        url = datafile.url
        if url.startswith('http://') or url.startswith('https://') \
            or url.startswith('ftp://'):
            # Remote file: let the client fetch it directly.
            return HttpResponseRedirect(datafile.url)

        elif datafile.protocol == 'tardis' or \
            datafile.url.startswith('tardis'):

            # 'tardis://...' — the part after '//' is relative to the store.
            raw_path = url.partition('//')[2]
            file_path = path.join(settings.FILE_STORE_PATH, str(expid),
                                  str(datafile.dataset.id), raw_path)
            try:

                wrapper = FileWrapper(file(file_path))
                response = HttpResponse(wrapper,
                                        mimetype=datafile.get_mimetype())
                response['Content-Disposition'] = \
                    'attachment; filename="%s"' % datafile.filename
                return response

            except IOError:
                # Fallback: legacy layout without the dataset-id directory.
                try:
                    file_path = path.join(settings.FILE_STORE_PATH, str(expid),
                                          raw_path)
                    print file_path
                    wrapper = FileWrapper(file(file_path))
                    response = HttpResponse(wrapper,
                                            mimetype=datafile.get_mimetype())
                    response['Content-Disposition'] = \
                        'attachment; filename="%s"' % datafile.filename
                    return response
                except IOError:
                    return return_response_not_found(request)

        elif datafile.protocol in ['', 'file']:
            # Local file: path comes straight from the URL after '://'.
            file_path = datafile.url.partition('://')[2]
            if not path.exists(file_path):

                return return_response_not_found(request)

        else:
            #            logger.error('exp %s: file protocol unknown: %s' % (expid,
            #                                                                datafile.url))
            return return_response_not_found(request)

    else:
        return return_response_error(request)

    # Only the ''/'file' branch falls through to here with file_path set.
    try:
        wrapper = FileWrapper(file(file_path))
        response = HttpResponse(wrapper, mimetype=datafile.get_mimetype())
        response['Content-Disposition'] = \
            'attachment; filename="%s"' % datafile.filename
        return response

    except IOError:
        logger.exception()
        return return_response_not_found(request)
Esempio n. 7
0
def send_file(filename):
    """Stream the file at *filename* as a plain-text HTTP response.

    Opens the file in binary mode so the bytes pass through unmodified:
    the previous Python 2 ``file(filename)`` call opened in text mode,
    which can corrupt content on platforms that translate line endings,
    and the ``file`` builtin no longer exists in Python 3.

    Returns an HttpResponse with Content-Length set from the on-disk
    size of *filename*.
    """
    wrapper = FileWrapper(open(filename, "rb"))
    response = HttpResponse(wrapper, content_type='text/plain')
    response['Content-Length'] = os.path.getsize(filename)
    return response
Esempio n. 8
0
def figure(request):  # figure page
    """Render the "figure" analysis page for the logged-in user.

    Loads the user's node/community statistics and chart image URLs; on
    POST, serves either the nodes or communities spreadsheet as a
    download depending on which form field is present.
    """
    # NOTE(review): `user` is unbound when the request is anonymous —
    # presumably this view is login-protected upstream; confirm.
    if request.user.is_authenticated():
        user = request.user
    nodes = readnodes(user)
    community = readcommunity(user)
    node1 = nodes[0]
    node2 = nodes[1]
    node3 = nodes[2]
    node4 = nodes[3]
    node5 = nodes[4]
    com1 = community[0]
    com2 = community[1]
    com3 = community[2]
    com4 = community[3]
    com5 = community[4]
    # Pre-rendered chart images live under the per-user static directory.
    url_nodecom = "/static/dv/%s/com.png" % user
    url_densitycom = "/static/dv/%s/dencom.png" % user
    url_center = "/static/dv/%s/center.png" % user
    top = readtop(user)
    least = readleast(user)
    # Pair the top-N and least-N values into 5 two-element rows.
    # NOTE(review): `share` aliases the same list as `top_least` and is
    # never used again — likely leftover.
    top_least = share = [[0 for i in range(2)] for j in range(5)]
    for i in range(0, 5, 1):
        top_least[i][0] = top[i]
        top_least[i][1] = least[i]
    top1 = top_least[0]
    top2 = top_least[1]
    top3 = top_least[2]
    top4 = top_least[3]
    top5 = top_least[4]
    if request.POST:
        try:
            strings = request.POST['nodes']
            if not strings == None:
                filename = "/root/vina/demo/static/dv/%s/nodes.xls" % user  # path of the file to download
                wrapper = FileWrapper(file(filename))
                response = HttpResponse(wrapper, content_type='text/plain')
                response['Content-Length'] = os.path.getsize(filename)
                response['Content-Encoding'] = 'utf-8'
                response[
                    'Content-Disposition'] = 'attachment;filename=%s' % filename
                return response
        except:
            # Missing 'nodes' field: fall back to the communities download.
            # NOTE(review): bare except is being used for form dispatch —
            # any error in the block above also lands here.
            strings = request.POST['comm']
            if not strings == None:
                filename = "/root/vina/demo/static/dv/%s/communites.xls" % user  # path of the file to download
                wrapper = FileWrapper(file(filename))
                response = HttpResponse(wrapper, content_type='text/plain')
                response['Content-Length'] = os.path.getsize(filename)
                response['Content-Encoding'] = 'utf-8'
                response[
                    'Content-Disposition'] = 'attachment;filename=%s' % filename
                return response
    return render(
        request, 'figure.html', {
            "node1": node1,
            "node2": node2,
            "node3": node3,
            "node4": node4,
            "node5": node5,
            "com1": com1,
            "com2": com2,
            "com3": com3,
            "com4": com4,
            "com5": com5,
            "url_nodecom": url_nodecom,
            "url_densitycom": url_densitycom,
            "url_center": url_center,
            "top1": top1,
            "top2": top2,
            "top3": top3,
            "top4": top4,
            "top5": top5
        })
Esempio n. 9
0
def doDownload(request):
    """Validate the download form, record the request, and serve Scipion.

    Requires fullName, organization, email, version and platform in the
    POST data; on success appends a DownloadRecord to a local SQLite
    database and streams the matching tarball.  On validation failure
    returns the error list as JSON.
    """

    fullName = request.POST.get('fullName')
    organization = request.POST.get('organization')
    email = request.POST.get('email')
    mailoption = request.POST.get('mailoption')
    country = request.POST.get('country')
    version = request.POST.get('version')
    platform = request.POST.get('platform')

    errors = ""

    # Collect all validation messages rather than failing on the first.
    if not len(fullName) > 0:
        errors += "Please fill in the fullName field.\n"
    if not len(organization) > 0:
        errors += "Please fill in the Organization field.\n"
    if not len(email) > 0:
        errors += "Please fill in the Email field.\n"
#     if not len(mailoption) > 0:
#         errors += "Please choose one into the Country field.\n"
    if not len(version) > 0:
        errors += "Please fill in the Scipion Version field.\n"
    if not len(platform) > 0:
        errors += "Please fill in the Platform field.\n"

    if len(errors) == 0:
        dbName = os.path.join(os.environ['SCIPION_HOME'], 'downloads.sqlite')
        #dbName = '/tmp/downloads.sqlite'

        # Persist the download request for later statistics.
        mapper = SqliteFlatMapper(dbName, globals())
        mapper.enableAppend()
        download = DownloadRecord(fullName=fullName,
                                  organization=organization,
                                  email=email,
                                  subscription=mailoption,
                                  country=country,
                                  version=version,
                                  platform=platform)

        mapper.store(download)
        mapper.commit()
        mapper.close()
        "Return a response with the scipion download file"
        if platform == 'linuxbin':
            path = os.path.join(os.environ['SCIPION_HOME'], 'pyworkflow',
                                'web', 'pages', 'resources', 'install',
                                'scipion_all_packages_2015-06-29.tgz')
        else:
            path = os.path.join(os.environ['SCIPION_HOME'], 'pyworkflow',
                                'web', 'pages', 'resources', 'install',
                                'scipion_source_2015-06-29.tgz')
        if not os.path.exists(path):
            return HttpResponseNotFound('Path not found: %s' % path)

        # NOTE(review): open() without 'rb' reads the .tgz in text mode —
        # presumably harmless on this deployment's platform; verify.
        response = HttpResponse(FileWrapper(open(path)),
                                content_type=mimetypes.guess_type(path)[0])
        response['Content-Length'] = os.path.getsize(path)
        response[
            'Content-Disposition'] = 'attachment; filename=%s' % os.path.basename(
                path)

        return response
    else:
        jsonStr = json.dumps({'errors': parseText(errors)}, ensure_ascii=False)

        return HttpResponse(jsonStr, mimetype='application/javascript')
Esempio n. 10
0
def djangoToExportFormat(request,
                         filter_object,
                         properties_list=None,
                         geom_col="geom",
                         format="geojson"):
    """Convert a GeoDjango QuerySet to GeoJSON, KML or a zipped Shapefile.

    When *properties_list* is None the exportable attribute names are
    derived from the first object's __dict__, coercing Decimal to float
    and date/time to str on every object in the queryset.  Returns an
    HttpResponse whose content type depends on *format*; raises
    ValueError for an unknown format.
    """

    #Workaround for mutable default value
    if properties_list is None:
        properties_list = []
        #Return dictionary of key value pairs
        filter_dict = filter_object[0].__dict__
        #Remove bunk fields
        for d in filter_dict:
            if isinstance(filter_dict[d], django.db.models.base.ModelState):
                pass
            # Convert decimal to float
            elif isinstance(filter_dict[d], Decimal):
                for obj in filter_object:
                    setattr(obj, d, float(obj.__dict__[d]))
                properties_list.append(d)
            # Convert date to string
            elif isinstance(filter_dict[d], date):
                for obj in filter_object:
                    setattr(obj, d, str(obj.__dict__[d]))
                properties_list.append(d)
            # Convert time to string
            elif isinstance(filter_dict[d], time):
                for obj in filter_object:
                    setattr(obj, d, str(obj.__dict__[d]))
                properties_list.append(d)
            else:
                properties_list.append(d)

        # The geometry column is handled by vectorformats, not as a property.
        properties_list.remove(geom_col)

    queryset = filter_object
    djf = Django.Django(geodjango=geom_col, properties=properties_list)
    decode_djf = djf.decode(queryset)
    if format.lower() == 'geojson':
        geoj = GeoJSON.GeoJSON()
        # Pretty Print using JSON dumps method. Note requires setting
        # vectorformats encode method to_string param to False.
        geom_out = dumps(geoj.encode(decode_djf, to_string=False),
                         indent=4,
                         separators=(',', ': '))
        response = HttpResponse(geom_out, content_type="text/plain")
    elif format.lower() == 'kml':
        # title property can be passed as a keyword arg.
        # See vectorformats kml.py
        kml = KML.KML(title_property='name')
        geom_out = kml.encode(decode_djf)
        response = HttpResponse(
            geom_out, content_type="application/vnd.google-earth.kml+xml")
        response['Content-Disposition'] = 'attachment; filename="kml_out.kml"'
    elif format.lower() == 'shp':
        # convert to GeoJSON, then Use Fiona to Create a Shapefile.
        geoj = GeoJSON.GeoJSON()
        geoJSON = dumps(geoj.encode(decode_djf, to_string=False),
                        indent=4,
                        separators=(',', ': '))

        # Hard source properties for the destination shapefile.
        # These will be passed to Fiona.
        # NOTE(review): this fixed schema assumes every feature carries
        # exactly these properties — features with other attributes will
        # fail in the write loop below.
        shp_driver = 'ESRI Shapefile'
        shp_crs = {
            'no_defs': True,
            'ellps': 'WGS84',
            'datum': 'WGS84',
            'proj': 'longlat'
        }
        shp_schema = {
            'geometry': decode_djf[0].geometry['type'],
            'properties': {
                'addDate': 'str',
                'collectDate': 'str',
                'collectionMethod': 'str',
                'comment': 'str',
                'featurePurpose': 'str',
                'group': 'str',
                'name': 'str',
                'updateDate': 'str'
            }
        }

        upload_dir = make_temp_dir()
        zipdir = os.path.join(upload_dir, decode_djf[0].properties['group'])

        with fiona.open(zipdir,
                        'w',
                        driver=shp_driver,
                        crs=shp_crs,
                        schema=shp_schema) as dest_shp:
            for feature in decode_djf:
                out_feature = {'geometry': {}, 'properties': {}}
                for property in shp_schema['properties']:
                    out_feature['properties'][property] = feature[
                        'properties'][property]
                out_feature['geometry'] = feature['geometry']
                dest_shp.write(out_feature)

        # Create the zip archive
        zip = make_zip_archive(zipdir)
        shp_zip = open(zip.filename)
        response = HttpResponse(FileWrapper(shp_zip),
                                content_type='application/zip')
        response['Content-Disposition'] = 'attachment; filename=shp_out.zip'
        response['Content-Length'] = os.path.getsize(zip.filename)

    else:
        raise ValueError
    return response
Esempio n. 11
0
def alerts(request):
    """Master view for the U-report alerts dashboard.

    Handles, in order: district/group session filters, an Excel export
    (?download), free-text search (?search), toggling message capture
    (?capture), an AJAX poll for new incoming alerts (?ajax), rating a
    message (?rating), and finally the paginated alert-list page.
    """
    access = get_access(request)
    poll_form = NewPollForm()
    range_form = rangeForm()
    poll_form.updateTypes()
    assign_polls = Poll.objects.exclude(start_date=None).order_by('-pk')[0:5]
    district_form = DistrictForm(request.POST or None)
    if request.GET.get('reset_districts', None):
        request.session['districts'] = None
        request.session['groups'] = None

    if district_form.is_valid():
        request.session['districts'] = [
            c.pk for c in district_form.cleaned_data['districts']
        ]

    groupform = AssignResponseGroupForm(request=request, access=access)
    if request.method == 'POST' and request.POST.get('groups', None):
        g_form = AssignResponseGroupForm(request.POST, request=request)
        if g_form.is_valid():
            request.session['groups'] = g_form.cleaned_data['groups']

    template = 'ureport/polls/alerts.html'
    # Base queryset: messages tagged 'alert', optionally narrowed by the
    # districts/groups stored in the session and by the user's access.
    if request.session.get('districts'):
        message_list = \
            Message.objects.filter(details__attribute__name='alert'

            ).filter(connection__contact__reporting_location__in=request.session.get('districts'))
    else:
        message_list = Message.objects.filter(details__attribute__name='alert')

    if request.session.get('groups', None):
        message_list = message_list.filter(
            connection__contact__groups__in=request.session.get('groups'))

    if access:
        message_list = message_list.filter(
            connection__contact__groups__in=access.groups.all())
    (capture_status, _) = \
        Settings.objects.get_or_create(attribute='alerts')
    (rate, _) = MessageAttribute.objects.get_or_create(name='rating')

    # message_list=[Message.objects.latest('date')]
    # use more efficient count

    # Excel export of alerts in a date range (only for unrestricted users).
    if request.GET.get('download', None) and access is None:
        range_form = rangeForm(request.POST)
        if range_form.is_valid():
            start = range_form.cleaned_data['startdate']
            end = range_form.cleaned_data['enddate']
            from django.core.servers.basehttp import FileWrapper

            cols = [
                "replied", "rating", "direction", "district", "date",
                "message", "id", "forwarded"
            ]
            data = AlertsExport.objects.filter(
                date__range=(start, end)).values_list(*cols).iterator()
            excel_file_path = \
                os.path.join(os.path.join(os.path.join(UREPORT_ROOT,
                                                       'static'), 'spreadsheets'),
                             'alerts.xlsx')
            # ExcelResponse writes the spreadsheet to disk; it is then
            # streamed back from that path.
            ExcelResponse(data,
                          output_name=excel_file_path,
                          write_to_file=True,
                          headers=cols)
            response = HttpResponse(
                FileWrapper(open(excel_file_path)),
                content_type=
                'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
            )
            response[
                'Content-Disposition'] = 'attachment; filename=alerts.xlsx'
            from django import db

            db.reset_queries()
            response['Cache-Control'] = 'no-cache'
            return response

    # Search: "..." = whole-word regex, '...' = exact, special token for
    # numeric messages, otherwise substring match.
    if request.GET.get('search', None):
        search = request.GET.get('search')
        if search[0] == '"' and search[-1] == '"':
            search = search[1:-1]
            message_list = message_list.filter(
                Q(text__iregex=".*\m(%s)\y.*" % search)
                | Q(connection__contact__reporting_location__name__iregex=
                    ".*\m(%s)\y.*" % search)
                | Q(connection__pk__iregex=".*\m(%s)\y.*" % search))
        elif search[0] == "'" and search[-1] == "'":

            search = search[1:-1]
            message_list = message_list.filter(
                Q(text__iexact=search)
                |
                Q(connection__contact__reporting_location__name__iexact=search)
                | Q(connection__pk__iexact=search))
        elif search == "=numerical value()":
            message_list = message_list.filter(text__iregex="(-?\d+(\.\d+)?)")
        else:

            message_list = message_list.filter(
                Q(text__icontains=search)
                | Q(connection__contact__reporting_location__name__icontains=
                    search)
                | Q(connection__pk__icontains=search))

    # Toggle the global alert-capture flag and echo the new button label.
    if request.GET.get('capture', None):
        (s, _) = Settings.objects.get_or_create(attribute='alerts')
        if s.value == 'true':
            s.value = 'false'
            s.save()
            reply = 'Start Capture'
        else:
            s.value = 'true'
            s.save()
            reply = 'Stop Capture'
        return HttpResponse(reply)
    # AJAX poll: return JSON rows for alerts from the last 30 seconds that
    # have not already been sent to this session.
    if request.GET.get('ajax', None):
        date = datetime.datetime.now() - datetime.timedelta(seconds=30)
        prev = request.session.get('prev', [])
        msgs = Message.objects.filter(
            details__attribute__name='alert',
            direction='I').filter(date__gte=date).exclude(pk__in=prev)
        if access:
            msgs = msgs.filter(
                connection__contact__groups__in=access.groups.all())
        request.session['prev'] = list(msgs.values_list('pk', flat=True))
        msgs_list = []
        if msgs:
            for msg in msgs:
                from django.template.loader import render_to_string

                can_view_number = request.user.has_perm('view_numbers')
                can_foward = request.user.has_perm('forward')
                row_rendered = \
                    render_to_string('ureport/partials/row.html',
                                     {'msg': msg, 'can_foward': can_foward, 'can_view_number': can_view_number,
                                      'assign_polls': assign_polls})

                m = {}
                m['text'] = msg.text
                m['date'] = str(msg.date.date())
                if msg.connection.contact:
                    m['name'] = msg.connection.contact.name
                else:
                    m['name'] = 'Anonymous User'
                m['number'] = msg.connection.identity
                if msg.connection.contact \
                    and msg.connection.contact.reporting_location:
                    m['district'] = \
                        msg.connection.contact.reporting_location.name
                else:
                    m['district'] = 'N/A'
                rating = msg.details.filter(attribute__name='alerts')
                if rating:
                    r = rating[0].value
                else:
                    r = 0
                m['row'] = row_rendered
                m['connection'] = msg.connection.pk
                m['pk'] = msg.pk
                msgs_list.append(m)
            return HttpResponse(mark_safe(simplejson.dumps(msgs_list)))
        else:
            return HttpResponse('success')
    # Record a 1-5 rating on a message and return the rendered widget.
    if request.GET.get('rating', None):
        rating = request.GET.get('rating')
        descs = {
            '1': 'Requires Attention',
            '2': 'Moderate',
            '3': 'Important',
            '4': 'Urgent',
            '5': 'Very Urgent',
        }
        msg = Message.objects.get(pk=int(request.GET.get('msg')))
        (rate, _) = MessageAttribute.objects.get_or_create(name='rating')
        det = MessageDetail.objects.create(message=msg,
                                           attribute=rate,
                                           value=rating,
                                           description=descs.get(rating, ''))
        response = \
            """<li><a href='javascript:void(0)'  class="rate%s"

                            title="%s">%s</a></li>""" \
            % (rating, descs.get(rating, ''), descs.get(rating, ''))

        return HttpResponse(mark_safe(response))

    # Default: render the paginated alert list.
    paginator = UreportPaginator(message_list.order_by('-date'),
                                 10,
                                 body=12,
                                 padding=2)
    page = request.GET.get('page', 1)
    try:
        messages = paginator.page(page)
    except (PageNotAnInteger, EmptyPage):

        # If page is not an integer, deliver first page.

        messages = paginator.page(1)

    return render_to_response(template, {
        'messages': messages,
        'assign_polls': assign_polls,
        'paginator': paginator,
        'capture_status': capture_status,
        'rate': rate,
        'district_form': district_form,
        'range_form': range_form,
        'groupform': groupform,
    },
                              context_instance=RequestContext(request))
Esempio n. 12
0
def home(request):
    """Build a downloadable "CD content" zip of selected spoken-tutorial material.

    POST (valid CDContentForm): reads the 'selected_foss' JSON payload, a
    mapping of FossCategory id -> [list of Language ids, level], then packs
    instruction/installation sheets, tutorial videos, subtitles, slides,
    assignments, code files and rendered offline HTML pages into an in-memory
    zip and returns it as an attachment (spoken-tutorial-cdcontent.zip).
    GET (or invalid form): renders the selection form page.
    """
    if request.method == 'POST':
        form = CDContentForm(request.POST)
        if form.is_valid():
            # Spool the archive through an anonymous temp file; allowZip64
            # because the collected videos can push the archive past 2 GB.
            temp = tempfile.TemporaryFile()
            archive = zipfile.ZipFile(temp, 'w', zipfile.ZIP_DEFLATED, allowZip64=True)
            selectedfoss = json.loads(request.POST.get('selected_foss', {}))
            all_foss_details, languages = get_all_foss_details(selectedfoss)
            eng_rec = Language.objects.get(name="English")
            for key, values in selectedfoss.iteritems():
                foss_rec = FossCategory.objects.get(pk = key)
                level = int(values[1])
                # eng_flag: additionally bundle the English .srt alongside
                # non-English tutorials, unless English itself was selected.
                eng_flag = True
                if str(eng_rec.id) in values[0]:
                    eng_flag = False
                for value in values[0]:
                    language = Language.objects.get(pk = value)
                    # Instruction/installation sheets (dst_path is falsy when
                    # the sheet is unavailable for this FOSS/language).
                    src_path, dst_path = get_sheet_path(foss_rec, language, 'instruction')
                    if dst_path:
                        archive.write(src_path, dst_path)
                    src_path, dst_path = get_sheet_path(foss_rec, language, 'installation')
                    if dst_path:
                        archive.write(src_path, dst_path)
                    # Published tutorials (status 1 or 2), optionally narrowed
                    # to a single difficulty level.
                    if level:
                        tr_recs = TutorialResource.objects.filter(Q(status = 1)|Q(status = 2), tutorial_detail__foss_id = key, tutorial_detail__level_id = level, language_id = value).order_by('tutorial_detail__level', 'tutorial_detail__order', 'language__name')
                    else:
                        tr_recs = TutorialResource.objects.filter(Q(status = 1)|Q(status = 2), tutorial_detail__foss_id = key, language_id = value).order_by('tutorial_detail__level', 'tutorial_detail__order', 'language__name')
                    rec = None
                    for rec in tr_recs:
                        if eng_flag:
                            filepath = 'videos/' + str(key) + '/' + str(rec.tutorial_detail_id) + '/' + rec.tutorial_detail.tutorial.replace(' ', '-') + "-English.srt"
                            if os.path.isfile(settings.MEDIA_ROOT + filepath):
                                archive.write(settings.MEDIA_ROOT + filepath, 'spoken/' + filepath)
                        filepath = 'videos/' + str(key) + '/' + str(rec.tutorial_detail_id) + '/' + rec.video

                        # Check if the side-by-side video for the selected
                        # language is present; if not, fall back to the
                        # default (English) side-by-side video.
                        side_by_side_language = settings.BASE_DIR + '/media/videos/32/714/Side-by-Side-Method-%s.ogv'%(language.name)
                        if os.path.exists(side_by_side_language):
                          archive.write(settings.BASE_DIR + '/media/videos/32/714/Side-by-Side-Method-%s.ogv'%(language.name), 'spoken/Side_by_Side-Method-%s.ogv'%(language.name))
                        else:
                          archive.write(settings.BASE_DIR + '/media/side-by-side-method.ogv', 'spoken/side-by-side-method.ogv')

                        if os.path.isfile(settings.MEDIA_ROOT + filepath):
                            archive.write(settings.MEDIA_ROOT + filepath, 'spoken/' + filepath)
                        # Matching subtitle file: same path with a .srt suffix.
                        ptr = filepath.rfind(".")
                        filepath = filepath[:ptr] + '.srt'
                        if os.path.isfile(settings.MEDIA_ROOT + filepath):
                            archive.write(settings.MEDIA_ROOT + filepath, 'spoken/' + filepath)
                        # Optional resources: slides, assignments, code files.
                        # Status 6 appears to mean "not applicable" -- TODO confirm.
                        if rec.common_content.slide_status > 0:
                            filepath = 'videos/' + str(key) + '/' + str(rec.tutorial_detail_id) + '/resources/' + rec.common_content.slide
                            if os.path.isfile(settings.MEDIA_ROOT + filepath):
                                archive.write(settings.MEDIA_ROOT + filepath, 'spoken/' + filepath)
                        if rec.common_content.assignment_status > 0 and rec.common_content.assignment_status != 6:
                            filepath = 'videos/' + str(key) + '/' + str(rec.tutorial_detail_id) + '/resources/' + rec.common_content.assignment
                            if os.path.isfile(settings.MEDIA_ROOT + filepath):
                                archive.write(settings.MEDIA_ROOT + filepath, 'spoken/' + filepath)
                        if rec.common_content.code_status > 0 and rec.common_content.code_status != 6:
                            filepath = 'videos/' + str(key) + '/' + str(rec.tutorial_detail_id) + '/resources/' + rec.common_content.code
                            if os.path.isfile(settings.MEDIA_ROOT + filepath):
                                archive.write(settings.MEDIA_ROOT + filepath, 'spoken/' + filepath)
                        # Offline "watch" page rendered per tutorial; the
                        # response header line is stripped from str(render(...)).
                        tutorial_path = str(rec.tutorial_detail.foss_id) + '/' + str(rec.tutorial_detail_id) + '/'
                        ctx = {
                            'tr_rec': rec,
                            'tr_recs': tr_recs,
                            'media_path': settings.MEDIA_ROOT,
                            'tutorial_path': tutorial_path,
                        }
                        watch_page = str(render(request, "cdcontent/templates/watch_tutorial.html", ctx))
                        watch_page = watch_page.replace('Content-Type: text/html; charset=utf-8', '')
                        watch_page = watch_page.strip("\n")
                        archive.writestr('spoken/videos/' + tutorial_path + 'show-video-' + rec.language.name + '.html', watch_page)
                    # Per-FOSS/per-language listing page.
                    list_page = str(render(request, "cdcontent/templates/tutorial_search.html", {'collection': tr_recs, 'foss_details': all_foss_details, 'foss': foss_rec.id, 'lang': language.id}))
                    list_page = list_page.replace('Content-Type: text/html; charset=utf-8', '')
                    list_page = list_page.strip("\n")
                    archive.writestr('spoken/videos/' + str(foss_rec.id) + '/list-videos-' + language.name + '.html', list_page)
            # NOTE(review): foss_rec/language below carry the values of the
            # last loop iteration; if selectedfoss were empty they would be
            # unbound here -- confirm the form guarantees a non-empty selection.
            home_page = str(render(request, "cdcontent/templates/home.html", {'foss_details': all_foss_details, 'foss': foss_rec.id, 'lang': language.id, 'languages': languages}))
            home_page = home_page.replace('Content-Type: text/html; charset=utf-8', '')
            home_page = home_page.strip("\n")
            archive.writestr('spoken/videos/home.html', home_page)
            # Static assets required by the rendered offline pages.
            archive.write(settings.BASE_DIR + '/static/spoken/css/bootstrap.min.css', 'spoken/includes/css/bootstrap.min.css')
            archive.write(settings.BASE_DIR + '/static/spoken/css/font-awesome.min.css', 'spoken/includes/css/font-awesome.min.css')
            archive.write(settings.BASE_DIR + '/static/spoken/css/main.css', 'spoken/includes/css/main.css')
            archive.write(settings.BASE_DIR + '/static/spoken/css/video-js.min.css', 'spoken/includes/css/video-js.min.css')
            archive.write(settings.BASE_DIR + '/static/spoken/images/favicon.ico', 'spoken/includes/images/favicon.ico')
            archive.write(settings.BASE_DIR + '/static/spoken/images/logo.png', 'spoken/includes/images/logo.png')
            archive.write(settings.BASE_DIR + '/static/spoken/js/jquery-1.11.0.min.js', 'spoken/includes/js/jquery-1.11.0.min.js')
            archive.write(settings.BASE_DIR + '/static/spoken/js/bootstrap.min.js', 'spoken/includes/js/bootstrap.min.js')
            archive.write(settings.BASE_DIR + '/static/spoken/js/video.js', 'spoken/includes/js/video.js')
            archive.write(settings.BASE_DIR + '/static/spoken/images/thumb-even.png', 'spoken/includes/images/thumb-even.png')
            archive.write(settings.BASE_DIR + '/static/spoken/images/Basic.png', 'spoken/includes/images/Basic.png')
            archive.write(settings.BASE_DIR + '/static/spoken/images/Intermediate.png', 'spoken/includes/images/Intermediate.png')
            archive.write(settings.BASE_DIR + '/static/spoken/images/Advanced.png', 'spoken/includes/images/Advanced.png')
            # archive.write(settings.BASE_DIR + '/media/side-by-side-method.ogv', 'spoken/side-by-side-method.ogv')
            zipdir(settings.BASE_DIR + '/static/spoken/fonts', 'spoken/includes/fonts/', archive)
            archive.write(settings.BASE_DIR + '/static/cdcontent/templates/readme.txt', 'spoken/README.txt')
            archive.write(settings.BASE_DIR + '/static/cdcontent/templates/index.html', 'spoken/index.html')
            archive.close()
            # Rewind and stream the finished archive.
            # NOTE(review): Content-Length is taken from temp.tell() right
            # after seek(0), which is 0 -- looks wrong; the size should be
            # measured before rewinding. Verify against download behaviour.
            temp.seek(0)
            wrapper = FileWrapper(temp)
            response = HttpResponse(wrapper, content_type='application/zip')
            response['Content-Disposition'] = 'attachment; filename=spoken-tutorial-cdcontent.zip'
            response['Content-Length'] = temp.tell()
            return response
    else:
        form = CDContentForm()
    context = {
        'form': form
    }
    context.update(csrf(request))

    return render(request, "cdcontent/templates/cdcontent_home.html", context)
Esempio n. 13
0
def download_datafiles(request):
    """
    Stream the selected datafiles to the client as a "zip" or "tar" archive.

    takes string parameter "comptype" for compression method.
    Currently implemented: "zip" and "tar"
    The datafiles to be downloaded are selected using "datafile", "dataset" or "url" parameters.
    An "expid" parameter may be supplied for use in the download archive name.  If "url" is used,
    the "expid" parameter is also used to limit the datafiles to be downloaded to a given experiment.
    """
    # Create the HttpResponse object with the appropriate headers.
    # TODO: handle no datafile, invalid filename, all http links
    # TODO: intelligent selection of temp file versus in-memory buffering.

    # Was logger.error(): tracing function entry is not an error condition.
    logger.debug('In download_datafiles !!')
    comptype = "zip"
    if 'comptype' in request.POST:
        comptype = request.POST['comptype']

    if 'datafile' in request.POST or 'dataset' in request.POST:
        # BUGFIX: the original parenthesization was
        # (len(a) > 0 or len(b)) > 0 -- it happened to evaluate truthily but
        # compared a bool/int against 0; this is the intended test.
        if (len(request.POST.getlist('datafile')) > 0
                or len(request.POST.getlist('dataset')) > 0):

            datasets = request.POST.getlist('dataset')
            datafiles = request.POST.getlist('datafile')

            # Generator producing the accessible datafiles of one dataset id.
            def get_dataset_datafiles(dsid):
                for datafile in Dataset_File.objects.filter(dataset=dsid):
                    if has_datafile_download_access(
                            request=request, dataset_file_id=datafile.id):
                        yield datafile

            # Generator producing a single datafile by id, if accessible.
            def get_datafile(dfid):
                datafile = Dataset_File.objects.get(pk=dfid)
                if has_datafile_download_access(request=request,
                                                dataset_file_id=datafile.id):
                    yield datafile

            # Chain both generators into one de-duplicated set of datafiles.
            df_set = set(
                chain(
                    chain.from_iterable(map(get_dataset_datafiles, datasets)),
                    chain.from_iterable(map(get_datafile, datafiles))))
        else:
            return render_error_message(
                request,
                'No Datasets or Datafiles were selected for downloaded',
                status=404)

    elif 'url' in request.POST:
        # BUGFIX: the original condition was inverted
        # ("if not len(...) == 0: return <error>"), which rejected every
        # request that *did* supply urls and made the loop below unreachable.
        if len(request.POST.getlist('url')) == 0:
            return render_error_message(
                request,
                'No Datasets or Datafiles were selected for downloaded',
                status=404)

        # BUGFIX: accumulate matching datafiles instead of rebinding df_set on
        # each iteration (which kept only the last url), and start from an
        # empty set so df_set is always bound for the len() check below.
        df_set = set()
        for url in request.POST.getlist('url'):
            url = urllib.unquote(url)
            raw_path = url.partition('//')[2]
            experiment_id = request.POST['expid']
            datafile = Dataset_File.objects.filter(
                url__endswith=raw_path,
                dataset__experiment__id=experiment_id)[0]
            if has_datafile_download_access(request=request,
                                            dataset_file_id=datafile.id):
                df_set.add(datafile)
    else:
        return render_error_message(
            request,
            'No Datasets or Datafiles were selected for downloaded',
            status=404)

    logger.info('Files for archive command: %s' % df_set)

    if len(df_set) == 0:
        return render_error_message(
            request, 'You do not have download access for any of the '
            'selected Datasets or Datafiles ',
            status=403)

    rootdir = 'datasets'
    # Enforce the configured maximum download size before streaming anything.
    msg = _check_download_limits(rootdir, df_set, comptype)
    if msg:
        return render_error_message(request,
                                    'Requested download is too large: %s' %
                                    msg,
                                    status=403)

    # Handle missing experiment ID - it is only needed to name the archive.
    try:
        expid = request.POST['expid']
    except KeyError:
        expid = iter(df_set).next().dataset.get_first_experiment().id

    # Stream the archive as it is written (asynchronous_file_creation).
    if comptype == "tar":
        reader = StreamingFile(_write_tar_func(rootdir, df_set),
                               asynchronous_file_creation=True)
        response = HttpResponse(FileWrapper(reader),
                                mimetype='application/x-tar')
        response['Content-Disposition'] = \
                'attachment; filename="experiment%s-selection.tar"' % expid
    elif comptype == "zip":
        reader = StreamingFile(_write_zip_func(rootdir, df_set),
                               asynchronous_file_creation=True)
        response = HttpResponse(FileWrapper(reader),
                                mimetype='application/zip')
        response['Content-Disposition'] = \
                'attachment; filename="experiment%s-selection.zip"' % expid
    else:
        response = render_error_message(request,
                                        'Unsupported download format: %s' %
                                        comptype,
                                        status=404)
    return response
Esempio n. 14
0
def get_plist(request, **kwargs):
    """Serve the plist file of the app identified by ``kwargs`` as XML."""
    # Wrap the stored file so it is streamed to the client rather than
    # loaded into memory in one piece.
    plist_file = get_app(**kwargs).plist.file
    return HttpResponse(FileWrapper(plist_file), content_type='text/xml')
Esempio n. 15
0
def export_logs(request):
    """ Get logs from database with a search query
    Return a csv formatted file

    POST: builds a repository query from the submitted date range / rule
    filters (per 'type_data': 'waf', 'packet_filter' or 'vulture'), runs it
    against the configured MongoDB or Elasticsearch repository, dumps the
    matching rows to /tmp/logs.csv, and answers with a JSON status object.
    GET: streams the previously generated /tmp/logs.csv as an attachment.
    """

    if request.method == 'POST':
        cluster   = Cluster.objects.get()

        date = json.loads(request.POST['date'])
        # Paging/sorting fields are unused for an export: fetch everything.
        params = {
            'start'       : None,
            'length'      : None,
            'sorting'     : None,
            'type_sorting': None,
            'dataset'     : False,
            'type_logs'   : request.POST['type_logs'],
            'filter'      : {
                'startDate': date['startDate'],
                'endDate'  : date["endDate"],
            }
        }


        # Each data type selects the repository to query and augments the
        # filter with type-specific fields.
        if request.POST['type_data'] == 'waf':
            app_id  = request.POST['app_id']

            ## Fetch the application
            app  = Application.objects.with_id(ObjectId(app_id))
            repo = app.log_custom.repository
            params['filter']['rules'] = json.loads(request.POST['rules'])
            params['filter']['app']   = {
                'name'        : str(app.name).replace(' ', '_'),
                'public_dir'  : app.public_dir,
                'public_name' : app.public_name,
                'public_alias': app.public_alias
            }

        elif request.POST['type_data'] == 'packet_filter':
            node_id = request.POST['node']
            result = {
                'max': 0,
                'data': []
            }

            node = Node.objects.with_id(ObjectId(node_id))
            repo = node.system_settings.pf_settings.repository
            params['filter']['node'] = node.name;
            # Rules may be posted under the repository-type key; fall back
            # to the generic 'rules' key.
            try:
                params['filter']['rules'] = json.loads(request.POST[repo.type_uri])
            except:
                params['filter']['rules'] = json.loads(request.POST['rules'])

        elif request.POST['type_data'] == 'vulture':
            repo = cluster.system_settings.global_settings.repository
            params['filter']['rules'] = json.loads(request.POST['rules'])

        # NOTE(review): if 'type_data' matches none of the branches above,
        # 'repo' (and for non-packet_filter types 'result') is unbound here;
        # the resulting NameError is swallowed by the generic except below.
        try:

            if repo.type_uri == 'mongodb':
                mongo_client = MongoDBClient(repo)
                result = mongo_client.search(params)

            elif repo.type_uri == 'elasticsearch':
                elastic_client = ElasticSearchClient(repo)
                result = elastic_client.search(params)

            # NOTE(review): fixed, shared path -- concurrent exports or
            # multiple users clobber each other's file; consider tempfile.
            # Also no writer.writeheader(): the CSV has data rows only.
            with open('/tmp/logs.csv', 'w') as csvfile:
                writer = csv.DictWriter(csvfile, result['data'][0].keys())
                for row in result['data']:
                    if '@timestamp' in row:
                        row.pop('@timestamp')

                    # Mongo returns naive datetimes; normalize to UTC ISO text.
                    if repo.type_uri == 'mongodb':
                            row['time'] = row['time'].replace(tzinfo=pytz.UTC).strftime("%Y-%m-%dT%H:%M:%S%z")

                    writer.writerow(row)

            return JsonResponse({'status': True})
        except IndexError:
            # result['data'][0] above raised: the search returned no rows.
            return JsonResponse({
                "status"              : False,
                "reason"              : "Index Error:\n search results are empty"
            })
        except ClientLogException as e:
            return JsonResponse({
                "status"              : False,
                "reason"              : "Error:\n" + str(e)
            })
        except Exception:
            return JsonResponse({
                "status"              : False,
                "reason"              : "Error:\nAn error occured while exporting logs"
            })
    elif request.method == 'GET':
        # Stream the file produced by a prior POST.
        # NOTE(review): other HTTP methods fall through and return None.
        wrapper      = FileWrapper(open('/tmp/logs.csv'))
        content_type = mimetypes.guess_type('/tmp/logs.csv')[0]
        response     = HttpResponse(wrapper,content_type=content_type)
        response['Content-Length']      = os.path.getsize('/tmp/logs.csv')
        response['Content-Disposition'] = "attachment; filename=logs.csv"
        return response
Esempio n. 16
0
def get_wrapped_file(src_filename):
    """Open a file from the module-local ``backups`` directory for streaming.

    :param src_filename: file name relative to ``<this module's dir>/backups``.
    :returns: tuple of (FileWrapper over the opened binary file, size in bytes).
    :raises IOError: if the file does not exist or cannot be opened.
    """
    prefix = os.path.dirname(os.path.realpath(__file__))
    # SECURITY NOTE(review): src_filename is joined into the path unchecked;
    # a value containing '..' (or an absolute path) could escape the backups
    # directory -- confirm callers sanitize it.
    src_path = os.path.join(prefix, "backups", src_filename)
    return FileWrapper(open(src_path, "rb")), os.path.getsize(src_path)
Esempio n. 17
0
def download_datafiles(request):
    """Stream selected datafiles as a tar/zip archive built by a shell pipeline.

    Selection comes from the POSTed "datafile", "dataset" or "url" lists;
    "expid" prefixes the paths inside the archive and names the download.
    Large selections (>500 files, or any dataset selection) are forced to tar.
    The archive is produced by spawning tar/zip and streaming its stdout.
    """
    # Create the HttpResponse object with the appropriate headers.
    # TODO: handle no datafile, invalid filename, all http links
    # (tarfile count?)
    expid = request.POST['expid']
    # fileString accumulates double-quoted relative paths that are later
    # interpolated into the tar/zip shell command line.
    fileString = ''

    comptype = "zip"
    if 'comptype' in request.POST:
        comptype = request.POST['comptype']

    if 'datafile' in request.POST:
        if len(request.POST.getlist('datafile')) > 500:
            comptype = "tar"

    if 'dataset' in request.POST:
        comptype = "tar"  #todo quickfix, calc how many files

    # the following protocols can be handled by this module
    protocols = ['', 'file', 'tardis']
    known_protocols = len(protocols)
    if 'datafile' in request.POST or 'dataset' in request.POST:
        # NOTE(review): parentheses make this (len(a) > 0 or len(b)) > 0 --
        # it evaluates truthily in all cases but the intent is clearly
        # "either list non-empty"; confirm before relying on it.
        if (len(request.POST.getlist('datafile')) > 0 \
                or len(request.POST.getlist('dataset'))) > 0:

            datasets = request.POST.getlist('dataset')
            datafiles = request.POST.getlist('datafile')

            # Collect every accessible file of every selected dataset.
            for dsid in datasets:
                for datafile in Dataset_File.objects.filter(dataset=dsid):
                    if has_datafile_access(request=request,
                                           dataset_file_id=datafile.id):
                        # Remember unknown protocols so we can redirect to an
                        # external download provider below.
                        p = datafile.protocol
                        if not p in protocols:
                            protocols += [p]
                        absolute_filename = datafile.url.partition('//')[2]
                        if (datafile.url.partition('//')[0] == 'tardis:'):
                            #temp fix for old data
                            filepath = '%s/%s/%s' %\
                            (expid, str(datafile.dataset.id),
                                absolute_filename)

                            print filepath + "######"

                            # Existence probe by opening the file; falls back
                            # to the legacy path layout on IOError.
                            try:
                                wrapper = FileWrapper(file(
                                    datafile.get_absolute_filepath()))\
                                #exists test. os.exists broken

                            except IOError:
                                print "OLD FILE DETECTED"
                                filepath = '%s/%s' % (expid, absolute_filename)

                            fileString += ('\"' + filepath + '\" ')
                            print fileString
                        else:
                            fileString += '\"%s/%s\" ' %\
                            (expid, absolute_filename)

            # Individually selected datafiles (skip ones whose dataset was
            # already selected above).
            for dfid in datafiles:
                datafile = Dataset_File.objects.get(pk=dfid)
                if datafile.dataset.id in datasets:
                    continue
                if has_datafile_access(request=request,
                                       dataset_file_id=datafile.id):
                    p = datafile.protocol
                    if not p in protocols:
                        protocols += [p]
                    absolute_filename = datafile.url.partition('//')[2]
                    if (datafile.url.partition('//')[0] == 'tardis:'):
                        #temp fix for old data
                        filepath = '\"%s/%s/%s\" ' %\
                        (expid, str(datafile.dataset.id),
                            absolute_filename)

                        print filepath + "######"

                        try:
                            wrapper = FileWrapper(file(
                                datafile.get_absolute_filepath()))\
                            #exists test. os.exists broken

                        except IOError:
                            print "OLD FILE DETECTED"
                            filepath = '\"%s/%s\" ' %\
                                       (expid, absolute_filename)

                        fileString += filepath
                        print fileString
                    else:
                        fileString += '\"%s/%s\" ' % (expid, absolute_filename)
        else:
            return return_response_not_found(request)

    elif 'url' in request.POST:
        if not len(request.POST.getlist('url')) == 0:
            comptype = "tar"  #todo quickfix for zip error
            fileString = ""
            # Resolve each posted url to a datafile within the experiment.
            for url in request.POST.getlist('url'):
                url = urllib.unquote(url)
                raw_path = url.partition('//')[2]
                experiment_id = request.POST['expid']
                datafile = Dataset_File.objects.filter(
                    url__endswith=raw_path,
                    dataset__experiment__id=experiment_id)[0]
                if has_datafile_access(request=request,
                                       dataset_file_id=datafile.id):
                    p = datafile.protocol
                    if not p in protocols:
                        protocols += [p]
                    absolute_filename = datafile.url.partition('//')[2]
                    if (datafile.url.partition('//')[0] == 'tardis:'):
                        # expects tardis: formatted stuff
                        # to not include dataset id

                        #temp fix for old data
                        filepath = '\"%s/%s/%s\" ' %\
                            (expid, str(datafile.dataset.id),
                            absolute_filename)

                        print filepath + "######"

                        try:
                            wrapper = FileWrapper(file(
                                datafile.get_absolute_filepath()))\
                            #exists test. os.exists broken

                        except IOError:
                            print "OLD FILE DETECTED"
                            filepath = '\"%s/%s\" ' %\
                                       (expid, absolute_filename)

                        fileString += ('\"' + filepath + '\" ')
                        print fileString
                    else:
                        fileString += '\"%s/%s\" ' % (expid, absolute_filename)
        else:
            return return_response_not_found(request)
    else:
        return return_response_not_found(request)

    # more than one external download location?
    if len(protocols) > known_protocols + 2:
        response = HttpResponseNotFound()
        response.write('<p>Different locations selected!</p>\n')
        response.write('Please limit your selection and try again.\n')
        return response

    # redirect request if another (external) download protocol was found
    elif len(protocols) == known_protocols + 1:
        from django.core.urlresolvers import reverse, resolve
        try:
            for module in settings.DOWNLOAD_PROVIDERS:
                if module[0] == protocols[3]:
                    url = reverse('%s.download_datafiles' % module[1])
                    view, args, kwargs = resolve(url)
                    kwargs['request'] = request
                    return view(*args, **kwargs)
        except:
            return return_response_not_found(request)

    else:
        # tarfile class doesn't work on large files being added and
        # streamed on the fly, so going command-line-o
        if not fileString:
            return return_response_error(request)

        # SECURITY NOTE(review): fileString is interpolated into a command run
        # with shell=True; a filename containing a double quote or shell
        # metacharacters could inject commands. Confirm upstream sanitization
        # of datafile urls/names, or switch to an argument list without a shell.
        if comptype == "tar":
            cmd = 'tar -C %s -c %s' % (settings.FILE_STORE_PATH, fileString)

            # logger.info(cmd)
            response = \
                HttpResponse(FileWrapper(subprocess.Popen(
                                                    cmd,
                                                    stdout=subprocess.PIPE,
                                                    shell=True).stdout),
                             mimetype='application/x-tar')
            response['Content-Disposition'] = \
                    'attachment; filename="experiment%s-selection.tar"' % expid
            return response
        else:
            cmd = 'cd %s; zip -r - %s' % (settings.FILE_STORE_PATH, fileString)
            # logger.info(cmd)
            response = \
                HttpResponse(FileWrapper(subprocess.Popen(
                                                    cmd,
                                                    stdout=subprocess.PIPE,
                                                    shell=True).stdout),
                             mimetype='application/zip')
            response['Content-Disposition'] = \
                    'attachment; filename="experiment%s-selection.zip"' % expid
            return response
Esempio n. 18
0
def stv_count(request):
    """Stand-alone STV (single transferable vote) counting page.

    Dispatches on query parameters:
      ?form=1     -- show/process the manual election + ballots form wizard;
                     a fully valid ballots formset sets el_data and do_count.
      ?reset=1    -- drop the session results and redirect back.
      ?download=x -- stream a previously generated result file (pdf/json/...).
      plain POST  -- count either the wizard data or an uploaded JSON file,
                     store the produced result files in the session, redirect.
    Otherwise renders the page with any session-stored results.
    """

    context = {'menu_active': 'home'}
    session = request.session.get('stvcount', {})
    # Result-file paths produced by an earlier count, keyed by format.
    results_generated = context['results'] = session.get('results', {})
    el_data = None

    do_count = True
    if request.GET.get('form', None):
        # Manual entry mode: counting only happens once the ballots formset
        # validates (do_count is re-enabled below).
        do_count = False
        from zeus.forms import STVElectionForm, STVBallotForm
        form = STVElectionForm()

        ballots_form = None
        if request.method == "POST":
            form = STVElectionForm(request.POST, disabled=False)
            if form.is_valid():
                candidates = form.get_candidates()

                # Formset form bound to this election's candidate list.
                class F(STVBallotForm):
                    pass

                setattr(F, 'candidates', candidates)
                formset_count = int(form.cleaned_data.get('ballots_count'))
                if not request.POST.get('submit_ballots', False):
                    # First step: render empty ballot forms to fill in.
                    BallotsForm = formset_factory(F,
                                                  extra=formset_count,
                                                  max_num=formset_count)
                    ballots_form = BallotsForm()
                else:
                    # Second step: validate the submitted ballots.
                    BallotsForm = formset_factory(F,
                                                  extra=0,
                                                  max_num=formset_count)
                    ballots_form = BallotsForm(request.POST)
                    if ballots_form.is_valid():
                        el = form.get_data()
                        # Skip empty ballots (no votes chosen).
                        for i, b in enumerate(ballots_form):
                            choices = b.get_choices(i + 1)
                            if not choices.get('votes'):
                                continue
                            el['ballots'].append(b.get_choices(i + 1))
                        el_data = el
                        do_count = True
                    else:
                        context['error'] = _("Invalid ballot data")

        context['import'] = 1
        context['form'] = form
        context['ballots_form'] = ballots_form

    if request.GET.get('reset', None):
        del request.session['stvcount']
        return HttpResponseRedirect(reverse('stv_count'))

    if request.GET.get('download', None) and results_generated:
        filename = results_generated.get(request.GET.get('download', 'pdf'),
                                         '/nofile')
        # Stale session entry (file already gone): force a reset.
        if not os.path.exists(filename):
            return HttpResponseRedirect(reverse('stv_count') + "?reset=1")

        # NOTE(review): content_type is always application/pdf even for the
        # 'json' download key -- confirm whether that is intentional.
        wrapper = FileWrapper(file(filename))
        response = HttpResponse(wrapper, content_type='application/pdf')
        response[
            'Content-Disposition'] = 'attachment; filename=%s' % os.path.basename(
                filename)
        response['Content-Length'] = os.path.getsize(filename)
        return response

    if request.method == "POST" and do_count:
        # el_data comes from the wizard above, or from an uploaded JSON file.
        el_data = el_data or json.loads(request.FILES.get('data').read())
        _uuid = str(uuid.uuid4())
        files = stv_count_and_report(_uuid, el_data)
        # Also persist the raw election data next to the generated reports.
        json_file = os.path.join('/tmp', 'json-stv-results-%s' % _uuid)
        with file(json_file, 'w') as f:
            f.write(json.dumps(el_data, ensure_ascii=False).encode('utf8'))
        files.append(('json', json_file))
        session['results'] = dict(files)
        request.session['stvcount'] = session
        return HttpResponseRedirect(reverse('stv_count'))

    request.session['stvcount'] = session
    return render_template(request, "zeus/stvcount", context)
Esempio n. 19
0
def handler(request):
    # Filemanager-style AJAX dispatcher (legacy Python 2 / Django).
    # POST  -> file upload; GET "mode" selects one of: getinfo, getfolder,
    # rename, delete, addfolder, download.
    # NOTE(review): client-supplied paths ("path", "old", "new") are joined
    # directly onto PROJECT_DIR with no visible sanitisation -- confirm that
    # "../" traversal is blocked upstream.
    if request.method == "POST":
        try:
            result = handle_uploaded_file(request, request.FILES["newfile"])
            # Response is wrapped in <textarea> for the classic hidden-iframe
            # upload transport expected by the front-end file manager.
            return HttpResponse('<textarea>' + encode_json(result) +
                                '</textarea>')
        except:
            # Bare except: log the traceback to stderr and fall through to
            # the generic "failed" response at the bottom.
            type, value, tb = sys.exc_info()
            print >> sys.stderr, type.__name__, ":", value
            print >> sys.stderr, '\n'.join(traceback.format_tb(tb))
    else:
        if request.GET["mode"] == "getinfo":
            # Metadata for a single path, serialised by getInfo().
            return HttpResponse(getInfo(request, request.GET["path"]))

        if request.GET["mode"] == "getfolder":
            # Builds a JSON-ish object mapping each entry's client path to
            # its info blob.  NOTE: `result` is a *list* being extended with
            # strings, so `result += "..."` appends individual characters;
            # HttpResponse joins the iterable back together, so the output
            # is still correct.  getInfo() is assumed to return a string.
            result = []
            d = urllib.unquote(PROJECT_DIR + request.GET["path"])
            request.session["upload_path"] = request.GET["path"]
            result += " { "
            for i, filename in enumerate(os.listdir(d)):
                # Skip VCS/OS metadata entries.
                if filename != ".svn" and filename != ".DS_Store":
                    result += '"' + request.GET["path"] + filename + '" : '
                    result += getInfo(request, request.GET["path"] + filename)
                    if i < (len(os.listdir(d)) - 1):
                        result += " , "
            result += " } "
            return HttpResponse(result)

        if request.GET["mode"] == "rename":
            old = PROJECT_DIR + request.GET["old"]
            path = split_path(old)[0]

            oldname = split_path(old)[-1]

            # Directories are addressed with a trailing slash.
            if os.path.isdir(old + "/"):
                old += '/'

            newname = request.GET["new"]
            newpath = path + '/' + newname

            try:
                print "old:" + split_path(old)[0].replace(PROJECT_DIR, "")
                print "newpath:" + split_path(newpath)[0].replace(
                    PROJECT_DIR, "")
                os.rename(old, newpath)
                # On success the protocol reuses the Error field to carry
                # the new name.
                error_message = newname
                success_code = "0"
            except:
                type, value, tb = sys.exc_info()
                print >> sys.stderr, type.__name__, ":", value
                print >> sys.stderr, '\n'.join(traceback.format_tb(tb))
                success_code = "500"
                error_message = _('There was an error renaming the file.')

            if os.path.isdir(newpath + "/"):
                newpath += '/'

            result = {
                'Old Path': split_path(old)[0].replace(PROJECT_DIR, "") + "/",
                'Old Name': oldname,
                'New Path':
                split_path(newpath)[0].replace(PROJECT_DIR, "") + "/",
                'New Name': newname,
                'Error': error_message,
                'Code': success_code
            }

            return HttpResponse(encode_json(result))

        if request.GET["mode"] == "delete":
            fullpath = PROJECT_DIR + request.GET["path"]
            if os.path.isdir(fullpath + "/"):
                if not fullpath[-1] == '/':
                    fullpath += '/'

            try:
                directory = split_path(fullpath)[0]
                name = split_path(fullpath)[-1]
                # NOTE(review): os.remove() raises on directories, so the
                # isdir branch above only normalises the path -- directory
                # deletion always lands in the except handler below.
                os.remove(fullpath)
                error_message = _('%(name)s was deleted successfully.') % {
                    'name': name
                }
                success_code = "0"
            except:
                error_message = _(
                    'There was an error deleting the file. <br/> The operation was either not permitted or it may have already been deleted.'
                )
                success_code = "500"

            result = {
                'Path': fullpath.replace(PROJECT_DIR, ""),
                'Name': name,
                'Error': error_message,
                'Code': success_code
            }

            return HttpResponse(encode_json(result))

        if request.GET["mode"] == "addfolder":
            path = PROJECT_DIR + request.GET["path"]
            # Spaces are not allowed in new folder names.
            newName = request.GET["name"].replace(" ", "_")

            newPath = path + newName + "/"

            if path_exists(path):
                try:
                    os.mkdir(newPath)
                    success_code = "0"
                    error_message = _('Successfully created folder.')
                except:
                    error_message = _(
                        'There was an error creating the directory.')
                    success_code = "500"
            else:
                success_code = "500"
                error_message = _('There is no Root Directory.')

            result = {
                'Path': request.GET["path"],
                'Parent': request.GET["path"],
                'Name': newName,
                'New Path': newPath,
                'Error': error_message,
                'Code': success_code
            }
            return HttpResponse(encode_json(result))

        if request.GET["mode"] == "download":
            abspath = PROJECT_DIR + request.GET["path"]
            # Py2 built-in file(); streamed via FileWrapper, closed by GC.
            wrapper = FileWrapper(file(abspath))
            response = HttpResponse(wrapper)
            response['Content-Length'] = os.path.getsize(abspath)
            response[
                'Content-Disposition'] = 'attachment; filename=%s' % split_path(
                    abspath)[-1]
            return response

    # Unknown mode, or a POST whose upload raised.
    return HttpResponse("failed")
Esempio n. 20
0
def downloadImage(request, result_id, band):
    if request.method == 'GET':
        resultImg = models.QueryResult.objects.filter(pk=result_id).first()
        if resultImg == None:
            raise Http404

        tiles = list(resultImg.tileMatrix)

        src_srs = osr.SpatialReference()
        src_srs.ImportFromWkt(tiles[0].image.wkt)
        tgt_srs = osr.SpatialReference()
        tgt_srs.ImportFromEPSG(4326)
        tgt_srs = src_srs.CloneGeogCS()

        preClipDS, preClipSize, preClipGeoTransform = GetPreClipImage(
            tiles, band, resultImg.imageName, src_srs, tgt_srs)
        # print preClipGeoTransform

        # Raster of input polygons
        rasterPoly = Image.new("L", (preClipSize[0], preClipSize[1]), 1)
        rasterize = ImageDraw.Draw(rasterPoly)

        inputPolygons = resultImg.inputPolygons.polygons

        mostULx = mostLRx = mostULy = mostLRy = None

        for polygon in inputPolygons:
            pixels = []
            inputPolygonReprojected = ReprojectCoords(
                polygon['coordinates'][0], tgt_srs, src_srs)
            for p in inputPolygonReprojected:
                pixels.append(world2Pixel(preClipGeoTransform, p[0], p[1]))

            pixels = intersectPolygonToBorder(pixels, preClipSize[0],
                                              preClipSize[1])
            if pixels is None:
                continue

            print pixels

            if mostULx == None or   \
                mostLRx == None or  \
                mostULy == None or  \
                mostLRy == None:

                mostULx = mostLRx = pixels[0][0]
                mostULy = mostLRy = pixels[0][1]

            for x, y in pixels:
                if x > mostLRx:
                    mostLRx = x
                if x < mostULx:
                    mostULx = x
                if y < mostULy:
                    mostULy = y
                if y > mostLRy:
                    mostLRy = y

            # mostULx, mostULy = world2Pixel(preClipGeoTransform, mostULx, mostULy)
            # mostLRx, mostLRy = world2Pixel(preClipGeoTransform, mostLRx, mostLRy)

            mostULx = 0 if mostULx < 0 else mostULx
            mostLRx = 0 if mostLRx < 0 else mostLRx
            mostULy = 0 if mostULy < 0 else mostULy
            mostLRy = 0 if mostLRy < 0 else mostLRy

            rasterize.polygon(pixels, 0)

        print '%i %i %i %i' % (mostULx, mostULy, mostLRx, mostLRy)

        # clipped the output dataset by minimum rect
        clip = preClipDS.GetRasterBand(1).ReadAsArray(
            0, 0, preClipSize[0], preClipSize[1])[mostULy:mostLRy,
                                                  mostULx:mostLRx]

        # create mask to clip image by polygon
        mask = imageToArray(rasterPoly)[mostULy:mostLRy, mostULx:mostLRx]

        # Clip the image using the mask
        clip = gdalnumeric.choose(mask, (clip, 0)).astype(gdalnumeric.uint16)

        finalFile = NamedTemporaryFile(suffix='.tif',
                                       prefix=resultImg.imageName + '-' +
                                       str(band))
        gdalnumeric.SaveArray(clip, str(finalFile.name), format="GTiff")

        clippedGeoTransform = [
            preClipGeoTransform[0] + mostULx * preClipGeoTransform[1],
            preClipGeoTransform[1], preClipGeoTransform[2],
            preClipGeoTransform[3] + mostULy * preClipGeoTransform[5],
            preClipGeoTransform[4], preClipGeoTransform[5]
        ]

        ds = gdal.Open(str(finalFile.name), gdal.GA_Update)
        ds.SetGeoTransform(clippedGeoTransform)
        ds.SetProjection(src_srs.ExportToWkt())

        # Return HttpResponse Image
        wrapper = FileWrapper(finalFile)
        content_type = mimetypes.guess_type(finalFile.name)[0]
        response = StreamingHttpResponse(wrapper, content_type='content_type')
        response[
            'Content-Disposition'] = "attachment; filename=%s" % finalFile.name

        return response

        # return HttpResponse(json.dumps(dict(out=output_geo_transform,
        #     ext=ext,
        #     finalXSize=finalXSize,
        #     finalYSize=finalYSize)))

    raise Http404
Esempio n. 21
0
def newgame(request):
    # GET: render the empty "new game" form.
    # POST: parse the submitted away/home score sheets, validate the date /
    # game id, then either render the detail entry page ('send'), return the
    # parsed record as a downloadable text file ('download'), or re-render
    # the form with a preview (default).
    if request.method != 'POST':
        teams = Team.objects.all()
        context = {'teams': teams}
        return render(request, 'sbleague/newgame.html', context)

    else:

        teams = Team.objects.all()

        home_teamID = request.POST.get("hometeamID", "")
        away_teamID = request.POST.get("awayteamID", "")

        # NOTE(review): Team.objects.get() raises if the posted id is
        # missing/invalid -- no error page is rendered for that case.
        hometeam = Team.objects.get(pk=home_teamID)
        awayteam = Team.objects.get(pk=away_teamID)

        homeplayer = Member.objects.filter(team=home_teamID).order_by("number")
        awayplayer = Member.objects.filter(team=away_teamID).order_by("number")

        date = request.POST.get("date", "")
        location = request.POST.get("location", "")
        game_id = request.POST.get("game_id", "")

        #############################################################################
        away_record = request.POST.get("away_rd", "")
        home_record = request.POST.get("home_rd", "")

        record = None
        record_err = ""
        # ===== record parser
        # Both raw score sheets must be present.  Team names are truncated
        # to the first 6 UTF-8 bytes (3 CJK chars at 2 bytes each in GBK-era
        # data, 2 at 3 bytes in UTF-8 -- presumably what the parser expects;
        # TODO confirm) before being handed to parse_game_record().
        if (len(away_record) and len(home_record)):
            awayteam_name = awayteam.name.encode('utf8')[0:6]
            hometeam_name = hometeam.name.encode('utf8')[0:6]
            away_table = text_to_table(away_record.encode('utf8'))
            home_table = text_to_table(home_record.encode('utf8'))
            record, record_err = parse_game_record(awayteam_name, None,
                                                   away_table, hometeam_name,
                                                   None, home_table)

            # Stamp game metadata and keep the raw sheets for later display.
            record.game_type = "台大慢壘聯盟"
            record.date = date
            record.location = location
            record.game_id = game_id
            record.away.raw_record = away_record.encode('utf8')
            record.home.raw_record = home_record.encode('utf8')
        else:
            if (len(away_record) == 0):
                record_err = "Away 沒有記錄"
            else:
                record_err = "Home 沒有記錄"


#############################################################################

        # --- validation: date is required.
        if (date == u''):
            err_message = "請輸入日期"
            context = {
                'teams': teams,
                'awayteam': awayteam,
                'hometeam': hometeam,
                'date': date,
                'location': location,
                'game_id': game_id,
                'away_record': away_record,
                'home_record': home_record,
                'warning': err_message
            }

            return render(request, 'sbleague/newgame.html', context)

        # --- validation: game id is required.
        if (game_id == u''):
            err_message = "請輸入場次編號"
            context = {
                'teams': teams,
                'awayteam': awayteam,
                'hometeam': hometeam,
                'date': date,
                'location': location,
                'game_id': game_id,
                'away_record': away_record,
                'home_record': home_record,
                'warning': err_message
            }

            return render(request, 'sbleague/newgame.html', context)

        game_exist = True
        try:
            new = Game.objects.get(gameID=game_id)

        except Game.DoesNotExist:  # --- add new game

            game_exist = False

            max_batter_nums = 25
            max_pitcher_nums = 5

            # Pad the parsed rosters to fixed sizes so the detail template
            # always has 25 batter rows and 5 pitcher rows per side.
            if (record != None and record_err == ""):
                # --- append batter_num to 25 and pitcher_num to 5
                if (record.away.nBatters < max_batter_nums):
                    for i in range(max_batter_nums - record.away.nBatters):
                        record.away.batters.append(rdBatter())

                if (record.home.nBatters < max_batter_nums):
                    for i in range(max_batter_nums - record.home.nBatters):
                        record.home.batters.append(rdBatter())

                if (record.away.nPitchers < max_pitcher_nums):
                    for i in range(max_pitcher_nums - record.away.nPitchers):
                        record.away.pitchers.append(rdPitcher())

                if (record.home.nPitchers < max_pitcher_nums):
                    for i in range(max_pitcher_nums - record.home.nPitchers):
                        record.home.pitchers.append(rdPitcher())

        # --- validation: game id must be unique.
        if (game_exist):
            err_message = "重複的場次編號"
            context = {
                'teams': teams,
                'awayteam': awayteam,
                'hometeam': hometeam,
                'date': date,
                'location': location,
                'game_id': game_id,
                'away_record': away_record,
                'home_record': home_record,
                'warning': err_message
            }

            return render(request, 'sbleague/newgame.html', context)

        # === record error
        if (record_err != ""):
            err_message = record_err
            context = {
                'teams': teams,
                'awayteam': awayteam,
                'hometeam': hometeam,
                'date': date,
                'location': location,
                'game_id': game_id,
                'away_record': away_record,
                'home_record': home_record,
                'warning': err_message
            }

            return render(request, 'sbleague/newgame.html', context)

        # === success add new game
        if 'send' in request.POST:  # --- send data
            context = {
                'hometeam': hometeam,
                'awayteam': awayteam,
                'homeplayer': homeplayer,
                'awayplayer': awayplayer,
                'date': date,
                'location': location,
                'game_id': game_id,
                'home_away': range(2),
                'max_batter_nums': max_batter_nums,
                'max_pitcher_nums': max_pitcher_nums,
                'record': record
            }

            return render(request, 'sbleague/newgame_detail.html', context)

        elif 'download' in request.POST:

            # Persist the PTT-formatted record under static/txt/ and stream
            # it back as an attachment.  NOTE(review): writing into the
            # static tree from a request handler -- confirm this is wanted.
            filename = '%d.txt' % int(game_id)
            filepath = 'sbleague/static/txt/%s' % filename
            with open(filepath, 'w') as f:
                f.write(record.post_ptt)
                print "save %s" % filepath

            response = HttpResponse(
                FileWrapper(file(filepath)),
                content_type=mimetypes.guess_type(filepath)[0])
            response[
                'Content-Disposition'] = 'attachment; filename=%s' % filename
            response['Content-Length'] = os.path.getsize(filepath)

            return response

        else:  # --- preview
            err_message = "preview"
            context = {
                'teams': teams,
                'awayteam': awayteam,
                'hometeam': hometeam,
                'date': date,
                'location': location,
                'game_id': game_id,
                'away_record': away_record,
                'home_record': home_record,
                'warning': err_message,
                'record': record,
                'preview': True
            }

            return render(request, 'sbleague/newgame.html', context)
Esempio n. 22
0
def export(request, pk):
    # Export a protocol's node sequences and their primers as FASTA or
    # plain text, packaged into a zip attachment.
    #
    # The GET "layers" parameter is space-separated:
    #     "<selector> [<format>] [<protocol-filter>]"
    # where <selector> is 'target' (root nodes), 'oligos' (leaf nodes) or a
    # depth range "a-b", <format> is 'plain' or anything-else (-> fasta),
    # and <protocol-filter> (e.g. "V1") drops nodes whose name matches
    # neither 'Common' nor the filter.
    requestProtocol = Protocol.objects.get(id=pk)

    layers = request.GET['layers'].split(' ')

    if len(layers) > 1:
        format = layers[1]
        layerType = layers[0]

    else:
        format = 'fasta'
        layerType = ''.join(layers)

    if layerType == 'target':
        # Root nodes, de-duplicated by name.
        # NOTE(review): removing from `nodes` while iterating it makes the
        # for-loop skip the element after each removal -- duplicates can
        # survive.  Same pattern in every branch below.
        nodes = [
            node for node in Node.objects.filter(
                protocol=requestProtocol).filter(isRoot=True)
        ]
        for node in nodes:
            if [item.name for item in nodes].count(node.name) > 1:
                for checkNode in nodes[nodes.index(node) + 1:]:
                    if checkNode.name == node.name:
                        nodes.remove(checkNode)

    elif layerType == 'oligos':
        # Leaf nodes, de-duplicated by name or by identical target sequence.
        nodes = [
            node for node in Node.objects.filter(
                protocol=requestProtocol).filter(isLeaf=True)
        ]

        for node in nodes:
            for checkNode in nodes[nodes.index(node) + 1:]:
                if checkNode.name == node.name or checkNode.target == node.target:
                    nodes.remove(checkNode)

    elif layerType == 1:
        # NOTE(review): layerType is always a string here, so comparing it
        # with the int 1 is always False -- this branch is unreachable dead
        # code.  Single-depth selectors fall through to the range branch
        # below instead.  Presumably the intent was a single-layer export;
        # confirm before removing or fixing.
        layers = request.GET['layers']
        nodes = [
            node for node in Node.objects.filter(
                protocol=requestProtocol).filter(treeDepth=layers)
        ]
        for node in nodes:
            if [item.name for item in nodes].count(node.name) > 1:
                for checkNode in nodes[nodes.index(node) + 1:]:
                    if checkNode.name == node.name:
                        nodes.remove(checkNode)

    else:
        # Depth range "a-b": export all nodes whose treeDepth lies in it.
        layers = request.GET['layers'].split(' ')[0].split('-')
        layers = [(int)(num) for num in layers]
        layers = range(min(layers), max(layers) + 1)
        nodes = [
            node for node in Node.objects.filter(
                protocol=requestProtocol).filter(treeDepth__in=layers)
        ]

        for node in nodes:
            if [item.name for item in nodes].count(node.name) > 1:
                for checkNode in nodes[nodes.index(node):]:
                    if checkNode.name == node.name and [
                            item.name for item in nodes
                    ].count(node.name) > 1:
                        nodes.remove(checkNode)

    if len(
            request.GET['layers'].split(' ')
    ) > 2:  # filter out nodes that aren't a part of the desired target (V1, V2, etc.)
        protocol = request.GET['layers'].split(' ')[2]
        nodesAltered = nodes
        for node in nodes:
            print node
            if node.name.count('Common') == 0 and node.name.count(
                    str(protocol)) == 0:
                nodesAltered.remove(node)
                print len(nodesAltered)
        nodes = nodesAltered
        print nodes, len(nodes)

    # Build SeqRecord lists: one for the target sequences, one for primers.
    seqList = []
    primerList = []
    for node in nodes:
        seq = Seq(node.target, generic_dna)
        seqrecord = SeqRecord(seq, id=node.name)
        seqList.append(seqrecord)

        if layerType == 'oligos':
            # A leaf inherits its single primer from whichever parent links
            # to it (left parent -> forward primer, right -> reverse).
            if len(node.leftParentNode.all()) > 0:
                primer = Seq(
                    node.leftParentNode.all()[0].forwardPrimer.sequence,
                    generic_dna)
                record = SeqRecord(primer, id=node.name + '_f_prm')

            else:
                primer = Seq(
                    node.rightParentNode.all()[0].reversePrimer.sequence,
                    generic_dna)
                record = SeqRecord(primer, id=node.name + '_r_prm')

            primerList.append(record)

        elif not (node.isLeaf):
            # Non-leaf nodes carry both a forward and a reverse primer.
            fprimer = Seq(node.forwardPrimer.sequence, generic_dna)
            frecord = SeqRecord(fprimer, id=node.name + '_Primer_Forward')
            rprimer = Seq(node.reversePrimer.sequence, generic_dna)
            rrecord = SeqRecord(rprimer, id=node.name + '_Primer_Reverse')
            primerList.append(frecord)
            primerList.append(rrecord)

    if format == 'plain':
        # Tab-separated text: id, sequence, length -- one line per record.
        seqFilename = 'data/' + str(requestProtocol.name) + '_sequences.txt'
        prmFilename = 'data/' + str(requestProtocol.name) + '_primers.txt'
        seqPath = os.path.join(os.getcwd(), seqFilename)
        prmPath = os.path.join(os.getcwd(), prmFilename)
        seqOutputHandle = open(seqPath, 'w')
        prmOutputHandle = open(prmPath, 'w')

        for rec in seqList:
            seqOutputHandle.write(rec.id + '\t' + str(rec.seq) + '\t' +
                                  str(len(str(rec.seq))) + '\n')

        seqOutputHandle.close()

        for rec in primerList:
            prmOutputHandle.write(rec.id + '\t' + str(rec.seq) + '\t' +
                                  str(len(str(rec.seq))) + '\n')

        prmOutputHandle.close()

    else:
        # FASTA via Biopython's SeqIO.
        seqFilename = 'data/' + str(requestProtocol.name) + '_sequences.fasta'
        prmFilename = 'data/' + str(requestProtocol.name) + '_primers.fasta'
        seqPath = os.path.join(os.getcwd(), seqFilename)
        prmPath = os.path.join(os.getcwd(), prmFilename)
        seqOutputHandle = open(seqPath, 'w')
        prmOutputHandle = open(prmPath, 'w')

        SeqIO.write(seqList, seqOutputHandle, 'fasta')
        SeqIO.write(primerList, prmOutputHandle, 'fasta')
        seqOutputHandle.close()
        prmOutputHandle.close()

    # Bundle both files into a zip and stream it back.
    zipName = 'data/' + str(requestProtocol.name) + '_' + layerType + '.zip'
    zipName = os.path.join(os.getcwd(), zipName)
    zip = zipfile.ZipFile(zipName, mode='w')
    zip.write(seqPath, seqFilename)
    zip.write(prmPath, prmFilename)
    zip.close()

    wrapper = FileWrapper(file(zipName))
    response = HttpResponse(wrapper, content_type='application/octet-stream')
    response['Content-Length'] = os.path.getsize(zipName)
    # zipName[6:] strips the leading 'data/' -- NOTE(review): zipName was
    # re-assigned to an absolute path above, so this slices into os.getcwd()
    # instead; the downloaded filename is likely wrong.  Confirm intent.
    response[
        'Content-Disposition'] = 'attachment; filename="protocol_' + zipName[
            6:] + '"'
    return response
Esempio n. 23
0
def get_wrapped_file(src_filename):
    # Resolve the named backup under <backup-prefix>/backups/ and return a
    # streaming wrapper for it together with its size in bytes (handy for a
    # Content-Length header).
    backup_path = get_backup_prefix() + "/backups/%s" % src_filename
    handle = open(backup_path, "rb")
    return FileWrapper(handle), os.path.getsize(backup_path)
Esempio n. 24
0
def home(request):
    """Build and stream a zip of spoken-tutorial CD content.

    On a valid POST, assembles a zip archive (in a temporary spooled file)
    containing, for every selected FOSS/language pair: tutorial videos,
    subtitle files, shared "common" assets, per-tutorial and per-FOSS HTML
    pages rendered from templates, side-by-side tutorials and static files.
    The finished archive is streamed back as an attachment.  On GET (or an
    invalid form) the selection form is rendered instead.

    Fix vs. previous revision: Content-Length was read with temp.tell()
    *after* temp.seek(0), so it was always 0; the size is now measured
    before rewinding.
    """
    if request.method == 'POST':
        form = CDContentForm(request.POST)

        if form.is_valid():
            temp = tempfile.TemporaryFile()
            archive = zipfile.ZipFile(temp,
                                      'w',
                                      zipfile.ZIP_DEFLATED,
                                      allowZip64=True)
            # Mapping of foss-id -> [language-ids, level-id] posted as JSON.
            # NOTE(review): the default {} would crash json.loads if the key
            # is absent -- presumably the form guarantees it is present.
            selectedfoss = json.loads(request.POST.get('selected_foss', {}))
            all_foss_details = get_all_foss_details(selectedfoss)
            eng_rec = Language.objects.get(name="English")
            languages = set()

            for key, values in selectedfoss.iteritems():
                foss_rec = FossCategory.objects.get(pk=key)
                level = int(values[1])
                # eng_flag: add English srt fallbacks only when English
                # itself was not among the selected languages.
                eng_flag = True
                srt_files = set()
                common_files = set()

                if str(eng_rec.id) in values[0]:
                    eng_flag = False

                # Published (status 1 or 2) tutorials for this FOSS,
                # optionally narrowed to one level.
                t_resource_qs = TutorialResource.objects.filter(
                    Q(status=1) | Q(status=2), tutorial_detail__foss_id=key)

                if level:
                    t_resource_qs = t_resource_qs.filter(
                        tutorial_detail__level_id=level)

                for value in values[0]:
                    language = Language.objects.get(pk=value)
                    add_sheets(archive, foss_rec, language)

                    tr_recs = t_resource_qs.filter(language_id=value).order_by(
                        'tutorial_detail__level', 'tutorial_detail__order',
                        'language__name')

                    languages.add(language.name)

                    for rec in tr_recs:
                        filepath = 'videos/{}/{}/{}'.format(
                            key, rec.tutorial_detail_id, rec.video)

                        # Missing video files are silently skipped.
                        if os.path.isfile(settings.MEDIA_ROOT + filepath):
                            archive.write(settings.MEDIA_ROOT + filepath,
                                          'spoken/' + filepath)

                        # add srt file to archive
                        add_srt_file(archive, rec, filepath, eng_flag,
                                     srt_files)

                        # collect common files
                        collect_common_files(rec, common_files)

                        # Render the per-tutorial "watch" page into the zip.
                        tutorial_path = '{}/{}/'.format(
                            rec.tutorial_detail.foss_id,
                            rec.tutorial_detail_id)
                        filepath = 'spoken/videos/{}show-video-{}.html'.format(
                            tutorial_path, rec.language.name)
                        ctx = {
                            'tr_rec': rec,
                            'tr_recs': tr_recs,
                            'media_path': settings.MEDIA_ROOT,
                            'tutorial_path': tutorial_path
                        }
                        convert_template_to_html_file(
                            archive, filepath, request,
                            "cdcontent/templates/watch_tutorial.html", ctx)

                    # Per-FOSS, per-language index page.
                    filepath = 'spoken/videos/' + str(
                        foss_rec.id
                    ) + '/list-videos-' + language.name + '.html'
                    ctx = {
                        'collection': tr_recs,
                        'foss_details': all_foss_details,
                        'foss': foss_rec.id,
                        'lang': language.id
                    }
                    convert_template_to_html_file(
                        archive, filepath, request,
                        "cdcontent/templates/tutorial_search.html", ctx)

                # add common files for current foss
                add_common_files(archive, common_files)

            # add side-by-side tutorials for selected languages
            languages = add_side_by_side_tutorials(archive, languages)

            # Top-level home page.  NOTE(review): foss_rec/language leak out
            # of the loops above -- the home page is rendered with the last
            # pair only; presumably intentional.
            ctx = {
                'foss_details': all_foss_details,
                'foss': foss_rec.id,
                'lang': language.id,
                'languages': languages
            }
            convert_template_to_html_file(archive, 'spoken/videos/home.html',
                                          request,
                                          "cdcontent/templates/home.html", ctx)

            # add all required static files to archive
            add_static_files(archive)
            archive.close()

            # Measure the finished archive *before* rewinding; calling
            # tell() after seek(0) would always report 0 bytes.
            temp.seek(0, 2)  # 2 == os.SEEK_END
            archive_size = temp.tell()
            temp.seek(0)
            wrapper = FileWrapper(temp)
            response = HttpResponse(wrapper, content_type='application/zip')
            response[
                'Content-Disposition'] = 'attachment; filename=spoken-tutorial-cdcontent.zip'
            response['Content-Length'] = archive_size
            return response
    else:
        form = CDContentForm()
    context = {'form': form}
    context.update(csrf(request))

    return render(request, "cdcontent/templates/cdcontent_home.html", context)
Esempio n. 25
0
def tasks_procmemory(request, task_id, pid="all"):
    """API endpoint: download process memory dump(s) for an analysis task.

    GET only.  With pid="all", tars every dump for the task into one
    .tar.bz2; with a specific pid, returns that dump either compressed
    (config "compress") or streamed raw.  Errors are returned as JSON.

    Fixes vs. previous revision: dump files are opened in binary mode
    ("rb" -- they are raw memory images), and paths are built with
    os.path.join instead of string concatenation.
    """
    if request.method != "GET":
        resp = {"error": True, "error_value": "Method not allowed"}
        return jsonize(resp, response=True)

    # Feature toggle from the API configuration.
    if not apiconf.procmemory.get("enabled"):
        resp = {
            "error": True,
            "error_value": "Process memory download API is disabled"
        }
        return jsonize(resp, response=True)

    check = validate_task(task_id)
    if check["error"]:
        return jsonize(check, response=True)

    # Check if any process memory dumps exist
    srcdir = os.path.join(CUCKOO_ROOT, "storage", "analyses", "%s" % task_id,
                          "memory")
    if not os.path.exists(srcdir):
        resp = {"error": True, "error_value": "No memory dumps saved"}
        return jsonize(resp, response=True)

    if pid == "all":
        if not apiconf.taskprocmemory.get("all"):
            resp = {
                "error":
                True,
                "error_value":
                "Downloading of all process memory dumps "
                "is disabled"
            }
            return jsonize(resp, response=True)

        # Bundle every dump into an in-memory .tar.bz2.
        fname = "%s_procdumps.tar.bz2" % task_id
        s = StringIO()
        tar = tarfile.open(fileobj=s, mode="w:bz2")
        for memdump in os.listdir(srcdir):
            tar.add(os.path.join(srcdir, memdump), arcname=memdump)
        tar.close()
        resp = HttpResponse(s.getvalue(),
                            content_type="application/octet-stream;")
        resp["Content-Length"] = str(len(s.getvalue()))
        resp["Content-Disposition"] = "attachment; filename=" + fname
    else:
        # NOTE(review): `pid` comes from the URL; os.path.join does not stop
        # "../" traversal -- confirm the URL route restricts pid to digits.
        srcfile = os.path.join(srcdir, pid + ".dmp")
        if os.path.exists(srcfile):
            if apiconf.taskprocmemory.get("compress"):
                fname = os.path.basename(srcfile)
                s = StringIO()
                tar = tarfile.open(fileobj=s, mode="w:bz2")
                tar.add(srcfile, arcname=fname)
                tar.close()
                resp = HttpResponse(s.getvalue(),
                                    content_type="application/octet-stream;")
                archive = "%s-%s_dmp.tar.bz2" % (task_id, pid)
                resp["Content-Length"] = str(len(s.getvalue()))
                resp["Content-Disposition"] = "attachment; filename=" + archive
            else:
                mime = "application/octet-stream"
                fname = "%s-%s.dmp" % (task_id, pid)
                # Stream the raw dump; open in binary mode ("rb") since this
                # is arbitrary binary data, not text.
                resp = StreamingHttpResponse(FileWrapper(
                    open(srcfile, "rb"), 8096),
                                             content_type=mime)
                # Specify content length for StreamingHTTPResponse
                resp["Content-Length"] = os.path.getsize(srcfile)
                resp["Content-Disposition"] = "attachment; filename=" + fname
        else:
            resp = {
                "error":
                True,
                "error_value":
                "Process memory dump does not exist for "
                "pid %s" % pid
            }
            return jsonize(resp, response=True)

    return resp
Esempio n. 26
0
def exportData(shapefile):
    """Export *shapefile* (a Shapefile model instance) as a zipped
    ESRI shapefile returned as an HTTP attachment.

    Features are stored internally in EPSG:4326 and are reprojected
    into the spatial reference recorded on the model (``srs_wkd``)
    before being written out.
    """
    # Write the shapefile components into a scratch directory.
    dstDir = tempfile.mkdtemp()
    dstFile = str(os.path.join(dstDir, shapefile.filename))

    # Destination SRS comes from the WKT stored on the model.
    dstSpatialRef = osr.SpatialReference()
    dstSpatialRef.ImportFromWkt(shapefile.srs_wkd)

    driver = ogr.GetDriverByName("ESRI Shapefile")
    datasource = driver.CreateDataSource(dstFile)
    layer = datasource.CreateLayer(str(shapefile.filename), dstSpatialRef)

    # Build the transformation from the internal EPSG:4326 storage SRS
    # into the destination SRS.
    srcSpatialRef = osr.SpatialReference()
    srcSpatialRef.ImportFromEPSG(4326)
    coordTransform = osr.CoordinateTransformation(srcSpatialRef, dstSpatialRef)

    geomField = utils.calcGeometryField(shapefile.geom_type)

    # Define layer attributes.
    for attr in shapefile.attribute_set.all():
        field = ogr.FieldDefn(str(attr.name), attr.type)
        field.SetWidth(attr.width)
        field.SetPrecision(attr.precision)
        layer.CreateField(field)

    # Add all the (single-type) geometries for this layer.
    for feature in shapefile.feature_set.all():

        # Geometry is a django construct inherited from GeometryField.
        geometry = getattr(feature, geomField)
        geometry = utils.unwrapGEOSGeometry(geometry)

        dstGeometry = ogr.CreateGeometryFromWkt(geometry.wkt)
        dstGeometry.Transform(coordTransform)

        dstFeature = ogr.Feature(layer.GetLayerDefn())
        dstFeature.SetGeometry(dstGeometry)

        # Copy the feature's attribute values across.
        for attrValue in feature.attributevalue_set.all():
            utils.setOGRFeatureAttribute(attrValue.attribute, attrValue.value,
                                         dstFeature, shapefile.encoding)

        layer.CreateFeature(dstFeature)
        dstFeature.Destroy()

    datasource.Destroy()

    # Compress the generated shapefile components into a zip archive.
    temp = tempfile.TemporaryFile()
    zipHandle = zipfile.ZipFile(temp, 'w', zipfile.ZIP_DEFLATED)

    for fName in os.listdir(dstDir):
        zipHandle.write(os.path.join(dstDir, fName), fName)
    zipHandle.close()

    shapefileName = os.path.splitext(shapefile.filename)[0]

    # Delete the temporary files (the zip lives in the anonymous
    # TemporaryFile, not in dstDir).
    shutil.rmtree(dstDir)

    # Return the zip archive to the user.
    f = FileWrapper(temp)
    # BUG FIX: the MIME type was "application\zip" (literal backslash);
    # the correct type is "application/zip".
    response = HttpResponse(f, content_type="application/zip")
    response['Content-Disposition'] = \
        "attachment; filename=" + shapefileName + ".zip"
    # The file position is at end-of-archive here, so tell() == size.
    response['Content-Length'] = temp.tell()
    temp.seek(0)
    return response
Esempio n. 27
0
def exportMultiLayersData(layers, encoding, EPSG, format):
    try:
        dstSpatialRef = osr.SpatialReference()
        dstSpatialRef.ImportFromEPSG(EPSG)
        driver = ogr.GetDriverByName(ogrDriver_dic[format])
        dstDir = tempfile.mkdtemp()
        dstFile = str(
            os.path.join(dstDir, layers[0].filename + filenameExt_dic[format]))
        datasource = driver.CreateDataSource(dstFile)
        for layer in layers:

            vlayer = datasource.CreateLayer(str(layer.filename), dstSpatialRef)

            #retrive attributes
            for attr in layer.attribute_set.all():
                field = ogr.FieldDefn(str(attr.name), attr.type)
                field.SetWidth(attr.width)
                field.SetPrecision(attr.precision)
                vlayer.CreateField(field)

            #save features in shapefile
            srcSpatialRef = osr.SpatialReference()
            srcSpatialRef.ImportFromEPSG(3857)
            coordTransform = osr.CoordinateTransformation(
                srcSpatialRef, dstSpatialRef)
            geomField = utils.calcGeometryField(layer.geom_type)
            for feature in layer.feature_set.all().order_by('id_relat'):

                if geometry:
                    geometry = getattr(feature, geomField)
                    geometry = utils.unwrapGEOSGeometry(geometry)
                    dstGeometry = ogr.CreateGeometryFromWkt(geometry.wkt)
                    dstGeometry.Transform(coordTransform)
                else:
                    dstGeometry = None

            #save attributes in the shapefile
                dstFeature = ogr.Feature(vlayer.GetLayerDefn())
                dstFeature.SetGeometry(dstGeometry)
                for attrName, attrValue in feature.attribute_value.iteritems():
                    attribute = Attribute.objects.get(
                        name=str(attrName), shapefile=feature.shapefile)
                    utils.setOGRFeatureAttribute(attribute, attrValue,
                                                 dstFeature, encoding)
                vlayer.CreateFeature(dstFeature)
                dstFeature.Destroy()
        datasource.Destroy()

        #compress the shapefile
        temp = tempfile.TemporaryFile()
        zip = zipfile.ZipFile(temp, 'w', zipfile.ZIP_DEFLATED)
        shapefileBase = os.path.splitext(dstFile)[0]
        shapefileName = os.path.splitext(layer.filename)[0]
        for fName in os.listdir(dstDir):
            zip.write(os.path.join(dstDir, fName), fName)
        zip.close()

        #delete temporary files
        shutil.rmtree(dstDir)

        #return the zip to user
        f = FileWrapper(temp)
        response = StreamingHttpResponse(f, content_type="application/zip")
        response[
            'Content-Disposition'] = "attachment; filename=" + shapefileName + fileExt_dic[
                format]
        response['Content-Length'] = temp.tell()
        temp.seek(0)

        return None, response

    except BaseException, e:
        print "test" + e
        return e.rstrip('\n'), None
Esempio n. 28
0
def export_handler(request, course_key_string):
    """
    The restful handler for exporting a course.

    GET
        html: return html page for import page
        application/x-tgz: return tar.gz file containing exported course
        json: not supported

    Note that there are 2 ways to request the tar.gz file. The request header can specify
    application/x-tgz via HTTP_ACCEPT, or a query parameter can be used (?_accept=application/x-tgz).

    If the tar.gz file has been requested but the export operation fails, an HTML page will be returned
    which describes the error.
    """
    course_key = CourseKey.from_string(course_key_string)
    if not has_course_author_access(request.user, course_key):
        raise PermissionDenied()

    course_module = modulestore().get_course(course_key)

    # an _accept URL parameter will be preferred over HTTP_ACCEPT in the header.
    requested_format = request.REQUEST.get(
        '_accept', request.META.get('HTTP_ACCEPT', 'text/html'))

    export_url = reverse_course_url('export_handler',
                                    course_key) + '?_accept=application/x-tgz'
    if 'application/x-tgz' in requested_format:
        name = course_module.url_name
        export_file = NamedTemporaryFile(prefix=name + '.', suffix=".tar.gz")
        root_dir = path(mkdtemp())

        try:
            # Serialize the course to XML under root_dir/name, then pack
            # that tree into the named temporary tar.gz file.
            export_to_xml(modulestore(), contentstore(), course_module.id,
                          root_dir, name)

            logging.debug(u'tar file being generated at {0}'.format(
                export_file.name))
            with tarfile.open(name=export_file.name, mode='w:gz') as tar_file:
                tar_file.add(root_dir / name, arcname=name)
        except SerializationError as exc:
            log.exception(u'There was an error exporting course %s',
                          course_module.id)
            # Try to pinpoint the failing item and its containing unit so
            # the error page can link straight to it.
            unit = None
            failed_item = None
            parent = None
            try:
                failed_item = modulestore().get_item(exc.location)
                parent_loc = modulestore().get_parent_location(
                    failed_item.location)

                if parent_loc is not None:
                    parent = modulestore().get_item(parent_loc)
                    if parent.location.category == 'vertical':
                        unit = parent
            except:  # pylint: disable=bare-except
                # if we have a nested exception, then we'll show the more generic error message
                pass

            return render_to_response(
                'export.html', {
                    'context_course':
                    course_module,
                    'in_err':
                    True,
                    'raw_err_msg':
                    str(exc),
                    'failed_module':
                    failed_item,
                    'unit':
                    unit,
                    'edit_unit_url':
                    reverse_usage_url("container_handler", parent.location)
                    if parent else "",
                    'course_home_url':
                    reverse_course_url("course_handler", course_key),
                    'export_url':
                    export_url
                })
        except Exception as exc:
            log.exception('There was an error exporting course %s',
                          course_module.id)
            return render_to_response(
                'export.html', {
                    'context_course':
                    course_module,
                    'in_err':
                    True,
                    'unit':
                    None,
                    'raw_err_msg':
                    str(exc),
                    'course_home_url':
                    reverse_course_url("course_handler", course_key),
                    'export_url':
                    export_url
                })
        finally:
            # BUG FIX: previously only root_dir / name was removed, which
            # leaked the mkdtemp() directory itself on every request and
            # raised (masking the real error) when the export failed before
            # the subdirectory was created. Remove the whole scratch tree.
            shutil.rmtree(root_dir, ignore_errors=True)

        # Stream the tarball back to the caller. The NamedTemporaryFile is
        # removed automatically once the wrapper is garbage-collected.
        wrapper = FileWrapper(export_file)
        response = HttpResponse(wrapper, content_type='application/x-tgz')
        response[
            'Content-Disposition'] = 'attachment; filename=%s' % os.path.basename(
                export_file.name.encode('utf-8'))
        response['Content-Length'] = os.path.getsize(export_file.name)
        return response

    elif 'text/html' in requested_format:
        return render_to_response('export.html', {
            'context_course': course_module,
            'export_url': export_url
        })

    else:
        # Only HTML or x-tgz request formats are supported (no JSON).
        return HttpResponse(status=406)
Esempio n. 29
0
def download(modeladmin, request, selected):
    """Admin action: stream a fixed demo payload back as a file download."""
    payload = StringIO('This is the content of the file')
    wrapped = FileWrapper(payload)
    return StreamingHttpResponse(wrapped)
Esempio n. 30
0
    def dispatch(self, request, *args, **kwargs):
        """Serve a stored document as a file download.

        Looks the document up by the ``ix`` page parameter, enforces
        comma-separated group-based access control, logs every attempt
        via ``logdownload``, and streams the file with guessed MIME
        type as an attachment.

        NOTE(review): assumes ``self.page_context['ix']`` has been
        populated by ``preProcessPage`` -- confirm against the base class.
        """

        if self.preProcessPage(request, **kwargs):

            document = None

            # Look up the requested document; distinguish "does not exist"
            # from "bad parameter" so each case gets a tailored message.
            try:
                document = Document.objects.get(pk=self.page_context['ix'])
                # document exists
            except ObjectDoesNotExist:
                # document does not exist
                if request.user.id is None:
                    # user is not logged in
                    return self.showErrorPage(
                        request, 'You need to log in to see this document.')
                else:
                    # user is logged in - give them the honest answer
                    # NOTE(review): 'ix' is concatenated to a str here; if it
                    # can ever be an int this raises TypeError -- confirm.
                    return self.showErrorPage(
                        request, 'Error: document id ' +
                        self.page_context['ix'] + ' does not exist')
            except:
                # Any other failure (e.g. a non-numeric pk) is treated as a
                # bad parameter and logged with the traceback.
                errstr = 'Error: invalid parameter supplied'
                logger.error(errstr, exc_info=True)
                return self.showErrorPage(request, errstr)

            # Recorded in the download log; overwritten below if group-based
            # authorisation applies.
            authmessage = 'Public download'

            # security check: a non-empty grouplist restricts the download to
            # members of at least one of the named groups.
            if document.grouplist is not None:
                if len(document.grouplist) > 0:
                    if request.user.id is not None:
                        authorised = False
                        groupListArr = document.grouplist.split(',')
                        for group in groupListArr:
                            for usergroup in request.user.groups.all():
                                if group == unicode(usergroup):
                                    authorised = True
                                    authmessage = 'User authorised by membership of group ' + group

                        if not authorised:
                            # Log the refusal with a zero byte count.
                            self.logdownload(
                                request, 0, document.id,
                                'Denied access, insufficient access rights')
                            return self.showErrorPage(
                                request,
                                'You do not have sufficient access rights to see this document.'
                            )

                    else:
                        # Restricted document and anonymous user: refuse.
                        self.logdownload(request, 0, document.id,
                                         'Denied access, not logged in')
                        return self.showErrorPage(
                            request,
                            'You need to log in to see this document.')

            # Prefer a non-web-served root for protected files when the
            # setting exists; fall back to the public MEDIA_ROOT.
            from django.conf import settings
            the_media_root = getattr(settings, "SECURE_MEDIA_ROOT",
                                     settings.MEDIA_ROOT)

            filename = the_media_root + '/' + str(document.url)
            try:
                # try to open the file from the url given in the documents table
                wrapper = FileWrapper(open(filename, "rb"))
            except:
                errmsg = 'Error: source file cannot be found on host filesystem: ' + filename
                logger.error(errmsg, exc_info=True)
                self.logdownload(
                    request, 0, document.id,
                    'Failed: source file cannot be found on host filesystem')
                return self.showErrorPage(request, errmsg)

            response = HttpResponse(wrapper)

            response['Content-Length'] = os.path.getsize(filename)

            # Guess Content-Type/Content-Encoding from the file name,
            # defaulting to a generic binary type.
            # NOTE(review): 'type' shadows the builtin of the same name.
            type, encoding = mimetypes.guess_type(filename)
            if type is None:
                type = 'application/octet-stream'
            response['Content-Type'] = type
            if encoding is not None:
                response['Content-Encoding'] = encoding

            # Force a download rather than inline display.
            response[
                'Content-Disposition'] = 'attachment; filename="' + os.path.basename(
                    filename) + '"'

            # Record the successful download together with how access was
            # granted (public or via which group).
            self.logdownload(request, os.path.getsize(filename), document.id,
                             'Successful download (' + authmessage + ')')

            return response