def phila_gis_proxy_view(request, path):
    """
    A small proxy for Philly GIS services, so that they are accessible over
    HTTPS.
    """
    root = 'http://gis.phila.gov/ArcGIS/rest/services/PhilaGov/'
    return proxy_view(request, root + path)
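(For orientation: views like this are usually wired up with a catch-all URL pattern that captures everything after a prefix as `path`. A minimal sketch in the re_path style shown in Example #24 below; the module path and URL prefix are assumptions, not from the original project.)

# Hypothetical urls.py wiring for the proxy above; "myapp.views" and the
# "phila-gis/" prefix are illustrative only.
from django.urls import re_path

from myapp.views import phila_gis_proxy_view

urlpatterns = [
    # Everything after /phila-gis/ is captured as `path` and appended to the
    # ArcGIS REST root by the view.
    re_path(r'^phila-gis/(?P<path>.*)$', phila_gis_proxy_view),
]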
Example #2
def pipe_wms_to_geoserver(request, path=""):
    # TODO: implement authentication here
    layers = request.GET.get("layers", "")

    extra_requests_args = dict()
    remoteurl = 'http://geoserver:8080/geoserver/adalitix/wms'
    return proxy_view(request, remoteurl, extra_requests_args)
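One way the authentication TODO above could be filled in is a permission check before anything is forwarded, along the lines of the guard used in Example #17 further down; this is only a sketch, and the permission codename is made up.

# Hypothetical sketch: reject callers without a custom permission before
# proxying to GeoServer. 'gis.use_geoserver_proxy' is an assumed name, and
# proxy_view is the same helper used throughout these examples.
from django.http import HttpResponseForbidden

def pipe_wms_to_geoserver(request, path=""):
    if not request.user.has_perm('gis.use_geoserver_proxy'):
        return HttpResponseForbidden()
    remoteurl = 'http://geoserver:8080/geoserver/adalitix/wms'
    return proxy_view(request, remoteurl, {})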
Example #3
def api(request, path):
    """
    A small proxy for a Shareabouts API server, exposing only
    one configured dataset.
    """
    root = settings.SHAREABOUTS.get('DATASET_ROOT')
    api_key = settings.SHAREABOUTS.get('DATASET_KEY')
    api_session_cookie = request.COOKIES.get('sa-api-sessionid')

    # It doesn't matter what the CSRF token value is, as long as the cookie and
    # header value match.
    api_csrf_token = '1234csrf567token'

    url = make_resource_uri(path, root)
    headers = {'X-SHAREABOUTS-KEY': api_key, 'X-CSRFTOKEN': api_csrf_token}
    cookies = {'sessionid': api_session_cookie,
               'csrftoken': api_csrf_token} \
              if api_session_cookie else {'csrftoken': api_csrf_token}

    # Clear cookies from the current domain, so that they don't interfere with
    # our settings here.
    request.META.pop('HTTP_COOKIE', None)
    response = proxy_view(request,
                          url,
                          requests_args={
                              'headers': headers,
                              'cookies': cookies
                          })

    if place_was_created(request, path, response):
        send_place_created_notifications(request, response)

    return response
Example #4
def phila_gis_proxy_view(request, path):
    """
    A small proxy for Philly GIS services, so that they are accessible over
    HTTPS.
    """
    root = 'http://gis.phila.gov/ArcGIS/rest/services/PhilaGov/'
    return proxy_view(request, root + path)
Example #5
def csv_download(request, path):
    """
    A small proxy for a Shareabouts API server, exposing only
    one configured dataset.
    """
    root = settings.SHAREABOUTS.get('DATASET_ROOT')
    api_key = settings.SHAREABOUTS.get('DATASET_KEY')
    api_session_cookie = request.COOKIES.get('sa-api-session')

    url = make_resource_uri(path, root)
    headers = {'X-Shareabouts-Key': api_key, 'ACCEPT': 'text/csv'}
    cookies = {'sessionid': api_session_cookie} if api_session_cookie else {}
    response = proxy_view(request,
                          url,
                          requests_args={
                              'headers': headers,
                              'cookies': cookies
                          })

    # Send the csv as a timestamped download
    filename = '.'.join(
        [os.path.split(path)[1],
         now().strftime('%Y%m%d%H%M%S'), 'csv'])
    response['Content-disposition'] = 'attachment; filename=' + filename

    return response
Example #6
def layer_proxy_view(request, layer_id):
    layer = get_object_or_404(Layer, id=layer_id, proxy_url=True)
    new_req_url = None
    
    to = request.GET.get('to')
    from_ = request.GET.get('from')
    
    if not to and not from_: 
        try:
            limit = settings.MP_ONTOLOGY_FILTER_DEFAULT_LIMIT
        except AttributeError:
            limit = 3
        
        to = datetime.date.today()
        from_ = to.replace(year=to.year - limit)

        to = to.strftime('%Y-%m-%d')
        from_ = from_.strftime('%Y-%m-%d')
    
    concepts = request.GET.get('concepts', [])
    if concepts: 
        concepts = [concepts.split(',')]
    categories = request.GET.get('categories', [])
    if categories: 
        categories = categories.split(',')
    type_ = request.GET.get('type')
    
    for category in categories:
        # Look up the concept matching this category and collect the slugs of
        # its descendants
        concept_list = RDFConcept.objects.filter(preflabel=category)
        if not concept_list:
            continue
        concepts.append([child.slug
                         for child in concept_list[0].get_descendants()
                         if child.slug])

    query_parameters = []
    if from_:
        query_parameters.append("from=%s" % from_)
    if to: 
        query_parameters.append("to=%s" % to)
    if type_: 
        query_parameters.append("type=%s" % type_)

    for concept_list in concepts:
        query_parameters.append('c=%s' % (','.join(concept_list)))

    if query_parameters: 
        new_req_url = '%s&%s' % (layer.url, '&'.join(query_parameters))

    resp = None
    if new_req_url:
        r = requests.get(new_req_url)
        # Note: Future versions will need to use r.iter_content and a Django
        # StreamingHttpResponse.
        resp = HttpResponse(r.text, status=r.status_code)
    else:
        # If we don't have any categories we just do this:
        resp = proxy_view(request, layer.url)
    return resp
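The note about r.iter_content could look roughly like the following sketch of a streaming variant of that branch (not the project's code; the chunk size is arbitrary).

# Hypothetical streaming version of the requests.get branch above: pass the
# upstream body through without buffering it all in memory.
import requests
from django.http import StreamingHttpResponse

def stream_filtered_layer(request, new_req_url):
    r = requests.get(new_req_url, stream=True)
    return StreamingHttpResponse(
        r.iter_content(chunk_size=8192),
        status=r.status_code,
        content_type=r.headers.get('Content-Type', 'application/octet-stream'),
    )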
Example #7
def csv_download(request, path):
    """
    A small proxy for a Shareabouts API server, exposing only
    one configured dataset.
    """
    root = settings.SHAREABOUTS.get('DATASET_ROOT')
    api_key = settings.SHAREABOUTS.get('DATASET_KEY')
    api_session_cookie = request.COOKIES.get('sa-api-session')

    url = make_resource_uri(path, root)
    headers = {
        'X-Shareabouts-Key': api_key,
        'ACCEPT': 'text/csv'
    }
    cookies = {'sessionid': api_session_cookie} if api_session_cookie else {}
    response = proxy_view(request, url, requests_args={
        'headers': headers,
        'cookies': cookies
    })

    # Send the csv as a timestamped download
    filename = '.'.join([os.path.split(path)[1],
                        now().strftime('%Y%m%d%H%M%S'),
                        'csv'])
    response['Content-disposition'] = 'attachment; filename=' + filename

    return response
Example #8
def api(request, path):
    """
    A small proxy for a Shareabouts API server, exposing only
    one configured dataset.
    """
    root = settings.SHAREABOUTS.get('DATASET_ROOT')
    api_key = settings.SHAREABOUTS.get('DATASET_KEY')
    api_session_cookie = request.COOKIES.get('sa-api-sessionid')

    # It doesn't matter what the CSRF token value is, as long as the cookie and
    # header value match.
    api_csrf_token = '1234csrf567token'

    url = make_resource_uri(path, root)
    headers = {'X-SHAREABOUTS-KEY': api_key,
               'X-CSRFTOKEN': api_csrf_token}
    cookies = {'sessionid': api_session_cookie,
               'csrftoken': api_csrf_token} \
              if api_session_cookie else {'csrftoken': api_csrf_token}

    # Clear cookies from the current domain, so that they don't interfere with
    # our settings here.
    request.META.pop('HTTP_COOKIE', None)
    response = proxy_view(request, url, requests_args={
        'headers': headers,
        'cookies': cookies
    })

    if place_was_created(request, path, response):
        send_place_created_notifications(request, response)

    return response
Example #9
def file_handler(request, id):
    if request.method == "POST":
        try:
            data = request.FILES.get("data", None)
            file = File.objects.get(id=id)
            file_type = file.file_type

            if file_type == "other":
                file.file = data
                file.save()
            else:
                put_layer(id, data, file_type)

            return HttpResponse("Upload successful")
        except Exception as e:
            print(e)
            return HttpResponse("Upload failed", status=500)
    elif request.method == "GET":
        f = File.objects.get(id=id)
        file_type = f.file_type
        if (file_type == "other"):
            return FileResponse(f.file)
        elif (file_type == "tiff"):
            extra_requests_args = dict()
            remoteurl = 'http://geoserver:8080' + \
                "/geoserver/adalitix/ows?service=WCS&version=2.0.0&request=GetCoverage&coverageId=adalitix:" + id
            return proxy_view(request, remoteurl, extra_requests_args)
    else:
        return HttpResponse("Method Not Allowed", status=405)
Example #10
def backend_proxy_view(request,
                       path,
                       use_auth_header=True,
                       base_remote_url=None):
    """
        TODO: hacky as it's getting the base_url and the auth header from the
            get_connection slumber object.

            Also, we should limit the endpoint accessible from this proxy

        if you specify `use_auth_header` to be false, then it won't use the zone
        or url info from the get_connection slumber object. In that case you
        should pass in the base_remote_url yourself.
    """
    assert use_auth_header or base_remote_url
    if use_auth_header:
        client = get_connection(request)
        extra_requests_args = {
            "headers": {
                k.upper(): v
                for k, v in dict([client._store["session"].auth.get_header()
                                  ]).items()
            }
        }
        if not base_remote_url:
            base_remote_url = client._store["base_url"]
    else:
        extra_requests_args = {}
    remoteurl = u"%s%s" % (base_remote_url, path)
    return proxy_view(request, remoteurl, extra_requests_args)
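When `use_auth_header` is False the caller must supply `base_remote_url` itself; one way to do that is the extra-options dict in a URL pattern. A hypothetical urls.py entry (the prefix, module path, and backend URL are made up):

# Hypothetical wiring: /ext/<path> is proxied to a fixed public backend with no
# slumber auth header. The dict passed as the third argument supplies extra
# keyword arguments to the view.
from django.urls import re_path

from myapp.views import backend_proxy_view

urlpatterns = [
    re_path(r'^ext/(?P<path>.*)$', backend_proxy_view,
            {'use_auth_header': False,
             'base_remote_url': 'https://backend.example.com/api/'}),
]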
Example #11
def proxy_request(request, path):
    if sys.version_info > (3, 0):
        # Python 3 code in this block
        get_keys = request.GET.keys()
    else:
        # Python 2 code in this block
        get_keys = request.GET.iterkeys()

    parameters = []
    for key in get_keys:
        value_list = request.GET.getlist(key)
        parameters.extend([
            '%s=%s' % (key, urllib.parse.quote(val)) for val in value_list
        ])

    if parameters:
        path += '?' + '&'.join(parameters)

    # If we somehow get a malformed url (e.g. 'http:/example.com'), repair the scheme separator
    if len(path.split('://')) == 1:
        new_path = path.split(':/')
        path = '://'.join(new_path)

    request.session['access_token'] = None
    return proxy_view(request, url=path)
Example #12
def mapproxy_view(request, path):
    headers = {
        "Host": "localhost:8000",
        "X-Script-Name": "/mapproxy"
    }

    remoteurl = 'http://127.0.0.1:8889/' + path
    return proxy_view(request, remoteurl, {"headers": headers})
Example #13
def capabilities_proxy_view(request, layer_id):
    layer = get_object_or_404(Layer, id=layer_id)
    if layer.url.endswith('?'):
        orig_url = layer.url[:-1]
    else:
        orig_url = layer.url
    capabilities_url = orig_url + '?request=getCapabilities'
    extra_requests_args = {'headers': {'CONTENT-TYPE': 'text/xml'}}
    return proxy_view(request, capabilities_url, extra_requests_args)
Example #14
def trademarkify_habr_page(request, path):
    remote_url = 'http://habr.ru/' + path
    response = proxy_view(request, remote_url)

    if 'text/html' in response['content-type']:  # no point parsing non-HTML content
        parser = TradeMarkifyHTMLParser(response.content)
        response.content = parser.trademarkify()

    return response
Example #15
def celery_flower_view(request, path):
    remote_url = 'http://{}/{}'.format(flower_url, path)
    try:
        response = proxy_view(request, remote_url)
    except Exception as e:
        msg = _("<h1>Flower service unavailable, check it</h1>") + \
              '<br><br> <div>{}</div>'.format(e)
        response = HttpResponse(msg)
    return response
Example #16
    def get(self, request):
        user = request.user
        path = request.path.replace('/media/', '')
        if id:
            course = Exercise.objects.get(video=path).schedule.course
            if user in course.users.all():
                secret = settings.SECRET_HASH
                remoteurl = 'http://127.0.0.1/{}'.format(secret) + request.path
                return proxy_view(request, remoteurl)
            else:
                raise NotFound
Example #17
def celery_flower_view(request, path):
    if not request.user.has_perm('ops.view_taskmonitor'):
        return HttpResponse("Forbidden")
    remote_url = 'http://{}/core/flower/{}'.format(flower_url, path)
    try:
        response = proxy_view(request, remote_url)
    except Exception as e:
        msg = _("<h1>Flower service unavailable, check it</h1>") + \
              '<br><br> <div>{}</div>'.format(e)
        response = HttpResponse(msg)
    return response
Example #18
def api(request, path):
    """
    A small proxy for a Shareabouts API server, exposing only
    one configured dataset.
    """
    root = settings.SHAREABOUTS.get('DATASET_ROOT')
    api_key = settings.SHAREABOUTS.get('DATASET_KEY')

    url = make_resource_uri(path, root)
    headers = {'X-Shareabouts-Key': api_key}
    return proxy_view(request, url, requests_args={'headers': headers})
Example #19
def petfinder_view(request, path):
    requests_args = {
        'params': {
            'key': PETFINDER_API_KEY,
            'format': 'json',
            'animal': 'cat',
            'count': 30
        }
    }
    remoteurl = 'http://api.petfinder.com/' + path
    return proxy_view(request, remoteurl, requests_args)
Example #20
def api(request, path):
    """
    A small proxy for a Shareabouts API server, exposing only
    one configured dataset.
    """
    root = settings.SHAREABOUTS.get('DATASET_ROOT')
    api_key = settings.SHAREABOUTS.get('DATASET_KEY')

    url = make_resource_uri(path, root)
    headers = {'X-Shareabouts-Key': api_key}
    return proxy_view(request, url, requests_args={'headers': headers})
Example #21
def api(request, path):
    """
    A small proxy for a Shareabouts API server, exposing only
    one configured dataset.
    """
    config = get_shareabouts_config(settings.SHAREABOUTS_CONFIG)

    dataset = config['dataset']
    api_key = config['dataset_api_key']
    url = make_resource_uri(dataset, path, config['api_root'])

    headers = {'X-Shareabouts-Key': api_key}
    return proxy_view(request, url, requests_args={'headers': headers})
Example #22
def api(request, path):
    """
    A small proxy for a Shareabouts API server, exposing only
    one configured dataset.
    """
    config = get_shareabouts_config(settings.SHAREABOUTS_CONFIG)

    dataset = config['dataset']
    api_key = config['dataset_api_key']
    url = make_resource_uri(dataset, path, config['api_root'])

    headers = {'X-Shareabouts-Key': api_key}
    return proxy_view(request, url, requests_args={'headers': headers})
Example #23
def users(request, path):
    """
    A small proxy for a Shareabouts API server, exposing only
    user authentication.
    """
    root = make_auth_root(settings.SHAREABOUTS.get("DATASET_ROOT"))
    api_key = settings.SHAREABOUTS.get("DATASET_KEY")
    api_session_cookie = request.COOKIES.get("sa-api-session")

    url = make_resource_uri(path, root)
    headers = {"X-Shareabouts-Key": api_key} if api_key else {}
    cookies = {"sessionid": api_session_cookie} if api_session_cookie else {}
    return proxy_view(request, url, requests_args={"headers": headers, "allow_redirects": False, "cookies": cookies})
Example #24
def proxy_cra_requests(request, path):
    '''
    Proxy various requests sent by Create-React-App projects in dev mode ("npm start"), within
    Django-hosted views, to the Create-React-App liveserver.

    Works well with the following re_path definitions in your project's urls.py:

    re_path(r'^sockjs-node/(?P<path>.*)$', proxy_sockjs),
    re_path(r'^__webpack_dev_server__/(?P<path>.*)$', proxy_sockjs),
    '''
    path = request.path
    url = '{}{}'.format(CRA_SERVER_URL, path)
    return proxy_view(request, url)
Example #25
def embeddable(request, id_mapa):
    m = get_object_or_404(Mapa, id_mapa=id_mapa)
    if ManejadorDePermisos.permiso_de_mapa(request.user, m) is None:
        return HttpResponseForbidden()
    	
    extra_requests_args = {}
    mapfile = ManejadorDeMapas.commit_mapfile(id_mapa)
    if m.tipo_de_mapa == 'general':
        for c in m.capas.all():
            ManejadorDeMapas.commit_mapfile(c.id_capa)

    # remote_url = MAPSERVER_URL+'?map='+mapfile +'&mode=browse&layers=all'
    remote_url = mapserver.get_map_browser_url(id_mapa)
    # print remote_url
    return views.proxy_view(request, remote_url, extra_requests_args)
Example #26
def icommons_rest_api_proxy(request, path):
    request_args = {
        'headers': {
            'Authorization': "Token {}".format(settings.ICOMMONS_REST_API_TOKEN)
        }
    }

    # Remove resource_link_id query param
    # request.GET is immutable, so we need to copy before modifying
    request.GET = request.GET.copy()
    request.GET.pop('resource_link_id', None)

    url = "{}/{}".format(settings.ICOMMONS_REST_API_HOST, os.path.join(path, ''))
    if settings.ICOMMONS_REST_API_SKIP_CERT_VERIFICATION:
        request_args['verify'] = False
    return proxy_view(request, url, request_args)
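The `request.GET.copy()` step matters because a QueryDict built from a request is immutable; a small standalone illustration of that behavior (settings.configure() is only there so the snippet runs outside a project):

# Demonstrates why the proxy copies request.GET before popping a key: the
# original QueryDict is immutable, the copy is not.
from django.conf import settings

settings.configure()  # minimal settings so this runs outside a Django project

from django.http import QueryDict

params = QueryDict('resource_link_id=abc&term=2024', mutable=False)
mutable_params = params.copy()            # .copy() always returns a mutable QueryDict
mutable_params.pop('resource_link_id', None)
print(mutable_params.urlencode())         # term=2024
# params.pop('resource_link_id', None)    # would raise AttributeError: immutable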
Example #27
def legend(request, id_mapa):
    mapa = get_object_or_404(Mapa, id_mapa=id_mapa)
    if ManejadorDePermisos.permiso_de_mapa(request.user, mapa) is None:
        return HttpResponseForbidden()

    capa = mapa.capas.first()
    extra_requests_args = {}
    mapfile = ManejadorDeMapas.commit_mapfile(id_mapa)

    if mapa.tipo_de_mapa == 'layer_raster_band':
        sld = mapa.mapserverlayer_set.first().archivo_sld
    else:
        sld = capa.dame_sld_default()
    remote_url = mapserver.get_legend_graphic_url(id_mapa, capa.nombre, sld)
    # remote_url = MAPSERVER_URL+'?map='+mapfile +'&SERVICE=WMS&VERSION=1.3.0&SLD_VERSION=1.1.0&REQUEST=GetLegendGraphic&FORMAT=image/png&LAYER=%s&STYLE=&SLD=%s'%(capa.nombre,capa.dame_sld_default())
    return views.proxy_view(request, remote_url, extra_requests_args)
Example #28
def users(request, path):
    """
    A small proxy for a Shareabouts API server, exposing only
    user authentication.
    """
    root = make_auth_root(settings.SHAREABOUTS.get('DATASET_ROOT'))
    api_key = settings.SHAREABOUTS.get('DATASET_KEY')
    api_session_cookie = request.COOKIES.get('sa-api-session')

    url = make_resource_uri(path, root)
    headers = {'X-Shareabouts-Key': api_key} if api_key else {}
    cookies = {'sessionid': api_session_cookie} if api_session_cookie else {}
    return proxy_view(request, url, requests_args={
        'headers': headers,
        'allow_redirects': False,
        'cookies': cookies
    })
Example #29
def users(request, path):
    """
    A small proxy for a Shareabouts API server, exposing only
    user authentication.
    """
    root = make_auth_root(settings.SHAREABOUTS.get('DATASET_ROOT'))
    api_key = settings.SHAREABOUTS.get('DATASET_KEY')
    api_session_cookie = request.COOKIES.get('sa-api-session')

    url = make_resource_uri(path, root)
    headers = {'X-Shareabouts-Key': api_key} if api_key else {}
    cookies = {'sessionid': api_session_cookie} if api_session_cookie else {}
    return proxy_view(request, url, requests_args={
        'headers': headers,
        'allow_redirects': False,
        'cookies': cookies
    })
Example #30
def csv_download(request, path):
    """
    A small proxy for a Shareabouts API server, exposing only
    one configured dataset.
    """
    root = settings.SHAREABOUTS.get("DATASET_ROOT")
    api_key = settings.SHAREABOUTS.get("DATASET_KEY")
    api_session_cookie = request.COOKIES.get("sa-api-session")

    url = make_resource_uri(path, root)
    headers = {"X-Shareabouts-Key": api_key, "ACCEPT": "text/csv"}
    cookies = {"sessionid": api_session_cookie} if api_session_cookie else {}
    response = proxy_view(request, url, requests_args={"headers": headers, "cookies": cookies})

    # Send the csv as a timestamped download
    filename = ".".join([os.path.split(path)[1], now().strftime("%Y%m%d%H%M%S"), "csv"])
    response["Content-disposition"] = "attachment; filename=" + filename

    return response
Example #31
def csv_download(request, path):
    """
    A small proxy for a Shareabouts API server, exposing only
    one configured dataset.
    """
    config = get_shareabouts_config(settings.SHAREABOUTS_CONFIG)

    dataset = config['dataset']
    api_key = config['dataset_api_key']
    url = make_resource_uri(dataset, path, config['api_root'])

    headers = {'X-Shareabouts-Key': api_key, 'ACCEPT': 'text/csv'}
    response = proxy_view(request, url, requests_args={'headers': headers})

    # Send the csv as a timestamped download
    filename = '.'.join(
        [os.path.split(path)[1],
         now().strftime('%Y%m%d%H%M%S'), 'csv'])
    response['Content-disposition'] = 'attachment; filename=' + filename

    return response
Example #32
def icommons_rest_api_proxy(request, path):
    request_args = {
        'headers': {
            'Authorization': "Token {}".format(settings.ICOMMONS_REST_API_TOKEN)
        }
    }

    # Remove resource_link_id query param
    # request.GET is immutable, so we need to copy before modifying
    request.GET = request.GET.copy()
    request.GET.pop('resource_link_id', None)

    # tlt-1314: include audit information when creating xlistmaps
    if request.method == 'POST' and 'xlist_maps' in path:
        body_json = json.loads(request.body)
        body_json['last_modified_by'] = request.LTI['lis_person_sourcedid']
        request_args['data'] = json.dumps(body_json)

    url = "{}/{}".format(settings.ICOMMONS_REST_API_HOST, os.path.join(path, ''))
    if settings.ICOMMONS_REST_API_SKIP_CERT_VERIFICATION:
        request_args['verify'] = False
    return proxy_view(request, url, request_args)
Example #33
    def visualize(self, request, path):
        """Render visualization for current model.

        (A hacky implementation) This method tries to launch a tensorboard
        subprocess and then proxies Django requests to it for visualization.
        The subprocess information is stored in a file in the model directory to
        record whether a process has been launched and the port it listens on.
        This implementation can lead to many processes (at most the number of
        detectors available) being launched.

        Arguments:
            request {django http request} -- request coming in
            path {request path} -- http request path stripped of api prefixes

        Raises:
            Http404: [description]
            Http404: [description]

        Returns:
            Django HTTP response -- Returns a Django HTTP response for visualization.
        """
        if not self._output_dir.exists():
            raise Http404('Model directory does not exist. Has the model been trained?')

        # tensorboard needs index.html to load correctly
        if len(path) == 0:
            path = 'index.html'

        pinfo_file_path = (self._output_dir / 'tensorboard.pinfo').resolve()
        self._run_tensorboard_subprocess_if_not_exist(pinfo_file_path)
        if pinfo_file_path.exists():
            with open(pinfo_file_path, 'r') as f:
                subprocess_info = json.loads(f.read())
                port = subprocess_info['port']
            remoteurl = 'http://localhost:{}/'.format(port) + path
            return proxy_view(request, remoteurl)
        else:
            raise Http404
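The `_run_tensorboard_subprocess_if_not_exist` helper is not shown in this example; a rough standalone sketch of what such a helper might do, assuming the tensorboard CLI is on PATH (a guess at the mechanism the docstring describes, not the original code):

# Hypothetical stand-in for the unshown helper: launch tensorboard on a free
# port and record the port/pid in the pinfo file the view reads back.
import json
import socket
import subprocess

def run_tensorboard_if_not_running(output_dir, pinfo_file_path):
    if pinfo_file_path.exists():      # a previous launch already recorded itself
        return

    # Ask the OS for a currently free port.
    with socket.socket() as s:
        s.bind(('localhost', 0))
        port = s.getsockname()[1]

    proc = subprocess.Popen(
        ['tensorboard', '--logdir', str(output_dir), '--port', str(port)])

    with open(pinfo_file_path, 'w') as f:
        json.dump({'port': port, 'pid': proc.pid}, f)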
Example #34
def backend_proxy_view(request, path, use_auth_header=True, base_remote_url=None):
    """
        TODO: hacky as it's getting the base_url and the auth header from the
            get_connection slumber object.

            Also, we should limit the endpoint accessible from this proxy

        if you specify `use_auth_header` to be false, then it won't use the zone
        or url info from the get_connection slumber object. In that case you
        should pass in the base_remote_url yourself.
    """
    assert use_auth_header or base_remote_url
    if use_auth_header:
        client = get_connection(request)
        extra_requests_args = {
            "headers": {k.upper(): v for k, v in dict([client._store["session"].auth.get_header()]).items()}
        }
        if not base_remote_url:
            base_remote_url = client._store["base_url"]
    else:
        extra_requests_args = {}
    remoteurl = u"%s%s" % (base_remote_url, path)
    return proxy_view(request, remoteurl, extra_requests_args)
Example #35
def icommons_rest_api_proxy(request, path):
    request_args = {
        'headers': {
            'Authorization':
            "Token {}".format(settings.ICOMMONS_REST_API_TOKEN)
        }
    }

    # Remove resource_link_id query param
    # request.GET is immutable, so we need to copy before modifying
    request.GET = request.GET.copy()
    request.GET.pop('resource_link_id', None)

    # tlt-1314: include audit information when creating xlistmaps
    if request.method == 'POST' and 'xlist_maps' in path:
        body_json = json.loads(request.body)
        body_json['last_modified_by'] = request.LTI['lis_person_sourcedid']
        request_args['data'] = json.dumps(body_json)

    url = "{}/{}".format(settings.ICOMMONS_REST_API_HOST,
                         os.path.join(path, ''))
    if settings.ICOMMONS_REST_API_SKIP_CERT_VERIFICATION:
        request_args['verify'] = False
    return proxy_view(request, url, request_args)
Example #36
def csv_download(request, path):
    """
    A small proxy for a Shareabouts API server, exposing only
    one configured dataset.
    """
    config = get_shareabouts_config(settings.SHAREABOUTS_CONFIG)

    dataset = config['dataset']
    api_key = config['dataset_api_key']
    url = make_resource_uri(dataset, path, config['api_root'])

    headers = {
        'X-Shareabouts-Key': api_key,
        'ACCEPT': 'text/csv'
    }
    response = proxy_view(request, url, requests_args={'headers': headers})

    # Send the csv as a timestamped download
    filename = '.'.join([os.path.split(path)[1],
                        now().strftime('%Y%m%d%H%M%S'),
                        'csv'])
    response['Content-disposition'] = 'attachment; filename=' + filename

    return response
Example #37
def habr_proxy(request, path):
    url = ('https://habr.com/' + path)
    response = proxy_view(request, url)
    if not re.match('text/html', response['Content-Type']):
        return response

    soup = bs4.BeautifulSoup(response.content, 'html5lib')

    habr_link = re.compile(r'https?://habr\.com')
    local_link = r'http://127.0.0.1:8000'
    for string in soup.findAll(string=True):
        if (type(string) == bs4.element.NavigableString
                and string.parent.name not in ['script', 'style']):
            res = re.sub(r'\b(\w{6})\b', r'\1™', string)
            if (string != res):
                string.replaceWith(res)

    for attr_name in ('xlink:href', 'href'):
        for tag in soup.find_all(attrs={attr_name: habr_link}):
            link = re.sub(habr_link, local_link, tag[attr_name])
            tag[attr_name] = link

    response.content = str(soup)
    return response
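The substitution above tags every standalone six-character word with ™; a quick standalone check of that regex on a made-up string:

# Standalone check of the six-letter-word replacement used above.
import re

sample = 'Django proxies the habr page'
print(re.sub(r'\b(\w{6})\b', r'\1™', sample))
# -> Django™ proxies the habr page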
Example #38
def export_search_to_csv(self, body, user_handle, retry: bool = True) -> None:

    CSV_HEADER = [
        'profile_id',
        'join_date',
        'github_created_at',
        'first_name',
        'last_name',
        'email',
        'handle',
        'sms_verification',
        'persona',
        'rank_coder',
        'rank_funder',
        'num_hacks_joined',
        'which_hacks_joined',
        'hack_work_starts',
        'hack_work_submits',
        'hack_work_start_orgs',
        'hack_work_submit_orgs',
        'bounty_work_starts',
        'bounty_work_submits',
        'hack_started_feature',
        'hack_started_code_review',
        'hack_started_security',
        'hack_started_design',
        'hack_started_documentation',
        'hack_started_bug',
        'hack_started_other',
        'hack_started_improvement',
        'started_feature',
        'started_code_review',
        'started_security',
        'started_design',
        'started_documentation',
        'started_bug',
        'started_other',
        'started_improvement',
        'submitted_feature',
        'submitted_code_review',
        'submitted_security',
        'submitted_design',
        'submitted_documentation',
        'submitted_bug',
        'submitted_other',
        'submitted_improvement',
        'bounty_earnings',
        'bounty_work_start_orgs',
        'bounty_work_submit_orgs',
        'kudos_sends',
        'kudos_receives',
        'hack_winner_kudos_received',
        'grants_opened',
        'grant_contributed',
        'grant_contributions',
        'grant_contribution_amount',
        'num_actions',
        'action_points',
        'avg_points_per_action',
        'last_action_on',
        'keywords',
        'activity_level',
        'reliability',
        'average_rating',
        'longest_streak',
        'earnings_count',
        'follower_count',
        'following_count',
        'num_repeated_relationships',
        'verification_status'
    ]

    user_profile = Profile.objects.get(handle=user_handle)

    PAGE_SIZE = 1000
    proxy_req = HttpRequest()
    proxy_req.method = 'GET'
    remote_url = f'{settings.HAYSTACK_ELASTIC_SEARCH_URL}/haystack/modelresult/_search'

    query_data = json.loads(body)
    proxy_request = proxy_view(proxy_req, remote_url, {'data': body})
    proxy_json_str = proxy_request.content.decode('utf-8')
    proxy_body = json.loads(proxy_json_str)
    if not proxy_body['timed_out']:
        total_hits = proxy_body['hits']['total']
        hits = proxy_body['hits']['hits']
        finished = False
        output = []
        results = []
        if total_hits < PAGE_SIZE:
            finished = True
            results = hits

        if not finished:

            max_loops = math.ceil(total_hits / PAGE_SIZE)
            for x in range(0, max_loops):
                new_body = query_data
                new_body['from'] = 0 if x == 0 else (PAGE_SIZE * x) + 1
                new_body['size'] = PAGE_SIZE
                new_body = json.dumps(new_body)
                proxy_request = proxy_view(proxy_req, remote_url, {'data': new_body})
                proxy_json_str = proxy_request.content.decode('utf-8')
                proxy_body = json.loads(proxy_json_str)
                hits = proxy_body['hits']['hits']
                results = results + hits

        for result in results:
            source = result['_source']
            row_item = {}
            for k in source.copy():

                new_column = k.replace('_exact', '')

                if new_column in CSV_HEADER:
                    row_item[new_column] = source[k]
            output.append(row_item)
        now = datetime.now()
        csv_file_path = f'/tmp/user-directory-export-{user_profile.handle}-{now}.csv'
        try:
            with open(csv_file_path, 'w', encoding='utf-8') as csvfile:
                writer = csv.DictWriter(csvfile, fieldnames=CSV_HEADER)
                writer.writeheader()
                writer.writerows(output)
        except IOError:
            print("I/O error")
        if os.path.isfile(csv_file_path):

            to_email = user_profile.user.email
            from_email = settings.CONTACT_EMAIL

            subject = "Your exported user directory csv is attached"
            html = text = f'Your exported {csv_file_path.replace("/tmp/", "")} is attached.'
            send_mail(
                from_email,
                to_email,
                subject,
                text,
                html,
                from_name=f"@{user_profile.handle}",
                categories=['transactional'],
                csv=csv_file_path
            )
Example #39
def pipe_ows_to_geoserver(request, path=""):
    # TODO: implement authentication here
    extra_requests_args = dict()
    remoteurl = 'http://geoserver:8080/geoserver/adalitix/ows'
    return proxy_view(request, remoteurl, extra_requests_args)
Example #40
def view(request, project_id):
    return proxy_view(request, "http://" + project_id)
Example #41
def fcs_proxy(request):
    return proxy_view(request, 'https://www.redditcfb.com/fcstop25.txt')
Example #42
def proxy_markdownify_view(request):
    if not is_user_an_editor(request):
        return HttpResponseForbidden()
    remote_url = API_HOST + '/markdownx/markdownify/'
    return proxy_view(request, remote_url, {})
Example #43
def proxy(request, path):
    remote_url = 'http://localhost:3001/' + path
    return proxy_view(request, remote_url, {})
Example #44
def view(request, project_id):
    proj = Play_Project.objects.get(unique_id=project_id)
    remoteurl = project_id + ".app:8000"
    return proxy_view(request, remoteurl)