Example #1
0
    def _get_acls(self, user_id, data_url):
        """Determine whether *user_id* may view *data_url*.

        Issues a HEAD request against ``data_url``, forwarding the
        relevant auth cookie(s) from the current request, and maps the
        result to an ACE tuple.

        :param user_id: principal the ACE is issued for
        :param data_url: remote url to probe for accessibility
        :return: ``(security.Allow|security.Deny, user_id, 'view')``
        """
        # TODO: ensure that request Session passes cookie on to correct domain
        settings = self.request.registry.settings
        s = requests.Session()
        try:
            for name in ('__ac', ):  # 'serverid'):
                # pass all interesting cookies for our domain on;
                # we copy cookies, so that we can set domain etc...
                cookie = self.request.cookies.get(name)
                if not cookie:
                    continue
                if name == '__ac':
                    # append tokens if we set __ac cookie
                    tokens = ','.join([
                        token.strip()
                        for token in settings.get(
                            'authtkt.tokens', '').split('\n') if token.strip()
                    ])
                    if cookie and tokens:
                        cookie = update_auth_cookie(cookie, tokens,
                                                    self.request)
                if cookie:
                    # FIX: read 'authtkt.secure' from registry.settings —
                    # the registry itself does not hold config values, so
                    # the old registry.get(...) always fell back to 'False'
                    secure = settings.get(
                        'authtkt.secure',
                        'False').lower() not in ('false', '0', 'no')
                    s.cookies.set(name,
                                  cookie,
                                  secure=secure,
                                  domain=self.request.host,
                                  path='/')
            from pyramid.settings import asbool  # FIXME: avoid circular import?
            verify = asbool(settings.get('bccvl.ssl.verify', True))
            r = s.head(data_url, verify=verify, allow_redirects=True)
        finally:
            # always release the session, even if the HEAD request raises
            s.close()
        # TODO: do we have to handle redirects specially?
        if r.status_code == 200:
            permission = security.Allow
        else:
            permission = security.Deny
        return (permission, user_id, 'view')
Example #2
0
    def _get_acls(self, user_id, data_url):
        """Determine whether *user_id* may view *data_url*.

        Issues a HEAD request against ``data_url``, forwarding the
        relevant auth cookie(s) from the current request, and maps the
        result to an ACE tuple.

        :param user_id: principal the ACE is issued for
        :param data_url: remote url to probe for accessibility
        :return: ``(security.Allow|security.Deny, user_id, 'view')``
        """
        # TODO: ensure that request Session passes cookie on to correct domain
        settings = self.request.registry.settings
        s = requests.Session()
        try:
            for name in ('__ac',):  # 'serverid'):
                # pass all interesting cookies for our domain on;
                # we copy cookies, so that we can set domain etc...
                cookie = self.request.cookies.get(name)
                if not cookie:
                    continue
                if name == '__ac':
                    # append tokens if we set __ac cookie
                    tokens = ','.join([token.strip() for token in
                                       settings.get('authtkt.tokens',
                                                    '').split('\n')
                                       if token.strip()])
                    if cookie and tokens:
                        cookie = update_auth_cookie(cookie, tokens,
                                                    self.request)
                if cookie:
                    # FIX: read 'authtkt.secure' from registry.settings —
                    # the registry itself does not hold config values, so
                    # the old registry.get(...) always fell back to 'False'
                    secure = settings.get(
                        'authtkt.secure',
                        'False').lower() not in ('false', '0', 'no')
                    s.cookies.set(name, cookie, secure=secure,
                                  domain=self.request.host, path='/')
            from pyramid.settings import asbool  # FIXME: avoid circular import?
            verify = asbool(settings.get('bccvl.ssl.verify', True))
            r = s.head(data_url, verify=verify, allow_redirects=True)
        finally:
            # always release the session, even if the HEAD request raises
            s.close()
        # TODO: do we have to handle redirects specially?
        if r.status_code == 200:
            permission = security.Allow
        else:
            permission = security.Deny
        return (permission, user_id, 'view')
Example #3
0
def fetch_file(request, url):
    """Download the file from url and place it on the local file system.

    If the file is a zip file it will be extracted to the local file
    system.

    :param request: current request; used for registry settings and to
        forward the ``__ac`` auth cookie to the remote host
    :param url: http(s) url to fetch; an optional url fragment names a
        file inside the downloaded (and extracted) archive
    :return: path of the requested file on the local file system
    :raises Exception: for unsupported url schemes, failed downloads, or
        a resolved path that escapes the data directory
    """
    # TODO: optimize data files for mapserver?
    # reproject/warp source? to avoid mapserver doing warp on the fly
    # otheroptions:
    #   convert to tiled raster (makes access to tiles faster)
    #     gdal_translate -co TILED=YES original.tif tiled.tif
    #   use Erdas Imagine (HFA) format ... always tiled and supports>4GB files
    #     gdal_translate -of HFA original.tif tiled.img
    #   add overview image to raster (after possible translate)
    #     gdaladdo [-r average] tiled.tif 2 4 8 16 32 64 128
    # for rs point data maybe convert to shapefile?
    if not (url.startswith('http://') or url.startswith('https://')):
        # TODO: probably allow more than just http and https
        #       and use better exception
        # FIX: interpolate url into the message; it was previously passed
        # as a second Exception argument and never formatted
        raise Exception('unsupported url scheme: %s' % url)

    # Check if a local data file already exists
    datadir = data_dir(request, url)
    url, fragment = urlparse.urldefrag(url)
    # FIXME: have to import here due to circular import
    from pyramid.settings import asbool
    with LockFile(datadir + '.lock'):
        if not os.path.exists(datadir):
            # the folder doesn't exist so we'll have to fetch the file
            # TODO: make sure there is no '..' in datadir
            os.makedirs(datadir)
            # not available yet so fetch it
            try:
                settings = request.registry.settings
                destfile = os.path.join(datadir, os.path.basename(url))
                try:
                    src = {
                        'url': url,
                        'verify': asbool(settings.get('bccvl.ssl.verify', True))
                    }
                    # do we have an __ac cookie?
                    cookie = request.cookies.get('__ac')
                    # get my tokens
                    tokens = ','.join([
                        token.strip()
                        for token in settings.get(
                            'authtkt.tokens', '').split('\n') if token.strip()
                    ])
                    if cookie:
                        src['cookies'] = {
                            'name': '__ac',
                            'value': update_auth_cookie(cookie, tokens, request),
                            'secure': True,
                            'domain': request.host,
                            'path': '/'
                        }
                    dst = {'url': u'file://{0}'.format(destfile)}
                    movelib.move(src, dst)
                except Exception as e:
                    # direct download failed what now?
                    LOG.exception('Failed to download data %s: %s', url, e)
                    raise
                # if it is a zip we should unpack it
                # FIXME: do some more robust zip detection
                if 'application/zip' in mimetypes.guess_type(destfile):
                    with zipfile.ZipFile(destfile, 'r') as zipf:
                        zipf.extractall(datadir)
                    # remove zipfile
                    os.remove(destfile)

                # search all tifs and try to generate overviews
                for root, dirnames, filenames in os.walk(datadir):
                    for filename in fnmatch.filter(filenames, '*.tif'):
                        rasterfile = os.path.join(root, filename)
                        ds = gdal.Open(rasterfile)
                        if ds:
                            # FIX: floor division — plain '/' produces a
                            # float on Python 3; '//' matches the old
                            # Python 2 integer semantics on both versions
                            maxlevel = min(ds.RasterXSize,
                                           ds.RasterYSize) // 512
                            ovrclear = ['gdaladdo', '-clean', rasterfile]
                            ovradd = ['gdaladdo', '-ro',
                                      #'--config', 'COMPRESS_OVERVIEW', 'LZW',
                                      rasterfile,
                            ]
                            # overview levels: powers of two below maxlevel
                            level = 2
                            while level < maxlevel:
                                ovradd.append(str(level))
                                level = level * 2
                            if maxlevel > 2:
                                subprocess.check_call(ovrclear)
                                subprocess.check_call(ovradd)

            except Exception as e:
                LOG.error('Could not download %s to %s : %s', url, datadir, e)
                # clean up partial download so a retry starts fresh
                shutil.rmtree(datadir)
                # FIX: bare raise preserves the original traceback
                # ('raise e' rewrote it on Python 2)
                raise
    # we have the data now construct the filepath
    filename = fragment if fragment else os.path.basename(url)
    # FIXME: make sure path.join works correctly (trailing/leading slash?)
    # normalise once; normpath also collapses any '..' components
    filename = os.path.normpath(os.path.join(datadir, filename))
    # make sure filename is within datadir
    # FIX: require a path separator after datadir so a sibling directory
    # such as '<datadir>2' cannot pass the containment check
    if not filename.startswith(os.path.join(datadir, '')):
        # FIXME: should probably check if filename exists and is supported
        #        and use better exception here
        # FIX: interpolate the path into the message (was a second arg)
        raise Exception("Data file path not valid: '%s'" % filename)
    return filename
Example #4
0
def fetch_file(request, url):
    """Download the file from url and place it on the local file system.

    If the file is a zip file it will be extracted to the local file
    system.

    :param request: current request; used for registry settings and to
        forward the ``__ac`` auth cookie to the remote host
    :param url: http(s) url to fetch; an optional url fragment names a
        file inside the downloaded (and extracted) archive
    :return: path of the requested file on the local file system
    :raises Exception: for unsupported url schemes, failed downloads, or
        a resolved path that escapes the data directory
    """
    # TODO: optimize data files for mapserver?
    # reproject/warp source? to avoid mapserver doing warp on the fly
    # otheroptions:
    #   convert to tiled raster (makes access to tiles faster)
    #     gdal_translate -co TILED=YES original.tif tiled.tif
    #   use Erdas Imagine (HFA) format ... always tiled and supports>4GB files
    #     gdal_translate -of HFA original.tif tiled.img
    #   add overview image to raster (after possible translate)
    #     gdaladdo [-r average] tiled.tif 2 4 8 16 32 64 128
    # for rs point data maybe convert to shapefile?
    if not (url.startswith('http://') or url.startswith('https://')):
        # TODO: probably allow more than just http and https
        #       and use better exception
        # FIX: interpolate url into the message; it was previously passed
        # as a second Exception argument and never formatted
        raise Exception('unsupported url scheme: %s' % url)

    # Check if a local data file already exists
    datadir = data_dir(request, url)
    url, fragment = urlparse.urldefrag(url)
    # FIXME: have to import here due to circular import
    from pyramid.settings import asbool
    with LockFile(datadir + '.lock'):
        if not os.path.exists(datadir):
            # the folder doesn't exist so we'll have to fetch the file
            # TODO: make sure there is no '..' in datadir
            os.makedirs(datadir)
            # not available yet so fetch it
            try:
                settings = request.registry.settings
                destfile = os.path.join(datadir, os.path.basename(url))
                try:
                    src = {
                        'url': url,
                        'verify': asbool(settings.get('bccvl.ssl.verify',
                                                      True))
                    }
                    # do we have an __ac cookie?
                    cookie = request.cookies.get('__ac')
                    # get my tokens
                    tokens = ','.join([
                        token.strip() for token in settings.get(
                            'authtkt.tokens', '').split('\n') if token.strip()
                    ])
                    if cookie:
                        src['cookies'] = {
                            'name': '__ac',
                            'value':
                            update_auth_cookie(cookie, tokens, request),
                            'secure': True,
                            'domain': request.host,
                            'path': '/'
                        }
                    dst = {'url': u'file://{0}'.format(destfile)}
                    movelib.move(src, dst)
                except Exception as e:
                    # direct download failed what now?
                    LOG.exception('Failed to download data %s: %s', url, e)
                    raise
                # if it is a zip we should unpack it
                # FIXME: do some more robust zip detection
                if 'application/zip' in mimetypes.guess_type(destfile):
                    with zipfile.ZipFile(destfile, 'r') as zipf:
                        zipf.extractall(datadir)
                    # remove zipfile
                    os.remove(destfile)

                # search all tifs and try to generate overviews
                for root, dirnames, filenames in os.walk(datadir):
                    for filename in fnmatch.filter(filenames, '*.tif'):
                        rasterfile = os.path.join(root, filename)
                        ds = gdal.Open(rasterfile)
                        if ds:
                            # FIX: floor division — plain '/' produces a
                            # float on Python 3; '//' matches the old
                            # Python 2 integer semantics on both versions
                            maxlevel = min(ds.RasterXSize,
                                           ds.RasterYSize) // 512
                            ovrclear = ['gdaladdo', '-clean', rasterfile]
                            ovradd = [
                                'gdaladdo',
                                '-ro',
                                #'--config', 'COMPRESS_OVERVIEW', 'LZW',
                                rasterfile,
                            ]
                            # overview levels: powers of two below maxlevel
                            level = 2
                            while level < maxlevel:
                                ovradd.append(str(level))
                                level = level * 2
                            if maxlevel > 2:
                                subprocess.check_call(ovrclear)
                                subprocess.check_call(ovradd)

            except Exception as e:
                LOG.error('Could not download %s to %s : %s', url, datadir, e)
                # clean up partial download so a retry starts fresh
                shutil.rmtree(datadir)
                # FIX: bare raise preserves the original traceback
                # ('raise e' rewrote it on Python 2)
                raise
    # we have the data now construct the filepath
    filename = fragment if fragment else os.path.basename(url)
    # FIXME: make sure path.join works correctly (trailing/leading slash?)
    # normalise once; normpath also collapses any '..' components
    filename = os.path.normpath(os.path.join(datadir, filename))
    # make sure filename is within datadir
    # FIX: require a path separator after datadir so a sibling directory
    # such as '<datadir>2' cannot pass the containment check
    if not filename.startswith(os.path.join(datadir, '')):
        # FIXME: should probably check if filename exists and is supported
        #        and use better exception here
        # FIX: interpolate the path into the message (was a second arg)
        raise Exception("Data file path not valid: '%s'" % filename)
    return filename