Exemplo n.º 1
0
 def upsert(self, image_request, temp_fp, image_info):
     """Move *temp_fp* into its final cache location and return that path.

     The destination directory is created (if needed) by
     ``create_dir_and_return_file_path``; the move itself goes through
     ``safe_rename`` so it works across filesystem boundaries.
     """
     destination = self.create_dir_and_return_file_path(
         image_request=image_request,
         image_info=image_info
     )
     safe_rename(temp_fp, destination)
     return destination
Exemplo n.º 2
0
    def test_renames_file_correctly(self, src, dst):
        """safe_rename moves src to dst and preserves the file contents."""
        assert os.path.exists(src)
        assert not os.path.exists(dst)

        utils.safe_rename(src, dst)

        assert not os.path.exists(src)
        assert os.path.exists(dst)
        # Use a context manager so the handle is closed deterministically;
        # the original `open(dst, 'rb').read()` relied on the GC to close it.
        with open(dst, 'rb') as f:
            assert f.read() == b'hello world'
Exemplo n.º 3
0
 def test_if_error_is_unexpected_then_is_raised(self, src, dst):
     """
     An ``OSError`` from ``os.rename()`` that is not a cross-filesystem
     failure propagates out of ``safe_rename``.
     """
     message = "Exception thrown in utils_t.py for TestRename"
     fake_rename = mock.Mock(side_effect=OSError(-1, message))
     with mock.patch('loris.utils.os.rename', fake_rename), \
             pytest.raises(OSError):
         utils.safe_rename(src, dst)
Exemplo n.º 4
0
    def copy_to_cache(self, ident):
        """Fetch the source image for *ident* over HTTP into the local cache.

        The image is streamed to a temporary file in the cache directory and
        then renamed into place, so concurrent readers never see a partial
        file.  A best-effort attempt is also made to fetch an authorization
        rules file assumed to live next to the source image.

        Returns the local filesystem path of the cached image.
        Raises ResolverException if the source image cannot be retrieved.
        """
        ident = unquote(ident)

        # Get source image and write to temporary file.
        (source_url, options) = self._web_request_url(ident)
        assert source_url is not None

        cache_dir = self.cache_dir_path(ident)
        mkdir_p(cache_dir)

        with closing(requests.get(source_url, stream=True,
                                  **options)) as response:
            if not response.ok:
                # logger.warn() is a deprecated alias of logger.warning().
                logger.warning(
                    "Source image not found at %s for identifier: %s. "
                    "Status code returned: %s.", source_url, ident,
                    response.status_code)
                raise ResolverException(
                    "Source image not found for identifier: %s. "
                    "Status code returned: %s." %
                    (ident, response.status_code))

            extension = self.cache_file_extension(ident, response)
            local_fp = join(cache_dir, "loris_cache." + extension)

            with tempfile.NamedTemporaryFile(dir=cache_dir,
                                             delete=False) as tmp_file:
                for chunk in response.iter_content(2048):
                    tmp_file.write(chunk)

        # Now rename the temp file to the desired file name if it still
        # doesn't exist (another process could have created it).
        #
        # Note: This is purely an optimisation; if the file springs into
        # existence between the existence check and the copy, it will be
        # overridden.
        if exists(local_fp):
            logger.info('Another process downloaded src image %s', local_fp)
            remove(tmp_file.name)
        else:
            safe_rename(tmp_file.name, local_fp)
            logger.info("Copied %s to %s", source_url, local_fp)

        # Check for rules file associated with image file.
        # These files are < 2k in size, so fetch in one go.
        # Assumes that the rules will be next to the image;
        # cache_dir is image specific, so this is easy.
        bits = split(source_url)
        fn = bits[1].rsplit('.')[0] + "." + self.auth_rules_ext
        rules_url = bits[0] + '/' + fn
        try:
            resp = requests.get(rules_url)
            if resp.status_code == 200:
                local_rules_fp = join(cache_dir,
                                      "loris_cache." + self.auth_rules_ext)
                if not exists(local_rules_fp):
                    with open(local_rules_fp, 'w') as fh:
                        fh.write(resp.text)
        except requests.exceptions.RequestException:
            # Rules file is optional: keep the original best-effort behavior
            # on network failure, but don't use a bare `except:` that would
            # also swallow KeyboardInterrupt/SystemExit and hide real bugs.
            pass

        return local_fp
Exemplo n.º 5
0
    def copy_to_cache(self, ident):
        """Download the image source file for *ident* from S3 into the cache,
        if it is not already cached.

        The object is streamed to a temporary file in the cache directory and
        then renamed into place, so concurrent readers never see a partial
        file.  A best-effort attempt is also made to download an authorization
        rules file assumed to live next to the image key.

        Returns the local filesystem path of the cached image.
        Raises ResolverException if the S3 object cannot be read or saved.
        """
        ident = unquote(ident)

        # Get source image location and write to temporary file.
        (bucketname, keyname) = self.s3bucket_from_ident(ident)

        try:
            s3obj = self.s3.Object(bucketname, keyname)
            content_type = s3obj.content_type
        except Exception as e:
            msg = "no content_type for s3 object ({}:{}): {}".format(
                bucketname, keyname, e)
            logger.error(msg)
            raise ResolverException(msg)

        extension = self.cache_file_extension(ident, content_type)
        cache_dir = self.cache_dir_path(ident)
        os.makedirs(cache_dir, exist_ok=True)
        local_fp = os.path.join(cache_dir, "loris_cache." + extension)
        with tempfile.NamedTemporaryFile(dir=cache_dir,
                                         delete=False) as tmp_file:
            try:
                self.s3.Bucket(bucketname).download_fileobj(keyname, tmp_file)
            except Exception as e:
                msg = "unable to access or save s3 object ({}:{}): {}".format(
                    bucketname, keyname, e)
                logger.error(msg)
                # delete=False means a failed download would otherwise leave
                # a stale partial temp file in the cache dir forever.
                tmp_file.close()
                os.remove(tmp_file.name)
                raise ResolverException(msg)

        # Now rename the temp file to the desired file name if it still
        # doesn't exist (another process could have created it).
        #
        # Note: This is purely an optimisation; if the file springs into
        # existence between the existence check and the copy, it will be
        # overridden.
        if os.path.exists(local_fp):
            logger.info(
                "Another process downloaded src image {}".format(local_fp))
            os.remove(tmp_file.name)
        else:
            safe_rename(tmp_file.name, local_fp)
            logger.info("Copied {}:{} to {}".format(bucketname, keyname,
                                                    local_fp))

        # Check for rules file associated with image file.
        # These files are < 2k in size, so fetch in one go.
        # Assumes that the rules will be next to the image;
        # cache_dir is image specific, so this is easy.
        bits = os.path.split(keyname)  # === bash basename
        fn = bits[1].rsplit(".")[0] + "." + self.auth_rules_ext
        rules_keyname = bits[0] + "/" + fn
        local_rules_fp = os.path.join(cache_dir,
                                      "loris_cache." + self.auth_rules_ext)
        try:
            self.s3.Object(bucketname,
                           rules_keyname).download_file(local_rules_fp)
        except Exception as e:
            # No connection available, or no rules object exists; the rules
            # file is optional so just log and continue.
            # logger.warn() is a deprecated alias of logger.warning().
            msg = "ignoring rules file({}/{}) for ident({}): {}".format(
                bucketname, rules_keyname, ident, e)
            logger.warning(msg)

        return local_fp