def run_test(self, filename, content='Lorem ipsum dolor sit amet'):
        content = UnicodeContentFile(content)
        filename = default_storage.save(filename, content)
        self.assert_(default_storage.exists(filename))

        self.assertEqual(default_storage.size(filename), content.size)
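        # The stored file's modification time should fall within five minutes of "now".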
        now = datetime.utcnow()
        delta = timedelta(minutes=5)
        mtime = default_storage.getmtime(filename)
        self.assert_(mtime > mktime((now - delta).timetuple()))
        self.assert_(mtime < mktime((now + delta).timetuple()))
        file = default_storage.open(filename)
        self.assertEqual(file.size, content.size)
        fileurl = force_unicode(file).replace('\\', '/')
        fileurl = urlquote_plus(fileurl, '/')
        if fileurl.startswith('/'):
            fileurl = fileurl[1:]
        self.assertEqual(
            MEDIA_URL+fileurl,
            default_storage.url(filename)
        )
        file.close()

        default_storage.delete(filename)
        self.assert_(not default_storage.exists(filename))
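
A minimal sketch of a concrete test driving the helper above (the test name and filename are assumptions):

def test_save_and_delete(self):
        # Hypothetical driver; any unique filename works.
        self.run_test('storage_test.txt')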
Example #2
def sign(version_id, reviewer=False):
    version = Version.objects.get(pk=version_id)
    app = version.addon
    log.info('Signing version: %s of app: %s' % (version_id, app))

    if not app.type == amo.ADDON_WEBAPP:
        log.error('Attempt to sign something other than an app.')
        raise SigningError('Not an app')

    if not app.is_packaged:
        log.error('Attempt to sign a non-packaged app.')
        raise SigningError('Not packaged')

    try:
        file_obj = version.all_files[0]
    except IndexError:
        log.error('Attempt to sign an app with no files in version.')
        raise SigningError('No file')

    path = (file_obj.signed_reviewer_file_path if reviewer else
            file_obj.signed_file_path)
    if storage.exists(path):
        log.info('Already signed app exists.')
        return path

    with statsd.timer('services.sign.app'):
        try:
            sign_app(file_obj.file_path, path, reviewer)
        except SigningError:
            if storage.exists(path):
                storage.delete(path)
            raise
    log.info('Signing complete.')
    return path
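
A sketch of how this task might be invoked for a given version (the version object is an assumption):

signed_path = sign(some_version.pk)                   # user-facing signed package
reviewer_path = sign(some_version.pk, reviewer=True)  # reviewer-facing copy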
Example #3
def task_join_catalogs(job_id):
    """Union of unique rows in two catalogs"""
    job = JoinCatalogs.objects.get(pk=job_id)
    job.job_status = 'started'
    job.save()

    if default_storage.exists(job.left_table.handle.name):
        left = csv.DictReader(default_storage.open(job.left_table.handle.name))
    if default_storage.exists(job.right_table.handle.name):
        right = csv.DictReader(default_storage.open(job.right_table.handle.name))
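    # Union of the column names from both catalogs, used for the output header.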
    keys = set(left.fieldnames + right.fieldnames)
    left = [r for r in left]
    right = [r for r in right]

    joinedCatalogs, columns = join(left, right, job.fk_field)
    path = os.path.join(BASE_DIR, MEDIA_ROOT, 'catalogs', '{}.csv'.format(job.results_label))
    with open(path, 'w') as csv_file:
        writer = csv.DictWriter(csv_file, fieldnames=keys)
        writer.writeheader()
        writer.writerows(joinedCatalogs)

    job.completed = 1
    job.results_handle = 'catalogs/{}.csv'.format(job.results_label)

    result = Catalog(
        name=job.results_label,
        handle='catalogs/{}.csv'.format(job.results_label)
    )
    result.save()
    job.complete = True
    job.completed_date = timezone.now()
    job.job_status = 'complete'
    job.save()
    return True
Example #4
    def test_delete_image(self):
        """Test that two images with same size and same name
        can be stored on disk"""
        def create_image():
            filepath = os.path.join(settings.BASE_DIR, 'big.jpeg')
            with open(filepath, 'rb') as f:
                # prepare form data
                image = InMemoryUploadedFile(
                    f,
                    'image',
                    'big.jpeg',
                    'image/jpeg',
                    42,  # not significant for the test
                    'utf-8'
                )
                files = MultiValueDict()
                files['image'] = image
                post = MultiValueDict()
                post['ptype'] = 1
                post['name'] = 'test with big.jpeg'

                # create form
                form = ImageForm(TestImage, post, files)
                # validate resize operation
                form.is_valid()

                # execute resize operation
                image = form.save()
                return image
        # create the same image twice:
        one = create_image()
        self.assertTrue(default_storage.exists(one.og['filepath']))
        one.delete()
        self.assertFalse(default_storage.exists(one.og['filepath']))
Example #5
    def test_upload_sign_error_existing(self, sign_app_mock):
        sign_app_mock.side_effect = SigningError
        langpack = self.create_langpack()
        eq_(LangPack.objects.count(), 1)
        original_uuid = langpack.uuid
        original_file_path = langpack.file_path
        original_file_version = langpack.file_version
        original_version = langpack.version
        # create_langpack() doesn't create a fake file, let's add one.
        storage.open(langpack.file_path, 'w').close()

        upload = self.upload('langpack')
        with self.assertRaises(SigningError):
            LangPack.from_upload(upload, instance=langpack)
        # Test that we didn't delete the upload file
        ok_(storage.exists(upload.path))
        # Test that we didn't delete the existing filename or alter the
        # existing langpack in the database.
        eq_(LangPack.objects.count(), 1)
        langpack.reload()
        eq_(original_uuid, langpack.uuid)
        eq_(langpack.file_path, original_file_path)
        eq_(original_file_version, langpack.file_version)
        eq_(original_version, langpack.version)
        ok_(storage.exists(langpack.file_path))

        # Cleanup
        storage.delete(langpack.file_path)
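Example #6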
    def test_doesnt_delete_file_referenced_by_orphan_and_nonorphan_nodes(self):
        """
        Make sure we don't delete a file, as long as it's referenced
        by a non-orphan node.
        """

        # Our orphan, to be taken soon from this world
        orphan_node = _create_expired_contentnode()

        # our legit node, standing proud and high with its non-orphaned status
        legit_node = ContentNode.objects.create(
            kind_id=content_kinds.VIDEO,
        )

        f = File.objects.create(
            contentnode=legit_node,
            checksum="aaa",
        )
        forphan = File.objects.create(
            contentnode=orphan_node,
            checksum="aaa",
        )

        # The file they both share. This has the same checksum and contents.
        # Because it is still referenced by the non-orphan node, it must
        # not be deleted.
        f.file_on_disk.save("aaa.jpg", ContentFile("aaa"))
        forphan.file_on_disk.save("aaa.jpg", ContentFile("aaa"))

        # check that our file exists in object storage
        assert default_storage.exists("storage/a/a/aaa.jpg")

        clean_up_contentnodes()

        assert default_storage.exists("storage/a/a/aaa.jpg")
Example #7
    def delete_or_disable_related_content_exclude_addons_with_other_devs(self):
        addon = Addon.objects.latest('pk')
        user = UserProfile.objects.get(pk=55021)
        user.update(picture_type='image/png')
        AddonUser.objects.create(addon=addon, user=user_factory())

        # Create a photo so that we can test deletion.
        with storage.open(user.picture_path, 'wb') as fobj:
            fobj.write('test data\n')

        with storage.open(user.picture_path_original, 'wb') as fobj:
            fobj.write('original test data\n')

        assert user.addons.count() == 1
        rating = Rating.objects.create(
            user=user, addon=addon, version=addon.current_version)
        Rating.objects.create(
            user=user, addon=addon, version=addon.current_version,
            reply_to=rating)
        Collection.objects.create(author=user)

        # Now that everything is set up, disable/delete related content.
        user.delete_or_disable_related_content()

        # The add-on should not have been touched, it has another dev.
        assert user.addons.exists()
        addon.reload()
        assert addon.status == amo.STATUS_PUBLIC

        assert not user._ratings_all.exists()  # Even replies.
        assert not user.collections.exists()

        assert not storage.exists(user.picture_path)
        assert not storage.exists(user.picture_path_original)
Example #8
    def delete_or_disable_related_content_actually_delete(self):
        addon = Addon.objects.latest('pk')
        user = UserProfile.objects.get(pk=55021)
        user.update(picture_type='image/png')

        # Create a photo so that we can test deletion.
        with storage.open(user.picture_path, 'wb') as fobj:
            fobj.write('test data\n')

        with storage.open(user.picture_path_original, 'wb') as fobj:
            fobj.write('original test data\n')

        assert user.addons.count() == 1
        rating = Rating.objects.create(
            user=user, addon=addon, version=addon.current_version)
        Rating.objects.create(
            user=user, addon=addon, version=addon.current_version,
            reply_to=rating)
        Collection.objects.create(author=user)

        # Now that everything is set up, delete related content.
        user.delete_or_disable_related_content(delete=True)

        assert not user.addons.exists()

        assert not user._ratings_all.exists()  # Even replies.
        assert not user.collections.exists()

        assert not storage.exists(user.picture_path)
        assert not storage.exists(user.picture_path_original)
Example #9
def _user_detail(request, user):
    ctx = _entity_detail(request, user)
    ctx['photosUrl'] = reverse('fotos', kwargs={'path':''}) + \
                                        '?q=tag:'+str(user.name)
    photos_path = path.join(settings.SMOELEN_PHOTOS_PATH, str(user.name))
    if default_storage.exists(photos_path + '.jpg'):
        img = Image.open(default_storage.open(photos_path + '.jpg'))
        width, height = img.size
        if default_storage.exists(photos_path + '.orig'):
            # smoel was created using newer strategy. Shrink until it fits the
            # requirements.
            width, height = resize_proportional(img.size[0], img.size[1],
                                                settings.SMOELEN_WIDTH,
                                                settings.SMOELEN_HEIGHT)
        elif width > settings.SMOELEN_WIDTH:
            # smoel was created as high-resolution image, probably 600px wide
            width /= 2
            height /= 2
        else:
            # smoel was created as normal image, probably 300px wide
            pass
        ctx.update({
                'hasPhoto': True,
                'photoWidth': width,
                'photoHeight': height})
    return render_to_response('leden/user_detail.html', ctx,
            context_instance=RequestContext(request))
Example #10
def attachment_remove(r):
    if not r.user.is_authenticated():
        return HttpResponse(get_json_response(code=403, message='Unauthorized'))

    try:
        if 'base_name' in r.POST and len(r.POST['base_name']) > 0:
            # deleting recently uploaded image
            if not re.match(r'[a-z0-9]+\.[a-z]+', r.POST['base_name']):
                raise ValueError(u'Передано некорректное имя файла')  # "An invalid file name was passed"

            fname_base = r.POST['base_name']
            uploaded_file = 'uploads/%s' % fname_base
            thumb_file = "uploads/%s" % NewsPost.get_thumbname_from_base(fname_base)
            if fs.exists(uploaded_file):
                try:
                    fs.delete(uploaded_file)
                except:
                    pass
            if fs.exists(thumb_file):
                try:
                    fs.delete(thumb_file)
                except:
                    pass
    except ValueError as e:
        return HttpResponse(get_json_response(code=1, message=e.message))

    return HttpResponse(get_json_response(code=0))
Example #11
def attach_delete(request):
    if not request.user.is_authenticated():
        return HttpResponse(get_json_response(code=403, message='Unauthorized'))

    try:
        if 'uid' in request.POST and len(request.POST['uid']) > 0:
            if not re.match(r'[a-zA-Z0-9]+', request.POST['uid']):
                raise ValueError(u'Передано некорректное имя файла')  # "An invalid file name was passed"
            uid = request.POST['uid']
            if uid not in request.session:
                raise ValueError(u'Сессия завершена!')  # "The session has ended!"

            # The session entry appears to be a tuple whose second and third
            # items are the stored file path and its thumbnail path.
            uploaded_file = request.session[uid][1]
            thumb_file = request.session[uid][2]

            del request.session[uid]

            if fs.exists(uploaded_file):
                try:
                    fs.delete(uploaded_file)
                except IOError as e:
                    log.error("Failed to delete file '%s'. %s" % (uploaded_file, e.message))
            if fs.exists(thumb_file):
                try:
                    fs.delete(thumb_file)
                except IOError as e:
                    log.error("Failed to delete file '%s'. %s" % (thumb_file, e.message))
    except ValueError as e:
        return HttpResponse(get_json_response(code=400, message=e.message))
    return HttpResponse(get_json_response(code=200))
Example #12
    def delete(self, hard=False):
        # Recursive import
        from olympia.users.tasks import delete_photo

        # Cache the values in case we do a hard delete and lose the
        # reference to the user-id.
        picture_path = self.picture_path
        original_picture_path = self.picture_path_original

        if hard:
            super(UserProfile, self).delete()
        else:
            log.info(
                u'User (%s: <%s>) is being anonymized.' % (self, self.email))
            self.email = None
            self.fxa_id = None
            self.display_name = None
            self.homepage = ''
            self.location = ''
            self.deleted = True
            self.picture_type = None
            self.auth_id = generate_auth_id()
            self.last_login_attempt = None
            self.last_login_attempt_ip = ''
            self.last_login_ip = ''
            self.anonymize_username()
            self.save()

        if storage.exists(picture_path):
            delete_photo.delay(picture_path)

        if storage.exists(original_picture_path):
            delete_photo.delay(original_picture_path)
Example #13
def sign(version_id, reviewer=False, resign=False, **kw):
    version = Version.objects.get(pk=version_id)
    app = version.addon
    log.info('Signing version: %s of app: %s' % (version_id, app))

    if not app.type == amo.ADDON_WEBAPP:
        log.error('[Webapp:%s] Attempt to sign something other than an app.' %
                  app.id)
        raise SigningError('Not an app')

    if not app.is_packaged:
        log.error('[Webapp:%s] Attempt to sign a non-packaged app.' % app.id)
        raise SigningError('Not packaged')

    try:
        file_obj = version.all_files[0]
    except IndexError:
        log.error(
            '[Webapp:%s] Attempt to sign an app with no files in version.' %
            app.id)
        raise SigningError('No file')

    path = (file_obj.signed_reviewer_file_path if reviewer else
            file_obj.signed_file_path)

    if storage.exists(path) and not resign:
        log.info('[Webapp:%s] Already signed app exists.' % app.id)
        return path

    if resign:
        z = zipfile.ZipFile(file_obj.file_path, 'r')
        if 'META-INF/ids.json' in z.namelist():
            # This zip is broken due to previously used bad signing
            # code. Rebuild it. (This code can be deleted once all
            # broken packages are re-signed.)
            tempf = tempfile.NamedTemporaryFile()
            zout = zipfile.ZipFile(tempf, 'w', zipfile.ZIP_DEFLATED)
            for f in sorted(z.infolist()):
                if f.filename != 'META-INF/ids.json':
                    zout.writestr(f, z.read(f.filename))
            zout.close()
            shutil.copyfile(tempf.name, file_obj.file_path)
            tempf.close()

    ids = json.dumps({
        'id': app.guid,
        'version': version_id
    })
    with statsd.timer('services.sign.app'):
        try:
            sign_app(file_obj.file_path, path, ids, reviewer)
        except SigningError:
            log.info('[Webapp:%s] Signing failed' % app.id)
            if storage.exists(path):
                storage.delete(path)
            raise
    log.info('[Webapp:%s] Signing complete.' % app.id)
    return path
Example #14
    def test_extra_methodes(self):
        default_storage.mkdir('test_directory')
        self.assertTrue(default_storage.exists('test_directory'))

        default_storage.mv('test_directory', 'moved_directory')
        self.assertTrue(default_storage.exists('moved_directory'))

        default_storage.delete('moved_directory')
        self.assertFalse(default_storage.exists('moved_directory'))
Example #15
def test_populate_e10s_feature_compatibility():
    # Create addons...
    # One must have no latest file object.
    addon_unreviewed = addon_factory(
        name='no current version', status=amo.STATUS_UNREVIEWED)
    addon_unreviewed.update(_current_version=None)
    assert addon_unreviewed.get_latest_file() is None

    # One must have a latest file object with no file on the filesystem.
    addon_no_file = addon_factory(name='no file')
    assert not storage.exists(addon_no_file.get_latest_file().file_path)

    # One must have a file, and be e10s incompatible
    addon = addon_factory(guid='guid@xpi', name='not e10s compatible')
    AMOPaths().xpi_copy_over(addon.get_latest_file(), 'extension.xpi')
    assert storage.exists(addon.get_latest_file().file_path)

    # One must have a file, and be e10s compatible
    addon_compatible = addon_factory(
        guid='guid-e10s@xpi', name='e10s compatible')
    AMOPaths().xpi_copy_over(
        addon_compatible.get_latest_file(), 'extension_e10s.xpi')
    assert storage.exists(addon_compatible.get_latest_file().file_path)

    # One must have a file, and be a web extension
    addon_webextension = addon_factory(
        guid='@webextension-guid', name='web extension')
    AMOPaths().xpi_copy_over(
        addon_webextension.get_latest_file(), 'webextension.xpi')
    assert storage.exists(addon_webextension.get_latest_file().file_path)

    # One must be unlisted, and compatible.
    addon_compatible_unlisted = addon_factory(
        guid='unlisted-guid-e10s@xpi', name='unlisted e10s compatible webext',
        is_listed=False)
    AMOPaths().xpi_copy_over(
        addon_compatible_unlisted.get_latest_file(), 'webextension_no_id.xpi')
    assert storage.exists(
        addon_compatible_unlisted.get_latest_file().file_path)

    # Call the command !
    call_command('process_addons', task='populate_e10s_feature_compatibility')

    assert AddonFeatureCompatibility.objects.count() == 3

    addon.reload()
    assert addon.feature_compatibility.pk
    assert addon.feature_compatibility.e10s == amo.E10S_UNKNOWN

    addon_compatible.reload()
    assert addon_compatible.feature_compatibility.pk
    assert addon_compatible.feature_compatibility.e10s == amo.E10S_COMPATIBLE

    addon_webextension.reload()
    assert addon_webextension.feature_compatibility.pk
    assert (addon_webextension.feature_compatibility.e10s ==
            amo.E10S_COMPATIBLE_WEBEXTENSION)
Example #16
def test_extract_header_img_missing():
    file_obj = os.path.join(
        settings.ROOT, 'src/olympia/devhub/tests/addons/static_theme.zip')
    data = {'images': {'headerURL': 'missing_file.png'}}
    dest_path = tempfile.mkdtemp()
    header_file = dest_path + '/missing_file.png'
    assert not default_storage.exists(header_file)

    utils.extract_header_img(file_obj, data, dest_path)
    assert not default_storage.exists(header_file)
Example #17
    def setup_files(self):
        # Clean out any left over stuff.
        if storage.exists(self.file.signed_file_path):
            storage.delete(self.file.signed_file_path)

        # Make sure the source file is there.
        if not storage.exists(self.file.file_path):
            os.makedirs(os.path.dirname(self.file.file_path))
            shutil.copyfile(self.packaged_app_path('mozball.zip'),
                            self.file.file_path)
Example #18
    def test_doesnt_remove_non_empty_directories(self):
        # Add an extra disabled file. The approved one should move, but not the
        # other, so the directory should be left intact.
        self.disabled_file = file_factory(
            version=self.version, status=amo.STATUS_DISABLED)
        self.addon.update(status=amo.STATUS_APPROVED)
        self.file_.update(status=amo.STATUS_APPROVED)
        with storage.open(self.file_.guarded_file_path, 'wb') as fp:
            fp.write(b'content')
        assert not storage.exists(self.file_.file_path)
        assert storage.exists(self.file_.guarded_file_path)
        with storage.open(self.disabled_file.guarded_file_path, 'wb') as fp:
            fp.write(b'disabled content')
        assert not storage.exists(self.disabled_file.file_path)
        assert storage.exists(self.disabled_file.guarded_file_path)

        cron.unhide_disabled_files()

        assert storage.exists(self.file_.file_path)
        assert not storage.exists(self.file_.guarded_file_path)

        # The disabled file shouldn't have moved.
        assert not storage.exists(self.disabled_file.file_path)
        assert storage.exists(self.disabled_file.guarded_file_path)
        # The directory in guarded file path should still exist.
        assert storage.exists(os.path.dirname(self.file_.guarded_file_path))
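Example #19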
    def test_media_storage(self):
        tmp_storage = MediaStorage()
        tmp_storage.save(self.test_string)
        name = tmp_storage.name

        tmp_storage = MediaStorage(name=name)
        self.assertEqual(self.test_string, tmp_storage.read())

        self.assertTrue(default_storage.exists(tmp_storage.get_full_path()))
        tmp_storage.remove()
        self.assertFalse(default_storage.exists(tmp_storage.get_full_path()))
Example #20
 def test_download__success(self):
     """
     Scenario: Download image, should download the image
     Expected:
     - image saved in collected images and 
     - its thumbnail is generated
     """
     url = 'http://s1.proxy03.twitpic.com/photos/large/399879761.jpg'
     download(url)
     self.assertTrue(default_storage.exists(self.image_path))
     self.assertTrue(default_storage.exists(self.thumb_image_path))
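Example #21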
def check_img_url(value, thumb=False):
    if not thumb:
        if default_storage.exists(value.file_path):
            return value.file_path.url
        else:
            return "http://placehold.it/%sx%s" % (value.file_height, value.file_width)
    else:
        if default_storage.exists(value.thumb_path):
            return value.thumb_path.url
        else:
            return "http://placehold.it/%sx%s" % (value.thumb_height, value.thumb_width)
Example #22
    def post(self, request, competition_name, round_name, param):
        """
        B{Set} the round file
        B{URL:} ../api/v1/set_round_file/<competition_name>/<round_name>/<param>/

        Upload resource

        :type  competition_name: str
        :param competition_name: The competition name
        :type  round_name: str
        :param round_name: The round name
        :type  param: str
        :param param: grid, lab or param_list
        :type  path: str
        :param path: The path to the file
        """
        path = request.data.get('path', None)

        if path is None:
            return Response({'status': 'Bad request',
                             'message': 'No file path was provided!'},
                            status=status.HTTP_400_BAD_REQUEST)

        if not default_storage.exists(path):
            return Response({'status': 'Bad request',
                             'message': 'The file that you requested doesn\'t exist!'},
                            status=status.HTTP_400_BAD_REQUEST)

        # verify that the path is valid
        if not path.startswith('resources/'):
            return Response({'status': 'Bad request',
                             'message': 'Invalid file!'},
                            status=status.HTTP_400_BAD_REQUEST)

        if param not in ['grid', 'lab', 'param_list']:
            return Response({'status': 'Bad request',
                             'message': 'Please provide one of those params: grid, lab or param_list'},
                            status=status.HTTP_400_BAD_REQUEST)

        competition = get_object_or_404(Competition.objects.all(), name=competition_name)
        r = get_object_or_404(Round.objects.all(), name=round_name, parent_competition=competition)

        # If a previous file is already attached for this param and is marked
        # as deletable, remove it before associating the new one.
        if bool(getattr(r, param + '_path', '')) and default_storage.exists(getattr(r, param + '_path', '')) \
                and getattr(r, param + '_can_delete'):
            default_storage.delete(getattr(r, param + '_path', None))

        setattr(r, param + "_path", default_storage.path(path))
        setattr(r, param + "_can_delete", False)
        r.save()

        return Response({'status': 'Uploaded',
                         'message': 'The file has been associated!'},
                        status=status.HTTP_201_CREATED)
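
A minimal client-side sketch of calling this endpoint, based on the URL pattern and field name in the docstring above (the host, credentials and resource path are assumptions):

import requests

# Hypothetical deployment URL and credentials.
resp = requests.post(
    'http://localhost:8000/api/v1/set_round_file/my_competition/round1/grid/',
    data={'path': 'resources/grids/grid1.xml'},
    auth=('admin', 'password'),
)
print(resp.status_code, resp.json())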
Example #23
def deletion(self, isMenu):
    if isMenu:
        if default_storage.exists(os.path.join(settings.MEDIA_ROOT, self.menuName)):
            shutil.rmtree(os.path.join(settings.MEDIA_ROOT, self.menuName), ignore_errors=True)
            return
    if default_storage.exists(os.path.join(settings.MEDIA_ROOT, self.logo.name)):
        if 'default.png' not in self.logo.name:
            os.remove(os.path.join(settings.MEDIA_ROOT, self.logo.name))
    if default_storage.exists(os.path.join(settings.MEDIA_ROOT, self.thumbnail.name)):
        if 'default.png' not in self.thumbnail.name:
            os.remove(os.path.join(settings.MEDIA_ROOT, self.thumbnail.name))
    return
Example #24
 def check_delete(self, file_, filename):
     """Test that when the File object is deleted, it is removed from the
     filesystem."""
     try:
         with storage.open(filename, 'w') as f:
             f.write('sample data\n')
         assert storage.exists(filename)
         file_.delete()
         assert not storage.exists(filename)
     finally:
         if storage.exists(filename):
             storage.delete(filename)
Example #25
 def test_file_handling(self):
     cf = CachedFile()
     val = SimpleUploadedFile("testfile.txt", b"file_content")
     cf.file.save("testfile.txt", val)
     cf.type = "text/plain"
     cf.filename = "testfile.txt"
     cf.save()
     assert default_storage.exists(cf.file.name)
     with default_storage.open(cf.file.name, 'r') as f:
         assert f.read().strip() == "file_content"
     cf.delete()
     assert not default_storage.exists(cf.file.name)
Example #26
 def test_delete_by_version(self):
     """Test that version (soft)delete doesn't delete the file."""
     f = File.objects.get(pk=67442)
     try:
         with storage.open(f.file_path, 'w') as fi:
             fi.write('sample data\n')
         assert storage.exists(f.file_path)
         f.version.delete()
         assert storage.exists(f.file_path)
     finally:
         if storage.exists(f.file_path):
             storage.delete(f.file_path)
Example #27
	def handle_noargs(self, **options):
		# First, clear all adjusted images that reference nonexistent
		# storage paths.
		storage_paths = AdjustedImage.objects.values_list('storage_path', flat=True).distinct()
		nonexistant = [path for path in storage_paths if not default_storage.exists(path)]
		self._delete_queryset(AdjustedImage.objects.filter(storage_path__in=nonexistant))

		# Second, clear all areas that reference nonexistent storage paths.
		storage_paths = Area.objects.values_list('storage_path', flat=True).distinct()
		nonexistant = [path for path in storage_paths if not default_storage.exists(path)]
		self._delete_queryset(Area.objects.filter(storage_path__in=nonexistant))

		# Now clear all duplicate adjusted images.
		fields = (
			'storage_path',
			'requested_width',
			'requested_height',
			'requested_crop',
			'requested_adjustment',
			'requested_max_width',
			'requested_max_height'
		)
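		# Group adjusted images by identical adjustment parameters; any group
		# with count > 1 contains duplicates to be removed.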
		kwargs_list = (AdjustedImage.objects
			.values(*fields)
			.annotate(count=models.Count('id'))
			.filter(count__gt=1)
			.values(*fields))
		duplicate_pks = []
		for kwargs in kwargs_list:
			pks = AdjustedImage.objects.filter(**kwargs).values_list('pk', flat=True)
			duplicate_pks.extend(list(pks)[1:])
		self._delete_queryset(AdjustedImage.objects.filter(pk__in=duplicate_pks),
							  reason='is a duplicate',
							  reason_plural='are duplicates')

		# Now clean up files that aren't referenced by any adjusted images.
		known_paths = set(AdjustedImage.objects.values_list('adjusted', flat=True).distinct())
		orphaned_count = 0
		for dirpath, dirnames, filenames in self._walk('daguerre', topdown=False):
			for filename in filenames:
				filepath = os.path.join(dirpath, filename)
				if filepath not in known_paths:
					orphaned_count += 1
					try:
						default_storage.delete(filepath)
					except IOError:
						pass
		if not orphaned_count:
			self.stdout.write("No orphaned files found.\n")
		else:
			self.stdout.write("Deleted {0} orphaned file{1}.\n".format(orphaned_count, pluralize(orphaned_count)))

		self.stdout.write("\n")
Example #28
 def test_remove_local_files(self):
     file_path = join(config.TEMP_DIR, 'test_remove_local.txt')
     with open(file_path, 'w') as wfile:
         wfile.write('dummy')
     new_path = utils.move_to_storage(storage, file_path, 'tests')
     local = models.LocalContent(
         url='http://whatever',
         local_path=new_path,
     )
     self.assertEqual(storage.exists(new_path), True)
     local.remove_files()
     self.assertEqual(local.local_path, '')
     self.assertEqual(storage.exists(new_path), False)
Example #29
def image_status(request, addon_id, addon, icon_size=64):
    # Default icon needs no checking.
    if not addon.icon_type or addon.icon_type.split('/')[0] == 'icon':
        icons = True
    else:
        icons = storage.exists(
            os.path.join(addon.get_icon_dir(), '%s-%s.png' % (
                addon.id, icon_size)))
    previews = all(storage.exists(p.thumbnail_path)
                   for p in addon.get_previews())
    return {'overall': icons and previews,
            'icons': icons,
            'previews': previews}
Example #30
    def test_ban_and_disable_related_content_bulk(self, hide_disabled_mock):
        user_sole = user_factory(email='*****@*****.**', fxa_id='13579')
        addon_sole = addon_factory(users=[user_sole])
        self.setup_user_to_be_have_content_disabled(user_sole)
        user_multi = user_factory(email='*****@*****.**', fxa_id='24680')
        innocent_user = user_factory()
        addon_multi = addon_factory(
            users=UserProfile.objects.filter(
                id__in=[user_multi.id, innocent_user.id]))
        self.setup_user_to_be_have_content_disabled(user_multi)

        # Now that everything is set up, disable/delete related content.
        UserProfile.ban_and_disable_related_content_bulk(
            [user_sole, user_multi])

        addon_sole.reload()
        addon_multi.reload()
        # the sole dev's add-on should have been disabled, but the author retained
        assert addon_sole.status == amo.STATUS_DISABLED
        assert list(addon_sole.authors.all()) == [user_sole]
        # shouldn't have been disabled as it has another author
        assert addon_multi.status != amo.STATUS_DISABLED
        assert list(addon_multi.authors.all()) == [innocent_user]

        # the File objects have been disabled
        assert not File.objects.filter(version__addon=addon_sole).exclude(
            status=amo.STATUS_DISABLED).exists()
        # But not for the Add-on that wasn't disabled
        assert File.objects.filter(version__addon=addon_multi).exclude(
            status=amo.STATUS_DISABLED).exists()

        assert not user_sole._ratings_all.exists()  # Even replies.
        assert not user_sole.collections.exists()
        assert not user_multi._ratings_all.exists()  # Even replies.
        assert not user_multi.collections.exists()

        assert not storage.exists(user_sole.picture_path)
        assert not storage.exists(user_sole.picture_path_original)
        assert not storage.exists(user_multi.picture_path)
        assert not storage.exists(user_multi.picture_path_original)

        assert user_sole.deleted
        assert user_sole.email == '*****@*****.**'
        assert user_sole.auth_id
        assert user_sole.fxa_id == '13579'
        assert user_multi.deleted
        assert user_multi.email == '*****@*****.**'
        assert user_multi.auth_id
        assert user_multi.fxa_id == '24680'

        hide_disabled_mock.assert_not_called()
Example #31
 def test_copy_to_mirror(self):
     f = File.objects.get(id=67442)
     self.clean_files(f)
     f.copy_to_mirror()
     assert storage.exists(f.mirror_file_path)
Example #32
def save_to_files(request):
    if request.method == "POST":
        print("POST: ", request.POST)
        print("FILES: ", request.FILES)
        try:
            form = UploadFileForm(data=request.POST, files=request.FILES)
            if form.is_valid():
                # get md5 value. Note: consider (file + parameters) as a whole md5
                form_file = form.cleaned_data['file']

                str_parameters = form.cleaned_data['parameters']
                print(str_parameters)
                print("OK here0")
                parameters = json.loads(str_parameters)
                print(parameters)
                print("OK here1")

                b_file = form_file.read()
                print("OK here 2")
                file_parameters = str_parameters.encode('utf-8') + b_file
                md5 = hashlib.md5(file_parameters).hexdigest()

                sub_base = "md5_data/"
                path = sub_base + md5

                # check if the file exists
                md5ob = get_object_or_None(UploadParametersMD5, md5=md5)
                print("Md5 existed in DB?: ", md5ob)

                if md5ob:
                    status_old = md5ob.status
                    print("existed in databse, code: ", status_old)
                    time_ = md5ob.time
                    if status_old == 200:
                        return_json = [{
                            'md5': md5,
                            'time': time_,
                            'save_status': 'Finished'
                        }]
                        return JsonResponse(return_json, safe=False)
                    ##elif status_old == 201:
                    ##    return_json = [{'md5': md5, 'time': time_, 'save_status': True}]
                    ##    return JsonResponse(return_json, safe=False)
                    elif status_old == 202:
                        return_json = [{
                            'md5': md5,
                            'time': time_,
                            'save_status': "Running"
                        }]
                        return JsonResponse(return_json, safe=False)

                # check if the file existed in filesystem
                if md5ob:
                    print("Previous saving failed, Re-saving now...")
                if default_storage.exists(path):
                    default_storage.delete(path)

                time_ = time.time()
                return_json = [{'md5': md5, 'time': time_}]

                # store md5 value and parameters into database, store file
                print("saving upload file...")
                path = default_storage.save(
                    sub_base + md5, form_file
                )  # note this path does not include the media root, e.g. it is actually stored in "media/data/xxxxxx"
                file_path = settings.MEDIA_ROOT + '/' + path
                # unpack the parameter details
                file_type = parameters['filetype']
                species = parameters['species']

                # insert the file info, parameters and time into the database
                # the initial status code is 202
                print("create model instance")
                a = UploadParametersMD5(md5=md5,
                                        status=202,
                                        file_type=file_type,
                                        species=species,
                                        time=time_,
                                        path=file_path)
                a.save()

                return_json = [{
                    'md5': md5,
                    'time': time_,
                    'save_status': True
                }]
                return JsonResponse(return_json, safe=False)
            else:
                print("Form not valid")
                print("Error: ", form.errors)
                return JsonResponse(
                    [{'md5': "", 'time': 0.0, 'save_status': False}],
                    safe=False)

        except Exception as e:
            print("Save to file Failed: ", e)
            return_json = [{'md5': "", 'time': 0.0, 'save_status': False}]
            return JsonResponse(return_json, safe=False)
Example #33
def download(request, resourceid, sender=Layer):

    instance = resolve_object(
        request,
        sender, {'pk': resourceid},
        permission='base.download_resourcebase',
        permission_msg=_(
            "You are not permitted to save or edit this resource."))

    if isinstance(instance, Layer):
        try:
            upload_session = instance.get_upload_session()
            layer_files = [
                item for idx, item in enumerate(
                    LayerFile.objects.filter(upload_session=upload_session))
            ]

            # Create Target Folder
            dirpath = tempfile.mkdtemp()
            dir_time_suffix = get_dir_time_suffix()
            target_folder = os.path.join(dirpath, dir_time_suffix)
            if not os.path.exists(target_folder):
                os.makedirs(target_folder)

            # Copy all Layer related files into a temporary folder
            for l in layer_files:
                if storage.exists(l.file):
                    geonode_layer_path = storage.path(l.file)
                    base_filename, original_ext = os.path.splitext(
                        geonode_layer_path)
                    shutil.copy2(geonode_layer_path, target_folder)

            # Let's check for associated SLD files (if any)
            try:
                for s in instance.styles.all():
                    # Dump the locally stored SLD body.
                    sld_path = os.path.join(target_folder,
                                            "".join([s.name, ".sld"]))
                    sld_file = open(sld_path, "w")
                    sld_file.write(s.sld_body.strip())
                    sld_file.close()

                    # Also fetch the remote copy of the SLD and store it
                    # alongside the local one.
                    try:
                        response = requests.get(s.sld_url, timeout=TIMEOUT)
                        sld_remote_content = response.text
                        remote_sld_path = os.path.join(
                            target_folder, "".join([s.name, "_remote.sld"]))
                        sld_file = open(remote_sld_path, "w")
                        sld_file.write(sld_remote_content.strip())
                    except:
                        traceback.print_exc()
                        tb = traceback.format_exc()
                        logger.debug(tb)
                    finally:
                        sld_file.close()
            except:
                traceback.print_exc()
                tb = traceback.format_exc()
                logger.debug(tb)

            # Let's dump metadata
            target_md_folder = os.path.join(target_folder, ".metadata")
            if not os.path.exists(target_md_folder):
                os.makedirs(target_md_folder)

            try:
                links = Link.objects.filter(resource=instance.resourcebase_ptr)
                for link in links:
                    link_name = custom_slugify(link.name)
                    link_file = os.path.join(
                        target_md_folder,
                        "".join([link_name, ".%s" % link.extension]))
                    if link.link_type in ('metadata', 'data', 'image'):
                        link_file = open(link_file, "wb")
                        try:
                            response = requests.get(link.url,
                                                    stream=True,
                                                    timeout=TIMEOUT)
                            response.raw.decode_content = True
                            shutil.copyfileobj(response.raw, link_file)
                        except:
                            traceback.print_exc()
                            tb = traceback.format_exc()
                            logger.debug(tb)
                        finally:
                            link_file.close()
                    elif link.link_type.startswith('OGC'):
                        link_file = open(link_file, "w")
                        link_file.write(link.url.strip())
                        link_file.close()
            except:
                traceback.print_exc()
                tb = traceback.format_exc()
                logger.debug(tb)

            # ZIP everything and return
            target_file_name = "".join([instance.name, ".zip"])
            target_file = os.path.join(dirpath, target_file_name)
            zip_dir(target_folder, target_file)
            response = HttpResponse(content=open(target_file, 'rb'),
                                    status=200,
                                    content_type="application/zip")
            response[
                'Content-Disposition'] = 'attachment; filename="%s"' % target_file_name
            return response
        except NotImplementedError:
            traceback.print_exc()
            tb = traceback.format_exc()
            logger.debug(tb)
            return HttpResponse(json.dumps({'error': 'file_not_found'}),
                                status=404,
                                content_type="application/json")

    return HttpResponse(json.dumps({'error': 'unauthorized_request'}),
                        status=403,
                        content_type="application/json")
Example #34
 def testExistsRelative(self):
     self.assertFalse(
         default_storage.exists("admin/css/../img/sorting-icons.svg"))
     with self.save_file("admin/img/sorting-icons.svg"):
         self.assertTrue(
             default_storage.exists("admin/css/../img/sorting-icons.svg"))
Example #35
 def testExists(self):
     self.assertFalse(default_storage.exists("foo.txt"))
     with self.save_file():
         self.assertTrue(default_storage.exists("foo.txt"))
         self.assertFalse(default_storage.exists("fo"))
Example #36
 def _unprep_package(self, name):
     package = packager_path(name)
     if storage.exists(package):
         storage.delete(package)
Example #37
 def test_move_chunking(self):
     src = self.newfile('src.txt', '<contents>')
     dest = self.path('somedir/dest.txt')
     move_stored_file(src, dest, chunk_size=1)
     assert self.contents(dest) == b'<contents>'
     assert not storage.exists(src)
Example #38
def create_thumbnail(instance,
                     thumbnail_remote_url,
                     thumbnail_create_url=None,
                     check_bbox=False,
                     ogc_client=None,
                     overwrite=False,
                     width=240,
                     height=200):
    thumbnail_dir = os.path.join(settings.MEDIA_ROOT, 'thumbs')
    if not os.path.exists(thumbnail_dir):
        os.makedirs(thumbnail_dir)
    thumbnail_name = None
    if isinstance(instance, Layer):
        thumbnail_name = 'layer-%s-thumb.png' % instance.uuid
    elif isinstance(instance, Map):
        thumbnail_name = 'map-%s-thumb.png' % instance.uuid
    thumbnail_path = os.path.join(thumbnail_dir, thumbnail_name)
    if overwrite or not storage.exists(thumbnail_path):
        BBOX_DIFFERENCE_THRESHOLD = 1e-5

        if not thumbnail_create_url:
            thumbnail_create_url = thumbnail_remote_url

        if check_bbox:
            # Check if the bbox is invalid
            valid_x = (float(instance.bbox_x0) -
                       float(instance.bbox_x1))**2 > BBOX_DIFFERENCE_THRESHOLD
            valid_y = (float(instance.bbox_y1) -
                       float(instance.bbox_y0))**2 > BBOX_DIFFERENCE_THRESHOLD
        else:
            valid_x = True
            valid_y = True

        image = None

        if valid_x and valid_y:
            Link.objects.get_or_create(resource=instance.get_self_resource(),
                                       url=thumbnail_remote_url,
                                       defaults=dict(
                                           extension='png',
                                           name="Remote Thumbnail",
                                           mime='image/png',
                                           link_type='image',
                                       ))
            ResourceBase.objects.filter(id=instance.id) \
                .update(thumbnail_url=thumbnail_remote_url)

            # Download thumbnail and save it locally.
            if not ogc_client:
                ogc_client = http_client

            if ogc_client:
                try:
                    params = {'width': width, 'height': height}
                    # Add the bbox param only if the bbox is different to [None, None,
                    # None, None]
                    if None not in instance.bbox:
                        params['bbox'] = instance.bbox_string
                        params['crs'] = instance.srid

                    # Append any params that are not already present in the
                    # thumbnail URL as extra query arguments.
                    for _p in params.keys():
                        if _p.lower() not in thumbnail_create_url.lower():
                            thumbnail_create_url = thumbnail_create_url + '&%s=%s' % (
                                _p, params[_p])
                    resp, image = ogc_client.request(thumbnail_create_url)
                    if 'ServiceException' in image or \
                       resp.status_code < 200 or resp.status_code > 299:
                        msg = 'Unable to obtain thumbnail: %s' % image
                        logger.error(msg)

                        # Replace error message with None.
                        image = None
                except BaseException as e:
                    logger.exception(e)

                    # Replace error message with None.
                    image = None

            if check_ogc_backend(geoserver.BACKEND_PACKAGE):
                if image is None and instance.bbox:
                    instance_bbox = instance.bbox[0:4]
                    request_body = {
                        'bbox': [str(coord) for coord in instance_bbox],
                        'srid': instance.srid,
                        'width': width,
                        'height': height
                    }
                    if thumbnail_create_url:
                        request_body[
                            'thumbnail_create_url'] = thumbnail_create_url
                    elif instance.alternate:
                        request_body['layers'] = instance.alternate
                    image = _prepare_thumbnail_body_from_opts(request_body)

                if image is not None:
                    instance.save_thumbnail(thumbnail_name, image=image)
                else:
                    msg = 'Unable to obtain thumbnail for: %s' % instance
                    logger.error(msg)
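Example #39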
def thumbnail(image_url, width, height, quality=95):
    """
    Given the URL to an image, resizes the image using the given width and
    height on the first time it is requested, and returns the URL to the new
    resized image. If width or height are zero then the original ratio is
    maintained.
    """
    if not image_url:
        return ""

    image_url = unquote(unicode(image_url))
    if image_url.startswith(settings.MEDIA_URL):
        image_url = image_url.replace(settings.MEDIA_URL, "", 1)
    image_dir, image_name = os.path.split(image_url)
    image_prefix, image_ext = os.path.splitext(image_name)
    filetype = {".png": "PNG", ".gif": "GIF"}.get(image_ext, "JPEG")
    thumb_name = "%s-%sx%s%s" % (image_prefix, width, height, image_ext)
    thumb_dir = os.path.join(settings.MEDIA_ROOT, image_dir,
                             settings.THUMBNAILS_DIR_NAME)
    if not os.path.exists(thumb_dir):
        os.makedirs(thumb_dir)
    thumb_path = os.path.join(thumb_dir, thumb_name)
    thumb_url = "%s/%s" % (settings.THUMBNAILS_DIR_NAME,
                           quote(thumb_name.encode("utf-8")))
    image_url_path = os.path.dirname(image_url)
    if image_url_path:
        thumb_url = "%s/%s" % (image_url_path, thumb_url)

    try:
        thumb_exists = os.path.exists(thumb_path)
    except UnicodeEncodeError:
        # The image was saved to a filesystem with utf-8 support, but
        # somehow the locale has changed and the filesystem no longer
        # supports utf-8.
        from mezzanine.core.exceptions import FileSystemEncodingChanged
        raise FileSystemEncodingChanged()
    if thumb_exists:
        # Thumbnail exists, don't generate it.
        return thumb_url
    elif not default_storage.exists(image_url):
        # Requested image does not exist, just return its URL.
        return image_url

    image = Image.open(default_storage.open(image_url))
    image_info = image.info
    width = int(width)
    height = int(height)

    # If already right size, don't do anything.
    if width == image.size[0] and height == image.size[1]:
        return image_url
    # Set dimensions.
    if width == 0:
        width = image.size[0] * height / image.size[1]
    elif height == 0:
        height = image.size[1] * width / image.size[0]
    if image.mode not in ("L", "RGBA"):
        image = image.convert("RGBA")
    # Required for progressive jpgs.
    ImageFile.MAXBLOCK = image.size[0] * image.size[1]
    try:
        image = ImageOps.fit(image, (width, height), Image.ANTIALIAS)
        image = image.save(thumb_path, filetype, quality=quality, **image_info)
        # Push a remote copy of the thumbnail if MEDIA_URL is
        # absolute.
        if "://" in settings.MEDIA_URL:
            with open(thumb_path, "r") as f:
                default_storage.save(thumb_url, File(f))
    except Exception:
        # If an error occurred, a corrupted image may have been saved,
        # so remove it, otherwise the check for it existing will just
        # return the corrupted image next time it's requested.
        try:
            os.remove(thumb_path)
        except Exception:
            pass
        return image_url
    return thumb_url
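
A minimal sketch of exercising this helper directly from Python (the image path is an assumption and must exist under MEDIA_ROOT):

# Returns the URL of a cached 100x100 thumbnail, generating it on first use.
thumb_url = thumbnail("photos/example.jpg", 100, 100)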
Example #40
def import_dir_locked():
    arp_root = get_arp_root()
    return default_storage.exists(settings.ARP_IMPORT_LOCK)
Example #41
def get_arp_root():
    if not default_storage.exists(settings.ARP_ROOT):
        os.mkdir(default_storage.path(settings.ARP_ROOT))
    return settings.ARP_ROOT
Example #42
 def create_paths(self):
     if not storage.exists(self.file.file_path):
         with storage.open(self.file.file_path, 'w') as f:
             f.write('test data\n')
Example #43
def upload_to_internet_archive(self, link_guid):
    try:
        link = Link.objects.get(guid=link_guid)

        if link.internet_archive_upload_status == 'failed_permanently':
            return

    except:
        print "Link %s does not exist" % link_guid
        return

    if not settings.UPLOAD_TO_INTERNET_ARCHIVE:
        return

    if not link.can_upload_to_internet_archive():
        print "Not eligible for upload."
        return

    metadata = {
        "collection":
        settings.INTERNET_ARCHIVE_COLLECTION,
        "title":
        '%s: %s' % (link_guid, truncatechars(link.submitted_title, 50)),
        "mediatype":
        'web',
        "description":
        'Perma.cc archive of %s created on %s.' % (
            link.submitted_url,
            link.creation_timestamp,
        ),
        "contributor":
        'Perma.cc',
        "submitted_url":
        link.submitted_url,
        "perma_url":
        "http://%s/%s" % (settings.HOST, link_guid),
        "external-identifier":
        'urn:X-perma:%s' % link_guid,
    }

    identifier = settings.INTERNET_ARCHIVE_IDENTIFIER_PREFIX + link_guid
    try:
        if default_storage.exists(link.warc_storage_file()):
            item = internetarchive.get_item(identifier)

            # if item already exists (but has been removed),
            # ia won't update its metadata in upload function
            if item.exists and item.metadata['title'] == 'Removed':
                item.modify_metadata(
                    metadata,
                    access_key=settings.INTERNET_ARCHIVE_ACCESS_KEY,
                    secret_key=settings.INTERNET_ARCHIVE_SECRET_KEY,
                )

            warc_name = os.path.basename(link.warc_storage_file())

            # copy warc to local disk storage for upload
            temp_warc_file = tempfile.TemporaryFile()
            copy_file_data(default_storage.open(link.warc_storage_file()),
                           temp_warc_file)
            temp_warc_file.seek(0)

            success = internetarchive.upload(
                identifier,
                {warc_name: temp_warc_file},
                metadata=metadata,
                access_key=settings.INTERNET_ARCHIVE_ACCESS_KEY,
                secret_key=settings.INTERNET_ARCHIVE_SECRET_KEY,
                retries=10,
                retries_sleep=60,
                verbose=True,
            )

            if success:
                link.internet_archive_upload_status = 'completed'
                link.save()

            else:
                link.internet_archive_upload_status = 'failed'
                self.retry(
                    exc=Exception("Internet Archive reported upload failure."))

            return
        else:
            link.internet_archive_upload_status = 'failed_permanently'
            link.save()

    except requests.ConnectionError as e:
        logger.exception(
            "Upload to Internet Archive task failed because of a connection error. \nLink GUID: %s\nError: %s"
            % (link.pk, e))
        return
Example #44
 def file_exists(self):
     return default_storage.exists(str(self.image))
Example #45
def handle_upload_validation_result(results, upload_pk, channel):
    """Annotate a set of validation results and save them to the given
    FileUpload instance."""
    upload = FileUpload.objects.get(pk=upload_pk)

    if (not upload.addon_id or
            not upload.addon.find_latest_version(channel=channel, exclude=())):
        # Legacy submission restrictions apply if:
        # - It's the very first upload (there is no addon id yet)
        # - It's the first upload in that channel
        results = annotate_new_legacy_addon_restrictions(results=results)
    if upload.addon_id and upload.version:
        results = annotate_webext_incompatibilities(
            results=results,
            file_=None,
            addon=upload.addon,
            version_string=upload.version,
            channel=channel)

    results = skip_signing_warning_if_signing_server_not_configured(results)
    upload.validation = json.dumps(results)
    upload.save()  # We want to hit the custom save().

    # Track the time it took from first upload through validation
    # until the results were processed and saved.
    upload_start = utc_millesecs_from_epoch(upload.created)
    now = datetime.datetime.now()
    now_ts = utc_millesecs_from_epoch(now)
    delta = now_ts - upload_start
    statsd.timing('devhub.validation_results_processed', delta)

    if not storage.exists(upload.path):
        # TODO: actually fix this so we can get stats. It seems that
        # the file may get moved, but this needs more investigation.
        log.warning('Scaled upload stats were not tracked. File is '
                    'missing: {}'.format(upload.path))
        return

    size = Decimal(storage.size(upload.path))
    megabyte = Decimal(1024 * 1024)

    # Stash separate metrics for small / large files.
    quantifier = 'over' if size > megabyte else 'under'
    statsd.timing(
        'devhub.validation_results_processed_{}_1mb'.format(quantifier), delta)

    # Scale the upload / processing time by package size (in MB)
    # so we can normalize large XPIs which naturally take longer to validate.
    scaled_delta = None
    size_in_mb = size / megabyte
    if size > 0:
        # If the package is smaller than 1MB, don't scale it. This should
        # help account for validator setup time.
        unit = size_in_mb if size > megabyte else Decimal(1)
        scaled_delta = Decimal(delta) / unit
        statsd.timing('devhub.validation_results_processed_per_mb',
                      scaled_delta)

    log.info('Time to process and save upload validation; '
             'upload.pk={upload}; processing_time={delta}; '
             'scaled_per_mb={scaled}; upload_size_in_mb={size_in_mb}; '
             'created={created}; now={now}'.format(delta=delta,
                                                   upload=upload.pk,
                                                   created=upload.created,
                                                   now=now,
                                                   scaled=scaled_delta,
                                                   size_in_mb=size_in_mb))
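# A self-contained sketch of the per-MB scaling above, assuming only the
# standard library; scale_processing_time() and its arguments are hypothetical
# names for illustration.
from decimal import Decimal


def scale_processing_time(delta_ms, size_bytes):
    """Normalize a processing time by package size, in milliseconds per MB.

    Packages of 1 MB or less are left unscaled, mirroring the intent of
    absorbing fixed validator setup time.
    """
    megabyte = Decimal(1024 * 1024)
    size = Decimal(size_bytes)
    if size <= 0:
        return None
    unit = size / megabyte if size > megabyte else Decimal(1)
    return Decimal(delta_ms) / unit


# e.g. an 8000 ms validation of a 4 MB package reports 2000 ms per MB, while a
# 200 KB package keeps its raw 8000 ms figure.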
Пример #46
0
def testExistsDir(self):
    self.assertFalse(default_storage.exists("foo/"))
    with self.save_file(name="foo/bar.txt"):
        self.assertTrue(default_storage.exists("foo/"))
Пример #47
0
def file_exists(self):
    return default_storage.exists(self.file.name)
Пример #48
0
def thumbnail(
    image_url,
    width,
    height,
    upscale=True,
    quality=95,
    left=0.5,
    top=0.5,
    padding=False,
    padding_color="#fff",
):
    """
    Given the URL to an image, resizes the image using the given width
    and height on the first time it is requested, and returns the URL
    to the new resized image. If width or height are zero then original
    ratio is maintained. When ``upscale`` is False, images smaller than
    the given size will not be grown to fill that size. The given width
    and height thus act as maximum dimensions.
    """

    if not image_url:
        return ""
    try:
        from PIL import Image, ImageFile, ImageOps
    except ImportError:
        return ""

    image_url = unquote(str(image_url)).split("?")[0]
    if image_url.startswith(settings.MEDIA_URL):
        image_url = image_url.replace(settings.MEDIA_URL, "", 1)
    image_dir, image_name = os.path.split(image_url)
    image_prefix, image_ext = os.path.splitext(image_name)
    filetype = {".png": "PNG", ".gif": "GIF"}.get(image_ext.lower(), "JPEG")
    thumb_name = "%s-%sx%s" % (image_prefix, width, height)
    if not upscale:
        thumb_name += "-no-upscale"
    if left != 0.5 or top != 0.5:
        left = min(1, max(0, left))
        top = min(1, max(0, top))
        thumb_name = "%s-%sx%s" % (thumb_name, left, top)
    thumb_name += "-padded-%s" % padding_color if padding else ""
    thumb_name = "%s%s" % (thumb_name, image_ext)

    # `image_name` is used here for the directory path, as each image
    # requires its own sub-directory using its own name - this is so
    # we can consistently delete all thumbnails for an individual
    # image, which is something we do in filebrowser when a new image
    # is written, allowing us to purge any previously generated
    # thumbnails that may match a new image name.
    thumb_dir = os.path.join(
        settings.MEDIA_ROOT, image_dir, settings.THUMBNAILS_DIR_NAME, image_name
    )
    if not os.path.exists(thumb_dir):
        try:
            os.makedirs(thumb_dir)
        except OSError:
            pass

    thumb_path = os.path.join(thumb_dir, thumb_name)
    thumb_url = "%s/%s/%s" % (
        settings.THUMBNAILS_DIR_NAME,
        quote(image_name.encode("utf-8")),
        quote(thumb_name.encode("utf-8")),
    )
    image_url_path = os.path.dirname(image_url)
    if image_url_path:
        thumb_url = "%s/%s" % (image_url_path, thumb_url)

    try:
        thumb_exists = os.path.exists(thumb_path)
    except UnicodeEncodeError:
        # The image was saved on a filesystem that supported utf-8, but
        # the locale has since changed and the filesystem no longer
        # supports utf-8.
        from mezzanine.core.exceptions import FileSystemEncodingChanged

        raise FileSystemEncodingChanged()
    if thumb_exists:
        # Thumbnail exists, don't generate it.
        return thumb_url
    elif not default_storage.exists(image_url):
        # Requested image does not exist, just return its URL.
        return image_url

    f = default_storage.open(image_url)
    try:
        image = Image.open(f)
    except:  # noqa
        # Invalid image format.
        return image_url

    image_info = image.info

    # Transpose to align the image to its orientation if necessary.
    # If the image is transposed, delete the exif information as
    # not all browsers support the CSS image-orientation:
    # - http://caniuse.com/#feat=css-image-orientation
    try:
        orientation = image._getexif().get(0x0112)
    except:  # noqa
        orientation = None
    if orientation:
        methods = {
            2: (Image.FLIP_LEFT_RIGHT,),
            3: (Image.ROTATE_180,),
            4: (Image.FLIP_TOP_BOTTOM,),
            5: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_90),
            6: (Image.ROTATE_270,),
            7: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_270),
            8: (Image.ROTATE_90,),
        }.get(orientation, ())
        if methods:
            image_info.pop("exif", None)
            for method in methods:
                image = image.transpose(method)

    to_width = int(width)
    to_height = int(height)
    from_width = image.size[0]
    from_height = image.size[1]

    if not upscale:
        to_width = min(to_width, from_width)
        to_height = min(to_height, from_height)

    # Set dimensions.
    if to_width == 0:
        to_width = from_width * to_height // from_height
    elif to_height == 0:
        to_height = from_height * to_width // from_width
    if image.mode not in ("P", "L", "RGBA") and filetype not in ("JPG", "JPEG"):
        try:
            image = image.convert("RGBA")
        except:  # noqa
            return image_url
    # Required for progressive jpgs.
    ImageFile.MAXBLOCK = 2 * (max(image.size) ** 2)

    # Padding.
    if padding and to_width and to_height:
        from_ratio = float(from_width) / from_height
        to_ratio = float(to_width) / to_height
        pad_size = None
        if to_ratio < from_ratio:
            pad_height = int(to_height * (float(from_width) / to_width))
            pad_size = (from_width, pad_height)
            pad_top = (pad_height - from_height) // 2
            pad_left = 0
        elif to_ratio > from_ratio:
            pad_width = int(to_width * (float(from_height) / to_height))
            pad_size = (pad_width, from_height)
            pad_top = 0
            pad_left = (pad_width - from_width) // 2
        if pad_size is not None:
            pad_container = Image.new("RGBA", pad_size, padding_color)
            pad_container.paste(image, (pad_left, pad_top))
            image = pad_container

    # Create the thumbnail.
    to_size = (to_width, to_height)
    to_pos = (left, top)
    try:
        image = ImageOps.fit(image, to_size, Image.ANTIALIAS, 0, to_pos)
        image = image.save(thumb_path, filetype, quality=quality, **image_info)
        # Push a remote copy of the thumbnail if MEDIA_URL is
        # absolute.
        if "://" in settings.MEDIA_URL:
            with open(thumb_path, "rb") as f:
                default_storage.save(unquote(thumb_url), File(f))
    except Exception:
        # If an error occurred, a corrupted image may have been saved,
        # so remove it, otherwise the check for it existing will just
        # return the corrupted image next time it's requested.
        try:
            os.remove(thumb_path)
        except Exception:
            pass
        return image_url
    return thumb_url
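# A self-contained sketch of the path convention described in the comments
# above; it omits the padding suffix and the left/top clamping, and the helper
# name is hypothetical.
import os


def thumb_storage_path(image_url, thumbnails_dir, width, height,
                       upscale=True, left=0.5, top=0.5):
    """Return the relative path a generated thumbnail would be stored under."""
    image_dir, image_name = os.path.split(image_url)
    prefix, ext = os.path.splitext(image_name)
    name = "%s-%sx%s" % (prefix, width, height)
    if not upscale:
        name += "-no-upscale"
    if left != 0.5 or top != 0.5:
        name = "%s-%sx%s" % (name, left, top)
    # Each source image gets its own sub-directory named after the image, so
    # every thumbnail belonging to that image can be purged in one pass.
    return os.path.join(image_dir, thumbnails_dir, image_name, name + ext)


# e.g. thumb_storage_path("uploads/photo.jpg", ".thumbnails", 300, 0)
# returns "uploads/.thumbnails/photo.jpg/photo-300x0.jpg".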
Пример #49
0
    def test_moderated_upload(self):
        """
        Test if moderation flag works
        """
        with self.settings(ADMIN_MODERATE_UPLOADS=False):
            self.client.login(username=self.user, password=self.passwd)

            input_path = self._get_input_path()

            with open(input_path, 'rb') as f:
                data = {
                    'title': 'document title',
                    'doc_file': f,
                    'resource': '',
                    'extension': 'txt',
                    'permissions': '{}',
                }
                resp = self.client.post(self.document_upload_url, data=data)
                self.assertEqual(resp.status_code, 200)
            dname = 'document title'
            _d = Document.objects.get(title=dname)

            self.assertTrue(_d.is_published)
            uuid = _d.uuid
            _d.delete()

            from geonode.documents.utils import delete_orphaned_document_files
            _, document_files_before = storage.listdir(
                os.path.join("documents", "document"))
            deleted = delete_orphaned_document_files()
            _, document_files_after = storage.listdir(
                os.path.join("documents", "document"))
            self.assertTrue(len(deleted) > 0)
            self.assertEqual(
                set(deleted),
                set(document_files_before) - set(document_files_after))

            from geonode.base.utils import delete_orphaned_thumbs
            thumb_files_before = get_thumbs()
            deleted = delete_orphaned_thumbs()
            thumb_files_after = get_thumbs()
            if len(thumb_files_before):
                self.assertTrue(
                    len(deleted) > 0,
                    f"before: {thumb_files_before} - deleted: {deleted} - after: {thumb_files_after}"
                )
                self.assertEqual(
                    set(deleted),
                    set(thumb_files_before) - set(thumb_files_after),
                    f"deleted: {deleted} vs {set(thumb_files_before) - set(thumb_files_after)}"
                )

            fn = os.path.join(os.path.join("documents", "document"),
                              os.path.basename(input_path))
            self.assertFalse(storage.exists(fn))

            files = [thumb for thumb in get_thumbs() if uuid in thumb]
            self.assertEqual(len(files), 0)

        with self.settings(ADMIN_MODERATE_UPLOADS=True):
            self.client.login(username=self.user, password=self.passwd)

            norman = get_user_model().objects.get(username="******")
            group = GroupProfile.objects.get(slug="bar")
            input_path = self._get_input_path()
            with open(input_path, 'rb') as f:
                data = {
                    'title': 'document title',
                    'doc_file': f,
                    'resource': '',
                    'extension': 'txt',
                    'permissions': '{}',
                }
                resp = self.client.post(self.document_upload_url, data=data)
                self.assertEqual(resp.status_code, 200)
            dname = 'document title'
            _d = Document.objects.get(title=dname)
            self.assertFalse(_d.is_approved)
            self.assertTrue(_d.is_published)

            group.join(norman)
            self.assertFalse(group.user_is_role(norman, "manager"))
            GroupMember.objects.get(group=group, user=norman).promote()
            self.assertTrue(group.user_is_role(norman, "manager"))

            self.client.login(username="******", password="******")
            resp = self.client.get(reverse('document_detail', args=(_d.id, )))
            # Forbidden
            self.assertEqual(resp.status_code, 403)
            _d.group = group.group
            _d.save()
            resp = self.client.get(reverse('document_detail', args=(_d.id, )))
            # Allowed - edit permissions
            self.assertEqual(resp.status_code, 200)
            perms_list = get_perms(norman, _d.get_self_resource()) + get_perms(
                norman, _d)
            self.assertTrue('change_resourcebase_metadata' in perms_list)
            GroupMember.objects.get(group=group, user=norman).demote()
            self.assertFalse(group.user_is_role(norman, "manager"))
            resp = self.client.get(reverse('document_detail', args=(_d.id, )))
            # Allowed - no edit
            self.assertEqual(resp.status_code, 200)
            perms_list = get_perms(norman, _d.get_self_resource()) + get_perms(
                norman, _d)
            self.assertFalse('change_resourcebase_metadata' in perms_list)
            group.leave(norman)
Пример #50
0
def delete_photo(self, photo):
    if default_storage.exists(photo.name):
        default_storage.delete(photo.name)
Пример #51
0
def run_validator(path, for_appversions=None, test_all_tiers=False,
                  overrides=None, compat=False, listed=True):
    """A pre-configured wrapper around the addon validator.

    *path*
        Path to addon / extension file to validate.

    *for_appversions=None*
        An optional dict of application versions to validate this addon
        for. The key is an application GUID and its value is a list of
        versions.

    *test_all_tiers=False*
        When False (default) the validator will not continue if it
        encounters fatal errors.  When True, all tests in all tiers are run.
        See bug 615426 for discussion on this default.

    *overrides=None*
        Normally the validator gets info from the manifest but there are a
        few things we need to override. See validator for supported overrides.
        Example: {'targetapp_maxVersion': {'<app guid>': '<version>'}}

    *compat=False*
        Set this to `True` when performing a bulk validation. This allows the
        validator to ignore certain tests that should not be run during bulk
        validation (see bug 735841).

    *listed=True*
        If the addon is unlisted, treat it as if it was a self hosted one
        (don't fail on the presence of an updateURL).

    To validate the addon for compatibility with Firefox 5 and 6,
    you'd pass in::

        for_appversions={amo.FIREFOX.guid: ['5.0.*', '6.0.*']}

    Not all application versions will have a set of registered
    compatibility tests.
    """
    from validator.validate import validate

    apps = dump_apps.Command.JSON_PATH
    if not os.path.exists(apps):
        call_command('dump_apps')

    with NamedTemporaryFile(suffix='_' + os.path.basename(path)) as temp:
        if path and not os.path.exists(path) and storage.exists(path):
            # This file doesn't exist locally. Write it to our
            # currently-open temp file and switch to that path.
            copyfileobj(storage.open(path), temp.file)
            path = temp.name

        with statsd.timer('devhub.validator'):
            json_result = validate(
                path,
                for_appversions=for_appversions,
                format='json',
                # When False, this flag says to stop testing after one
                # tier fails.
                determined=test_all_tiers,
                approved_applications=apps,
                overrides=overrides,
                compat_test=compat,
                listed=listed
            )

        track_validation_stats(json_result)

        return json_result
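# A hedged usage sketch of the wrapper above; the function name, xpi_path and
# firefox_guid arguments are hypothetical placeholders.
import json


def validate_for_firefox_compat(xpi_path, firefox_guid):
    """Bulk-validate an add-on for compatibility with Firefox 5 and 6."""
    result_json = run_validator(
        xpi_path,
        for_appversions={firefox_guid: ['5.0.*', '6.0.*']},
        compat=True,           # bulk validation mode (see bug 735841)
        test_all_tiers=True,   # keep running tiers past fatal errors
    )
    return json.loads(result_json)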
Пример #52
0
def has_been_copied(self):
    """Checks if file has been copied to mirror"""
    if not self.mirror_file_path:
        return False
    return storage.exists(self.mirror_file_path)
Пример #53
0
def test_file_on_mirror(self):
    # Make sure the mirror dir is clear.
    if storage.exists(os.path.dirname(self.file.mirror_file_path)):
        rmtree(os.path.dirname(self.file.mirror_file_path))
    new_file = tasks.repackage_jetpack(self.builder_data())
    assert storage.exists(new_file.mirror_file_path)
Пример #54
0
    def test_cachedir_tag(self):
        self.assertTrue(default_storage.exists(PHOTOLOGUE_CACHEDIRTAG))

        content = default_storage.open(PHOTOLOGUE_CACHEDIRTAG).read()
        self.assertEqual(content, b"Signature: 8a477f597d28d172789f06886806bc55")
Пример #55
0
    def create(self, request, *args, **kwargs):
        """
        Simplified large-file upload endpoint: handles large uploads in chunks, but does not support resumable uploads
        """
        chunk_file = request.data.get('chunk_file', None)  # chunk file
        chunk_md5 = request.data.get('chunk_md5', None)  # MD5 of the chunk
        chunk_index = request.data.get('chunk_index', None)  # position of the chunk
        chunks_num = request.data.get('chunks_num', None)  # total number of chunks, used to tell when all chunks have been uploaded
        file_name = request.data.get('file_name', None)  # name of the complete file, used when merging the chunks after upload
        file_md5 = request.data.get('file_md5', None)  # MD5 of the complete file, used to verify the merged result
        group_id = request.data.get('group', None)
        file_type = request.data.get('file_type', None)
        if not chunk_file or not chunk_index or not chunks_num or not file_name:
            result = {'success': False,
                      'messages': 'Upload failed, missing required parameters: chunk_file/chunk_index/chunks_num/file_name'}
            return Response(result, status=status.HTTP_400_BAD_REQUEST)

        file_type_name = FileType.get(int(file_type), 'default')
        try:
            group = FileGroup.objects.get(pk=int(group_id))
            group_name = group.name
        except:
            group = None
            group_name = 'None'
        file_path = f'{file_type_name}/{request.user.username}/{group_name}'
        base_path = create_or_get_directory(f'{settings.MEDIA_ROOT}/{file_path}')
        base_chunk_path = create_or_get_directory(f'{base_path}/{file_name}-tmp')
        chunk_path = os.path.join(base_chunk_path, f'{file_name}.part{chunk_index}')
        # default_storage does not overwrite files; if the file already exists, delete it and save again
        if default_storage.exists(chunk_path):
            default_storage.delete(chunk_path)
        # Save the chunk
        default_storage.save(chunk_path, ContentFile(chunk_file.read()))
        # Verify that the chunk MD5 matches
        if chunk_md5:
            chunk_file_md5 = check_md5_sum(file_name=chunk_path)
            # The saved chunk content does not match the uploaded chunk
            if chunk_file_md5 != chunk_md5:
                result = {'success': False, 'messages': 'File upload error, MD5 values do not match',
                          'results': {'upload_md5': chunk_md5, 'save_md5': chunk_file_md5}}
                return Response(result, status=status.HTTP_400_BAD_REQUEST)
        if int(chunk_index) == int(chunks_num):
            uploaded = True
            save_file_path = os.path.join(base_path, file_name)
            # The file already exists, add a timestamp to the name
            if os.path.exists(save_file_path):
                save_file_path = os.path.join(base_path, f'{int(time.time())}-{file_name}')
            with open(save_file_path, 'wb') as uploaded_file:
                for index in range(int(chunks_num)):
                    chunk_file = os.path.join(base_chunk_path, f'{file_name}.part{index + 1}')
                    try:
                        chunk_file = open(chunk_file, 'rb')  # open each chunk in order
                        uploaded_file.write(chunk_file.read())  # read the chunk and write it into the new file
                        chunk_file.close()
                    except Exception as error:
                        print(f'Failed to merge file {file_name} from {base_chunk_path}: {error}')
                        uploaded = False
            # Check the MD5 of the merged file
            uploaded_file_md5 = check_md5_sum(save_file_path)
            if uploaded_file_md5 != file_md5:
                uploaded = False
            if uploaded:
                attachment = FileAttachment.objects.create(group=group, file_name=file_name,
                                                           file_path=os.path.join(file_path, file_name),
                                                           file_size=os.path.getsize(save_file_path),
                                                           file_type=file_type, creator=request.user)
                shutil.rmtree(base_chunk_path)
                action_log(request=request, user=request.user, action_type=UploadAction, old_instance=None,
                           instance=attachment, action_info=f'Uploaded attachment: {attachment.__str__()}')
                serializer = self.get_serializer(attachment)
                results = serializer.data
                results['uploaded'] = True
                result = {'success': True, 'messages': f'Added attachment: {attachment.__str__()}', 'results': results}
                return Response(result, status=status.HTTP_200_OK)
            else:
                result = {'success': False, 'messages': 'Failed to merge the file, please upload again'}
                return Response(result, status=status.HTTP_400_BAD_REQUEST)
        else:
            result = {'success': True, 'messages': f'Successfully uploaded file: {file_name}, chunk: {chunk_index}',
                      'results': {'uploaded': False, 'chunk_index': chunk_index, 'file_name': file_name, }
                      }
            return Response(result, status=status.HTTP_200_OK)
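# A minimal client-side sketch of the chunk protocol handled above, assuming
# the `requests` library and a hypothetical endpoint URL; the form field names
# match the keys read from request.data in create(), and chunk indices are
# 1-based so the last chunk triggers the merge.
import hashlib
import os

import requests


def upload_in_chunks(path, url, chunk_size=5 * 1024 * 1024, file_type=1, group=None):
    with open(path, 'rb') as f:
        file_md5 = hashlib.md5(f.read()).hexdigest()
    size = os.path.getsize(path)
    chunks_num = max(1, (size + chunk_size - 1) // chunk_size)
    file_name = os.path.basename(path)
    with open(path, 'rb') as f:
        for index in range(1, chunks_num + 1):
            chunk = f.read(chunk_size)
            resp = requests.post(url, data={
                'chunk_index': index,
                'chunks_num': chunks_num,
                'chunk_md5': hashlib.md5(chunk).hexdigest(),
                'file_name': file_name,
                'file_md5': file_md5,
                'file_type': file_type,
                'group': group,
            }, files={'chunk_file': (file_name, chunk)})
            resp.raise_for_status()
    # The final response carries the merged-file attachment details.
    return resp.json()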
Пример #56
0
def remove_icons(destination):
    for size in ADDON_ICON_SIZES:
        filename = '%s-%s.png' % (destination, size)
        if storage.exists(filename):
            storage.delete(filename)
Пример #57
0
def photo_size(request,
               id,
               size,
               crop=False,
               quality=90,
               download=False,
               constrain=False):
    """
    Renders image and returns response
    Does not use template
    Saves resized image within cache system
    Returns 404 if image rendering fails
    """

    if isinstance(quality, str) and quality.isdigit():
        quality = int(quality)

    cache_key = generate_image_cache_key(file=id,
                                         size=size,
                                         pre_key=PHOTO_PRE_KEY,
                                         crop=crop,
                                         unique_key=id,
                                         quality=quality,
                                         constrain=constrain)
    cached_image = cache.get(cache_key)
    if cached_image:
        return redirect('{0}{1}'.format(
            get_setting('site', 'global', 'siteurl'), cached_image))

    photo = get_object_or_404(Image, id=id)
    size = [int(s) for s in size.split('x')]
    size = aspect_ratio(photo.image_dimensions(), size, constrain)

    # check permissions
    if not has_perm(request.user, 'photos.view_image', photo):
        raise Http403

    attachment = ''
    if download:
        attachment = 'attachment;'

    if not photo.image or not default_storage.exists(photo.image.name):
        raise Http404

    # At this point, we didn't get the image from the cache.
    # Check if this particular thumbnail already exists on file system.
    # If it's there, no need to rebuild it from the original image!
    file_name = photo.image_filename()
    file_path = 'cached%s%s' % (request.path, file_name)
    if default_storage.exists(file_path):
        image = get_image_from_path(
            os.path.join(settings.MEDIA_ROOT, file_path))
    else:
        # gets resized image from cache or rebuild
        image = get_image(photo.image,
                          size,
                          PHOTO_PRE_KEY,
                          crop=crop,
                          quality=quality,
                          unique_key=str(photo.pk),
                          constrain=constrain)

    # if image not rendered; quit
    if not image:
        raise Http404

    response = HttpResponse(content_type='image/jpeg')
    response['Content-Disposition'] = '%s filename="%s"' % (
        attachment, photo.image_filename())
    image.convert('RGB').save(response, "JPEG", quality=quality)

    if photo.is_public_photo() and photo.is_public_photoset():
        if not default_storage.exists(file_path):
            default_storage.save(file_path, ContentFile(response.content))
        full_file_path = "%s%s" % (settings.MEDIA_URL, file_path)
        cache.set(cache_key, full_file_path)
        cache_group_key = "photos_cache_set.%s" % photo.pk
        cache_group_list = cache.get(cache_group_key)

        if cache_group_list is None:
            cache.set(cache_group_key, [cache_key])
        else:
            cache_group_list += [cache_key]
            cache.set(cache_group_key, cache_group_list)

    return response
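# A hedged sketch of how the per-photo cache group maintained above could be
# cleared when a photo changes; `cache` is Django's default cache, and the
# group key format mirrors the one used in photo_size(). The function name is
# hypothetical.
from django.core.cache import cache


def invalidate_photo_cache(photo_pk):
    """Delete every cached resized-image URL recorded for a single photo."""
    cache_group_key = "photos_cache_set.%s" % photo_pk
    for cache_key in cache.get(cache_group_key) or []:
        cache.delete(cache_key)
    cache.delete(cache_group_key)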
Пример #58
0
def create_thumbnail(instance,
                     thumbnail_remote_url,
                     thumbnail_create_url=None,
                     check_bbox=False,
                     ogc_client=None,
                     overwrite=False):
    thumbnail_dir = os.path.join(settings.MEDIA_ROOT, 'thumbs')
    if not os.path.exists(thumbnail_dir):
        os.makedirs(thumbnail_dir)
    thumbnail_name = None
    if isinstance(instance, Layer):
        thumbnail_name = 'layer-%s-thumb.png' % instance.uuid
    elif isinstance(instance, Map):
        thumbnail_name = 'map-%s-thumb.png' % instance.uuid
    thumbnail_path = os.path.join(thumbnail_dir, thumbnail_name)
    if overwrite is True or storage.exists(thumbnail_path) is False:
        if not ogc_client:
            ogc_client = http_client
        BBOX_DIFFERENCE_THRESHOLD = 1e-5

        if not thumbnail_create_url:
            thumbnail_create_url = thumbnail_remote_url

        if check_bbox:
            # Check if the bbox is invalid
            valid_x = (float(instance.bbox_x0) -
                       float(instance.bbox_x1))**2 > BBOX_DIFFERENCE_THRESHOLD
            valid_y = (float(instance.bbox_y1) -
                       float(instance.bbox_y0))**2 > BBOX_DIFFERENCE_THRESHOLD
        else:
            valid_x = True
            valid_y = True

        image = None

        if valid_x and valid_y:
            Link.objects.get_or_create(resource=instance.get_self_resource(),
                                       url=thumbnail_remote_url,
                                       defaults=dict(
                                           extension='png',
                                           name="Remote Thumbnail",
                                           mime='image/png',
                                           link_type='image',
                                       ))
            ResourceBase.objects.filter(id=instance.id) \
                .update(thumbnail_url=thumbnail_remote_url)

            # Download thumbnail and save it locally.
            try:
                resp, image = ogc_client.request(thumbnail_create_url)
                if 'ServiceException' in image or \
                   resp.status < 200 or resp.status > 299:
                    msg = 'Unable to obtain thumbnail: %s' % image
                    raise Exception(msg)
            except BaseException:
                import traceback
                logger.debug(traceback.format_exc())

                # Replace error message with None.
                image = None

        if image is not None:
            instance.save_thumbnail(thumbnail_name, image=image)
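# A small worked sketch of the bbox validity check above, assuming plain
# floats: an axis is considered degenerate when its squared extent falls below
# the threshold. The helper name is hypothetical.
BBOX_DIFFERENCE_THRESHOLD = 1e-5


def bbox_is_valid(bbox_x0, bbox_x1, bbox_y0, bbox_y1):
    valid_x = (float(bbox_x0) - float(bbox_x1)) ** 2 > BBOX_DIFFERENCE_THRESHOLD
    valid_y = (float(bbox_y1) - float(bbox_y0)) ** 2 > BBOX_DIFFERENCE_THRESHOLD
    return valid_x and valid_y


# e.g. bbox_is_valid(-74.3, -73.7, 40.5, 40.9) is True, while a collapsed bbox
# such as bbox_is_valid(10.0, 10.000001, 20.0, 20.0) is False.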
Пример #59
0
def testDelete(self):
    with self.save_file():
        self.assertTrue(default_storage.exists("foo.txt"))
        default_storage.delete("foo.txt")
    self.assertFalse(default_storage.exists("foo.txt"))
Пример #60
0
def clean_files(self, f):
    if f.mirror_file_path and storage.exists(f.mirror_file_path):
        storage.delete(f.mirror_file_path)
    if not storage.exists(f.file_path):
        with storage.open(f.file_path, 'w') as fp:
            fp.write('sample data\n')