Example #1
    def retrieve(self, request, pk, *args, **kwargs):
        throttle = throttling.ImportDumpModeRateThrottle()

        if not throttle.allow_request(request, self):
            self.throttled(request, throttle.wait())

        project = get_object_or_404(self.get_queryset(), pk=pk)
        self.check_permissions(request, 'export_project', project)

        dump_format = request.QUERY_PARAMS.get("dump_format", "plain")

        if settings.CELERY_ENABLED:
            task = tasks.dump_project.delay(request.user, project, dump_format)
            tasks.delete_project_dump.apply_async((project.pk, project.slug, task.id, dump_format),
                                                  countdown=settings.EXPORTS_TTL)
            return response.Accepted({"export_id": task.id})

        if dump_format == "gzip":
            path = "exports/{}/{}-{}.json.gz".format(project.pk, project.slug, uuid.uuid4().hex)
            storage_path = default_storage.path(path)
            with default_storage.open(storage_path, mode="wb") as outfile:
                services.render_project(project, gzip.GzipFile(fileobj=outfile))
        else:
            path = "exports/{}/{}-{}.json".format(project.pk, project.slug, uuid.uuid4().hex)
            storage_path = default_storage.path(path)
            with default_storage.open(storage_path, mode="wb") as outfile:
                services.render_project(project, outfile)

        response_data = {
            "url": default_storage.url(path)
        }
        return response.Ok(response_data)
Example #2
def image_ratio_ajax(request, upload_to=None, form_class=FileForm):
    """
    Processes ajax post from imagescaler.
    """
    form = form_class(request.POST, request.FILES)
    if form.is_valid():
        uploaded_file = request.FILES['file']
        if uploaded_file.content_type in IMAGE_TYPES:
            file_name, extension = os.path.splitext(uploaded_file.name)
            safe_name = '{0}{1}'.format(FILENAME_NORMALIZER(file_name),
                                        extension)
            name = os.path.join(upload_to or UPLOAD_PATH, safe_name)
            path = default_storage.save(name, uploaded_file)
            full_path = default_storage.path(path)
            try:
                os.chmod(full_path, 0o660)
            except Exception:
                print(sys.exc_info())
            if settings.TINY_PNG_ENABLED is True and uploaded_file.content_type in \
                    IMAGE_TYPES_FOR_TINY_PNG:
                compress_image(default_storage.path(path))
            size = get_image_dimensions(default_storage.path(path), True)
            if size:
                width, height = size
                return HttpResponse(json.dumps({
                    'url': default_storage.url(path),
                    'filename': path,
                    'data': {'width': width, 'height': height}
                }))
        return HttpResponse(status=403, content='Bad image format')
    return HttpResponse(status=403)
Example #3
def write_uploaded_file_to_dataset(dataset, uploaded_file_post,
                                   subdir=None):
    """
    Writes file POST data to the dataset directory in the file store

    :param dataset: dataset whose directory to be written to
    :type dataset: models.Model
    :param uploaded_file_post: uploaded file (either UploadedFile or File)
    :type uploaded_file_post: types.FileType
    :rtype: the path of the file written to
    """

    filename = uploaded_file_post.name
    if subdir is not None:
        filename = path.join(subdir, filename)

    from django.core.files.storage import default_storage

    # Path on disk can contain subdirectories - but if the request
    # gets tricky with "../" or "/var" or something we strip them
    # out..
    try:
        copyto = path.join(get_dataset_path(dataset), filename)
        default_storage.path(copyto)
    except (SuspiciousOperation, ValueError):
        copyto = path.join(get_dataset_path(dataset), path.basename(filename))

    logger.debug("Writing uploaded file %s" % copyto)

    realcopyto = default_storage.save(copyto, uploaded_file_post)

    if copyto != realcopyto:
        logger.debug("Actually wrote uploaded file to %s" % copyto)

    return realcopyto
Example #4
    def check_availability(self):
        """
        Perform check against Default Storage.
        """
        try:
            name = default_storage.get_valid_name('Informer Storage')

            # Save data.
            content = ContentFile('File used by StorageInformer checking.')
            path = default_storage.save(name, content)

            # Check properties.
            default_storage.size(path)
            default_storage.url(path)
            default_storage.path(path)

            default_storage.get_accessed_time(path)
            default_storage.get_available_name(path)
            default_storage.get_created_time(path)
            default_storage.get_modified_time(path)
            default_storage.get_valid_name(path)

            # And remove file.
            default_storage.delete(path)

            storage = default_storage.__class__.__name__
        except Exception as error:
            raise InformerException(
                f'An error occurred when trying to use your Storage: {error}')
        else:
            return True, f'Your {storage} is operational.'
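A minimal usage sketch for the checker above. The class name StorageInformer is taken from the content string inside the snippet itself; the import path and the no-argument constructor are assumptions, not part of the original example:

# assumed import path; adjust to wherever StorageInformer actually lives
from informer.checker.storage import StorageInformer

available, message = StorageInformer().check_availability()
print(available, message)  # e.g. True, 'Your FileSystemStorage is operational.'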
Example #5
def rpc_push(request):
    response = HttpResponse()

    response.write("<h1>upload data</h1>")
    response.write("curl -F upload_filename=@LICENSE http://localhost:8000/PUSH/")

    if request.method == "GET":
        return rpc_get(request, int(request.GET.get("id")))

    if request.method == "POST":
        token = request.POST.get("token")
        server = check_auth_host(token)
        if server:
            current = datetime.now()
            for filename, mem_file in request.FILES.items():
                filepath = default_storage.path('upload/tmp/%s' % filename)
                with open(filepath, 'wb+') as destination:
                    for chunk in mem_file.chunks():
                        destination.write(chunk)
            new_file = "%s_%s.%s" % (current.date(), current.time(), filename.split(".")[-1])
            logger.info("file %s was uploaded %s" % (new_file, datetime.now() - current))
            new_file = default_storage.path('upload/%d/%s' % (server.id, new_file))
            os.rename(filepath, new_file)
        else:
            response.write("<h2>Error: authorized token doesn't exist<h2>")
    response['Content-length'] = str(len(response.content))
    return response
Example #6
def decrypt(request):

    myEntry = Entry.objects.filter(slug=request.GET['slug'])[0]
    fileName = default_storage.path(myEntry.myFile)
    key = hashlib.sha256(request.GET['pass']).digest() 

    try:
        CryptoLib.decrypt_file(key, fileName)
        dec_filePath = default_storage.path(fileName[:-4])

        print(dec_filePath)

        wrapper = FileWrapper(open(dec_filePath, 'rb'))
        response = HttpResponse(wrapper, content_type='application/zip')
        response['Content-Length'] = os.path.getsize(dec_filePath)

        default_storage.delete(fileName[:-4])

    except:
        #nuke the media directory
        shutil.rmtree(default_storage.path(''))
        os.mkdir(default_storage.path(''))
        raise

    return response
Example #7
    def save_model(self, request, obj, form, change):
        if not change and '_saveasnew' in request.POST:
            path_info = request.META['HTTP_REFERER']
            id = path_info.split('/')[-2:-1]    #old image id - table of size 1  
            s_file = StaticFile.objects.get(pk = int(id[0]))    
            path = generate_file_path(None, request.POST['filename'])
            old_path = s_file.static_file
            img_path = 'uploads/'+str(old_path)
            result = img_path #urllib.urlretrieve(img_path)   #uploads/folder/filename.ext

            if request.POST['crop_coords'] != "":
                crop_coords = map(int, request.POST['crop_coords'].split(','))
                file = default_storage.open(old_path)
                img = Image.open(file)
                cropped_img = img.crop((crop_coords[0], crop_coords[1], crop_coords[0]+ crop_coords[2], crop_coords[1] + crop_coords[3]))
                cropped_img.save(default_storage.path(path))
                obj.width, obj.height = cropped_img.size         
                obj.crop_coords = ''
            else:
                file = default_storage.open(old_path)
                img = Image.open(file)
                img.save(default_storage.path(path))
                obj.width, obj.height = img.size
            obj.static_file.save(path, File(open(default_storage.path(path))), save=True)

            obj.user = request.user
            obj.save()

        else:
            return super(FileAdmin, self).save_model(request, obj, form, change)   
Example #8
def create_thumbnail(image_name):
    image_path = default_storage.path(TWITTER_IMAGE_PATH + image_name)
    image = Image.open(image_path)
    image.thumbnail(THUMBNAIL_SIZE, Image.ANTIALIAS)
    thumbnail_path = default_storage.path(THUMBNAIL_PATH + image_name)
    #if (image_path.lower()).find('jpg') <= -1:
    #    thumbnail_path=thumbnail_path+'.jpg'
    image.save(thumbnail_path,'JPEG')
    return thumbnail_path
Example #9
    def test_generate_attachments_zip_export(self):
        filenames = [
            'OSMWay234134797.osm',
            'OSMWay34298972.osm',
        ]
        osm_fixtures_dir = os.path.realpath(
            os.path.join(
                os.path.dirname(api_tests.__file__), 'fixtures', 'osm'))
        paths = [
            os.path.join(osm_fixtures_dir, filename) for filename in filenames
        ]
        xlsform_path = os.path.join(osm_fixtures_dir, 'osm.xlsx')
        self._publish_xls_file_and_set_xform(xlsform_path)
        submission_path = os.path.join(osm_fixtures_dir, 'instance_a.xml')
        count = Attachment.objects.filter(extension='osm').count()
        self._make_submission_w_attachment(submission_path, paths)
        self.assertTrue(
            Attachment.objects.filter(extension='osm').count() > count)

        options = {"extension": Export.ZIP_EXPORT}

        export = generate_attachments_zip_export(
            Export.ZIP_EXPORT, self.user.username, self.xform.id_string, None,
            options)

        self.assertTrue(export.is_successful)

        temp_dir = tempfile.mkdtemp()
        zip_file = zipfile.ZipFile(default_storage.path(export.filepath), "r")
        zip_file.extractall(temp_dir)
        zip_file.close()

        for a in Attachment.objects.all():
            self.assertTrue(
                os.path.exists(os.path.join(temp_dir, a.media_file.name)))
        shutil.rmtree(temp_dir)

        # deleted submission
        submission = self.xform.instances.filter().first()
        submission.deleted_at = timezone.now()
        submission.save()

        export = generate_attachments_zip_export(
            Export.ZIP_EXPORT, self.user.username, self.xform.id_string, None,
            options)
        self.assertTrue(export.is_successful)
        temp_dir = tempfile.mkdtemp()
        zip_file = zipfile.ZipFile(default_storage.path(export.filepath), "r")
        zip_file.extractall(temp_dir)
        zip_file.close()

        for a in Attachment.objects.all():
            self.assertFalse(
                os.path.exists(os.path.join(temp_dir, a.media_file.name)))
        shutil.rmtree(temp_dir)
Example #10
 def __init__(self, f):
     if not bool(f) or not default_storage.exists(f):
         self.file = ''
         self.last_modification = None
         self.size = size(0)
         self.url = ""
     else:
         self.file = basename(default_storage.path(f))
         self.last_modification = getmtime(default_storage.path(f))
         self.size = size(getsize(default_storage.path(f)))
         self.url = ""
Example #11
    def setUp(self):
        self.b1 = Book.objects.create(name="Seguranca em Redes Informaticas", author="Andre Zuquete",
                                      production_date="2013-05-01",
                                      original_file=default_storage.path(BASE_DIR + '/media/books/pg6598.txt'))

        self.b2 = Book.objects.create(name="Cyclopedia of Telephony & Telegraphy Vol. 2",
                                      author="Kempster Miller et. al.",
                                      production_date="2010-08-15",
                                      original_file=default_storage.path(BASE_DIR + '/media/books/pg33437.txt'))

        self.a1 = Account.objects.create(email='*****@*****.**', username='******', first_name='unit', last_name='test', user_data=self.ucd1)
Example #12
    def create(self, request, *args, **kwargs):
        """
        B{Create} a private competition round
        B{URL:} ../api/v1/competitions/private/round/

        :type  competition_name: str
        :param competition_name: The competition name
        :type  grid: str
        :param grid: The grid path from resources
        :type  param_list: str
        :param param_list: The param_list from resources
        :type  lab: str
        :param lab: The lab from resources
        """
        serializer = self.serializer_class(data=request.data)

        if serializer.is_valid():
            private_competition = get_object_or_404(Competition.objects.all(),
                                                    name=serializer.validated_data['competition_name'])

            # this competition must be a private competition
            MustBePrivateCompetition(competition=private_competition)

            # verify if the team is enrolled in the competition
            UserCanAccessToThePrivateCompetition(competition=private_competition, user=request.user)

            # create a round for this competition
            try:
                has = Round.objects.filter(parent_competition=private_competition,
                                           grid_path=default_storage.path(serializer.validated_data['grid']),
                                           param_list_path=default_storage.path(
                                               serializer.validated_data['param_list']),
                                           lab_path=default_storage.path(serializer.validated_data['lab'])).count()

                if has > 0:
                    return Response({'status': 'Bad request',
                                     'message': 'You already have one solo trial with those files!'},
                                    status=status.HTTP_400_BAD_REQUEST)

                with transaction.atomic():
                    r = Round.objects.create(name=uuid.uuid4(), parent_competition=private_competition)
                    PrivateCompetitionRound.set_param(r, serializer.validated_data['grid'], 'grid')
                    PrivateCompetitionRound.set_param(r, serializer.validated_data['param_list'], 'param_list')
                    PrivateCompetitionRound.set_param(r, serializer.validated_data['lab'], 'lab')
            except IntegrityError as e:
                return Response({'status': 'Bad request',
                                 'message': str(e)},
                                status=status.HTTP_400_BAD_REQUEST)
            except DataError:
                return Response({'status': 'Bad request',
                                 'message': 'Please specify correctly the files!'},
                                status=status.HTTP_400_BAD_REQUEST)
Example #13
    def to_representation(self, instance):
        grid = basename(default_storage.path(instance.grid_path))
        lab = basename(default_storage.path(instance.lab_path))
        param_list = basename(default_storage.path(instance.param_list_path))

        return {
            'name': instance.name,
            'grid': grid,
            'grid_path': "resources/"+default_storage.path(instance.grid_path).split('/media/resources/')[1],
            'param_list': param_list,
            'lab': lab,
            'lab_path': "resources/" + default_storage.path(instance.lab_path).split('/media/resources/')[1],
            'created_at': instance.created_at,
            'updated_at': instance.updated_at
        }
Example #14
    def test_save(self):
        self.assertTrue(self.photo.md5sum)
        # prepare new upload image
        thumb = Image.new('RGB', (1024, 768), 'red')  # the same size and color
                                                      # as self.photo.image
        thumb_io = BytesIO()
        thumb.save(thumb_io, format='JPEG')

        # prevent the purposefully-thrown exception from breaking the entire
        # unittest's transaction
        with transaction.atomic():
            self.assertRaisesMessage(IntegrityError, "UNIQUE constraint failed: "
                                                     "loader_photo.md5sum",
                                     PhotoFactory,
                                     image = ContentFile(thumb_io.getvalue(),
                                                         "test.jpg"),
                                     name = "Uploaded Photo 1",
                                     thumbnail = None  # we won't generate
                                                       # thumbnail image
                                     )
        path = default_storage.path(name="photos/test.jpg")
        default_storage.delete(path)  # remove photo created in 'media' folder

        # no problems with the new different image
        self.up_photo = PhotoFactory(name = "Uploaded Photo 1")  # new blue image
        self.assertNotEqual(self.up_photo.md5sum, self.photo.md5sum)
Example #15
def dump_project(self, user, project):
    mbuilder = MagicMailBuilder(template_mail_cls=InlineCSSTemplateMail)
    path = "exports/{}/{}-{}.json".format(project.pk, project.slug, self.request.id)
    storage_path = default_storage.path(path)

    try:
        url = default_storage.url(path)
        with default_storage.open(storage_path, mode="w") as outfile:
            render_project(project, outfile)

    except Exception:
        ctx = {
            "user": user,
            "error_subject": _("Error generating project dump"),
            "error_message": _("Error generating project dump"),
            "project": project
        }
        email = mbuilder.export_error(user, ctx)
        email.send()
        logger.error('Error generating dump %s (by %s)', project.slug, user, exc_info=sys.exc_info())
        return

    deletion_date = timezone.now() + datetime.timedelta(seconds=settings.EXPORTS_TTL)
    ctx = {
        "url": url,
        "project": project,
        "user": user,
        "deletion_date": deletion_date
    }
    email = mbuilder.dump_project(user, ctx)
    email.send()
Example #16
def serve_content(storage, name):
    """
    Generate a response to serve protected content.
    """
    if not storage.exists(name):
        raise Http404

    # Non-filesystem storages should re-direct to a temporary URL
    if not storage.__class__.__name__ == 'FileSystemStorage':
        return HttpResponseRedirect(storage.url(name))

    # If behind a real server, use send-file
    if settings.GEDGO_SENDFILE_HEADER:
        response = HttpResponse()
        response[settings.GEDGO_SENDFILE_HEADER] = default_storage.path(name)
    # Otherwise, serve it ourselves, which should only happen in DEBUG mode
    else:
        wrapper = FileWrapper(storage.open(name))
        response = HttpResponse(wrapper)

    # Set various file headers and return
    base = path.basename(name)
    response['Content-Type'] = mimetypes.guess_type(base)[0]
    response['Content-Length'] = storage.size(name)
    if response['Content-Type'] is None:
        response['Content-Disposition'] = "attachment; filename=%s;" % (base)
    return response
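The send-file branch above only works when the front-end web server honours an internal-redirect header. As a hedged illustration, the setting would typically point at one of the conventional header names (these values are assumptions, not taken from the gedgo project's settings):

GEDGO_SENDFILE_HEADER = 'X-Accel-Redirect'   # nginx
# GEDGO_SENDFILE_HEADER = 'X-Sendfile'       # Apache mod_xsendfile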
Example #17
 def __init__(self):
     super(Giedo, self).__init__(settings.GIEDO_SOCKET)
     self.l = logging.getLogger('giedo')
     self.last_sync_ts = 0
     self.daan, self.cilia = None, None
     try:
         self.daan = WhimClient(settings.DAAN_SOCKET)
     except:
         self.l.exception("Couldn't connect to daan")
     try:
         self.cilia = WhimClient(settings.CILIA_SOCKET)
     except:
         self.l.exception("Couldn't connect to cilia")
     self.mirte = mirte.get_a_manager()
     self.threadPool = self.mirte.get_a('threadPool')
     self.operation_lock = threading.Lock()
     self.push_changes_event = threading.Event()
     self.openvpn_lock = threading.Lock()
     self.threadPool.execute(self.run_change_pusher)
     if default_storage.exists("villanet.pem"):
         self.villanet_key = RSA.load_pub_key(default_storage.path(
             "villanet.pem"))
     self.ss_actions = (
               ('postfix', self.daan, self._gen_postfix),
               ('postfix-slm', self.daan, self._gen_postfix_slm),
               ('mailman', self.daan, self._gen_mailman),
               ('forum', self.daan, self._gen_forum),
               ('unix', self.cilia, self._gen_unix),
               ('wiki', self.daan, self._gen_wiki),
               ('ldap', self.daan, self._gen_ldap),
               ('wolk', self.cilia, self._gen_wolk),
               ('quassel', self.daan, self._gen_quassel))
     self.push_changes_event.set()
Example #18
    def get(request, team_name, agent_name):
        """
        B{Retrieve} the agent files as tar
        B{URL:} ../api/v1/agents/agent_file/<team_name>/<agent_name>/
        Must be part of the team owner of the agent

        Server to Server only

        :type  agent_name: str
        :param agent_name: The agent name
        :type  team_name: str
        :param team_name: The team name
        """
        team = get_object_or_404(Team.objects.all(), name=team_name)
        agent = get_object_or_404(Agent.objects.all(), team=team, agent_name=agent_name)

        if AgentFile.objects.filter(agent=agent).count() == 0:
            return Response({'status': 'Bad request',
                             'message': 'The agent doesn\'t have files.'},
                            status=status.HTTP_400_BAD_REQUEST)

        temp = tempfile.NamedTemporaryFile()
        with tarfile.open(temp.name, "w:gz") as tar:
            for file_obj in AgentFile.objects.filter(agent=agent):
                tar.add(default_storage.path(file_obj.file), arcname=file_obj.original_name)
            tar.close()

        wrapper = FileWrapper(temp)
        response = HttpResponse(wrapper, content_type="application/x-compressed")
        response['Content-Disposition'] = 'attachment; filename=' + agent_name + '.tar.gz'
        response['Content-Length'] = getsize(temp.name)
        temp.seek(0)
        return response
Example #19
    def handle(self, *args, **options):
        try:
            app_label, model_name, field_name, variant_name = args
        except ValueError:
            raise CommandError(self.USAGE)

        variant = get_variant(app_label, model_name, field_name, variant_name)

        in_database = set(
            getattr(getattr(x, field_name), variant_name).filename
            for x in variant.image.field.model._default_manager.all()
        )

        base = os.path.join(
            variant.image.field.upload_to,
            variant.name,
        )

        on_disk = set(
            os.path.join(
                variant.image.field.upload_to,
                variant.name,
                x,
            ) for x in os.listdir(default_storage.path(base))
        )

        for x in on_disk.difference(in_database):
            print("I: Can be deleted: %s" % x)
Example #20
def get_sapelli_dir_path(user=None):
    """
    Creates the Sapelli working directory.

    Parameters
    ----------
    user : geokey.users.models.User
        User who uploaded the project (optional).

    Returns
    -------
    str:
        Absolute path to the Sapelli working directory.

    Raises
    ------
    SapelliException:
        When the working directory could not be created.
    """
    sapelli_dir_path = os.path.join(default_storage.path('sapelli'), '')  # joining with '' adds the trailing / or \
    if user:
        sapelli_dir_path = os.path.join(sapelli_dir_path, slugify(str(user.id) + '_' + user.display_name), '')
    if not os.path.exists(sapelli_dir_path):
        # Create the directory if it doesn't exist:
        try:
            os.makedirs(sapelli_dir_path)
        except BaseException as e:
            raise SapelliException('Failed to create Sapelli working directory (%s): %s' % (sapelli_dir_path, str(e)))
    return sapelli_dir_path
Example #21
    def _get_size(self, image_file, ratiofield):
        ratio = ratiofield.ratio_width / float(ratiofield.ratio_height)
        try:
            width, height = get_image_dimensions(default_storage.path(image_file), True)

            size = (height * ratio, height)
            option = self.option

            if option:
                if option[0] == 'scale':
                    width = size[0] * option[1]
                    height = size[1] * option[1]
                elif option[0] == 'width':
                    width = option[1]
                    height = size[1] * width / size[0]
                elif option[0] == 'height':
                    height = option[1]
                    width = height * size[0] / size[1]
                elif option[0] == 'max_size':
                    max_width, max_height = option[1]
                    width, height = size
                    # recalculate height if needed
                    if max_width < width:
                        height = height * max_width / width
                        width = max_width
                    # recalculate width if needed
                    if max_height < height:
                        width = max_height * width / height
                        height = max_height

                size = (width, height)
            return size
        except IOError:
            print "image %s does not exist" % image_file
            return None
Example #22
def change_file(new_file_name, custom_gain):

    def timeout(args):
        print "[Timer expired] Deleting " + args
        os.remove(default_storage.path(args))

    try:
        tags = ReplayGain(default_storage.path(new_file_name), custom_gain)
        print(tags)
        new_file = FileWrapper(open(default_storage.path(new_file_name), 'rb'))
        t = Timer(10, timeout, args=[new_file_name])
        t.start()
        return new_file
        #~
    except:
        message = 'Error: is the file a proper MP3?'
Example #23
def attach_upload(request):
    if not request.user.is_authenticated():
        return HttpResponse(get_script_response(code=403, message='Unauthorized'))

    if 'userfile' not in request.FILES:
        return HttpResponse(get_script_response(code=400, message='File expected'))

    f = request.FILES['userfile']     # file is present only if request method is POST and form type was specified
    if not f.content_type.startswith('image/'):
        return HttpResponse(get_script_response(code=400, message='File format not supported!'))

    hash_base = "%s-%s-%s-%s" % (f.name, datetime.datetime.now().isoformat(' '), request.META['HTTP_USER_AGENT'], request.META['REMOTE_ADDR'])
    fname_base = hashlib.md5(hash_base.encode('utf-8')).hexdigest()
    ext = f.name.split('.')[-1]
    if len(ext) > 5:
        ext = ext[0:2]
    full_file = "uploads/%s.%s" % (fname_base, ext)
    thumb_file = 'uploads/thumb_%s.jpg' % fname_base

    log.debug("Generated name for upload: %s" % fname_base)
    log.debug("Generated filename for thumbnail: %s" % thumb_file)

    fs.save(full_file, f)

    try:
        im = Image.open(fs.path(full_file))
        im.thumbnail(settings.IMG_THUMBS_SIZE, Image.ANTIALIAS)
        im.save(fs.path(thumb_file), "JPEG")
    except IOError as e:
        log.error("Failed to create thumbnail. %s" % e.message)
        fs.save(thumb_file, f)

    #save names in session
    uid = generate_uid()
    while uid in request.session:
        uid = generate_uid()

    request.session[uid] = (fname_base, full_file, thumb_file, ext)

    #return url to it
    response = HttpResponse(
        get_script_response(code=0,
            message=uid,
            thumb_url=fs.url(thumb_file)
        )
    )
    return response
Example #24
 def test_delete_webp_folder(self):
     webp_image = WebPImage.objects.create(
         static_path='django-test-image.png', quality=60)
     webp_image.save_image()
     webp_path = default_storage.path(webp_settings.WEBP_CONVERTER_PREFIX)
     self.assertTrue(os.path.exists(webp_path))
     utils.delete_webp_folder()
     self.assertFalse(os.path.exists(webp_path))
Example #25
def generate_thumbnail(image, max_size=None, add_watermark=False):
    """
    Generates a thumbnail from a `models.Image`.
    If `max_size` is None, the default value is used.
    Returns the bytes of the thumbnail.
    """
    if max_size is None:
        max_size = 100

    assert image is not None
    assert isinstance(max_size, int)
    if image.thumbnail_image:
        img = PilImage.open(default_storage.path(image.thumbnail_image.path))
    else:
        img = PilImage.open(default_storage.path(image.image.path))

    img.thumbnail((max_size, max_size,), PilImage.ANTIALIAS)
    output_file = BytesIO()

    if add_watermark:
        angle = 20
        opacity = 0.8

        image_width, image_height = img.size
        text = image.studio.watermark_text or image.studio.name
        watermark_font, watermark_text_width, watermark_text_height = _get_font_for_image(img, text, max_size)

        watermark = PilImage.new('RGBA', img.size, (0, 0, 0, 0))
        draw = ImageDraw.Draw(watermark, 'RGBA')
        pos_x = int((image_width - watermark_text_width) / 2)
        pos_y = int(((image_height - watermark_text_height) / 2))

        draw.text([pos_x, pos_y - watermark_text_height], text, font=watermark_font)
        draw.text([pos_x, pos_y + watermark_text_height], text, font=watermark_font)

        watermark = watermark.rotate(angle, PilImage.BICUBIC)
        alpha = watermark.split()[3]
        alpha = ImageEnhance.Brightness(alpha).enhance(opacity)
        watermark.putalpha(alpha)

        PilImage.composite(watermark, img, watermark).save(output_file, 'JPEG')
    else:
        img.save(output_file, "JPEG")

    del img  # no need to close it
    return output_file.getvalue()
Example #26
	def handle_noargs(self, **options):
		info = options.get('info')
		strip = options.get('strip')
		del_count = 0
		strip_count = 0

		for (path, dirs, files) in os.walk(storage.path('pictures')):
			rel_path = os.path.relpath(path, settings.MEDIA_ROOT)
			for f in files:
				f_path = os.path.join(rel_path, f)
				extension = f.split('.')[-1].lower()
				format = imghdr.what(os.path.join(path, f))

				if not format:
					self.stdout.write("Unknown format: %s\n" % f_path)

				if not Picture.objects.filter(image=f_path).exists():
					if info:
						self.stdout.write("Detected orphaned media file '%s'\n" % f_path)
					else:
						self.stdout.write("Deleting orphaned media file '%s'\n" % f_path)
						storage.delete(f_path)
					del_count = del_count + 1
				else:
					# Detect PNG images saved as JPG
					if format == 'png' and extension == 'jpg':
						p = Picture.objects.get(image=f_path)
						self.stdout.write("Renaming %s image\n" % p)
						p.image.save(f.replace('.jpg', '.png'), p.image)
					elif strip and extension in ['jpg', 'jpeg']:
						try:
							# Process EXIF data (strip all except copyright if present)
							exif_writer = MinimalExifWriter(storage.path(f_path))
							exif_writer.removeExif()
							exif_writer.process()
							strip_count = strip_count + 1
						except Exception as e:
							self.stdout.write("Unable to strip EXIF data from '%s': %s\n" % (f_path, str(e)))

		if info:
			self.stdout.write("Detected %d orphaned media files\n" % del_count)
		else:
			self.stdout.write("Deleted %d orphaned media files\n" % del_count)

		if strip:
			self.stdout.write("Stripped EXIF data from %d pictures\n" % strip_count)
Example #27
def output(request, slug, extension):
    assert settings.DEBUG, "Overridden by nginx configuration"

    return serve(
        request,
        'output.%s' % extension,
        document_root=default_storage.path(slug),
    )
Example #28
def getShow(s):
  url = s.MediaURL
  fileName = s.getAudioFileName() 
  save_path = default_storage.path(fileName)
  if not os.path.isfile(save_path):
    downloadTo(url, save_path)
    s.AudioFile.name = fileName
    s.save()
Example #29
    def test_thumbnail_created_at_right_path(self):
        image_name = 'test.jpg'

        created_path = create_thumbnail(image_name) 

        thumbnail_path = 'collected_twitter_images/thumbnails/' + image_name
        expected_path = default_storage.path(thumbnail_path)
        self.assertEqual(created_path, expected_path)
Example #30
 def _render(self, path, val, caller):
     if val == 'email_embedded_media':
         fullpath = default_storage.path(path)
     elif val == 'email_embedded_static':
         fullpath = staticfiles_storage.path(path)
     else:
         fullpath = path
     return 'cid:' + self.environment.email_object_instance.attach_related_file(fullpath)
Example #31
    def prepare_offline_file(session, local_path):
        replay_path = default_storage.path(local_path)
        current_dir = os.getcwd()
        dir_path = os.path.dirname(replay_path)
        replay_filename = os.path.basename(replay_path)
        meta_filename = '{}.json'.format(session.id)
        offline_filename = '{}.tar'.format(session.id)
        os.chdir(dir_path)

        with open(meta_filename, 'wt') as f:
            f.write(model_to_json(session))

        with tarfile.open(offline_filename, 'w') as f:
            f.add(replay_filename)
            f.add(meta_filename)
        file = open(offline_filename, 'rb')
        os.chdir(current_dir)
        return file
Example #32
    def convertir_audio_de_archivo_de_audio_globales(self, archivo_de_audio):
        """Realiza la conversión y actualiza la instancia de ArchivoDeAudio.

        Esta funcion debe usarse en el Alta y Modificacioin de ArchivoDeAudio.

        :param archivo_de_audio: ArchivoDeAudio para la cual hay que convertir
                                 el audio
        :type archivo_de_audio: ominicontacto_app.models.ArchivoDeAudio
        :raises: OmlAudioConversionError
        """

        assert isinstance(archivo_de_audio, ArchivoDeAudio)

        # check the original (to-be-converted) file
        wav_full_path = default_storage.path(
            archivo_de_audio.audio_original.name)
        assert os.path.exists(wav_full_path)

        # generate the output file name
        _template = ConversorDeAudioService.\
            TEMPLATE_NOMBRE_AUDIO_ASTERISK_PREDEFINIDO
        filename = _template.format(
            archivo_de_audio.descripcion,
            settings.TMPL_OML_AUDIO_CONVERSOR_EXTENSION)

        # Create directories if they do not exist
        abs_output_dir = os.path.join(
            settings.MEDIA_ROOT, ConversorDeAudioService.DIR_AUDIO_PREDEFINIDO)

        self._crear_directorios(abs_output_dir)

        # Create the file if it does not exist
        abs_output_filename = os.path.join(abs_output_dir, filename)
        self._crear_archivo(abs_output_filename)

        assert os.path.exists(abs_output_filename)

        # convert the file
        self._convertir_audio(wav_full_path, abs_output_filename)

        # store a reference to the converted file
        archivo_de_audio.audio_asterisk = os.path.join(
            ConversorDeAudioService.DIR_AUDIO_PREDEFINIDO, filename)
        archivo_de_audio.save()
Example #33
def create_perma_wb_router(config={}):
    """
        Configure server.

        This should do basically the same stuff as pywb.webapp.pywb_init.create_wb_router()
    """
    # paths
    script_path = os.path.dirname(__file__)

    # Get root storage location for warcs.
    # archive_path should be the location pywb can find warcs, like 'file://generated/' or 'http://perma.s3.amazonaws.com/generated/'
    # We can get it by requesting the location of a blank file from default_storage.
    # default_storage may use disk or network storage depending on config, so we look for either a path() or url()
    try:
        archive_path = 'file://' + default_storage.path('') + '/'
    except NotImplementedError:
        archive_path = default_storage.url('/')
        archive_path = archive_path.split('?', 1)[0]  # remove query params

    query_handler = QueryHandler.init_from_config(PermaCDXSource())

    # pywb template vars (used in templates called by pywb, such as head_insert.html, but not our ErrorTemplateView)
    add_env_globals({'static_path': settings.STATIC_URL})

    # use util func to create the handler
    wb_handler = create_wb_handler(
        query_handler,
        dict(archive_paths=[archive_path],
             wb_handler_class=PermaGUIDHandler,
             buffer_response=True,
             head_insert_html=os.path.join(script_path, 'head_insert.html'),
             enable_memento=True,
             redir_to_exact=False))

    wb_handler.replay.content_loader.record_loader.loader = CachedLoader()

    route = PermaRoute(GUID_REGEX, wb_handler)

    router = create_wb_router(config)
    router.error_view = PermaTemplateView('archive-error.html')
    router.routes.insert(0, route)

    return router
Example #34
def get_store_URL(request, *args, **kwargs):

    # these urls are relative for s3
    rel_url = request.GET.get('rel_url', None)

    # just need to convert it to s3 url. That's all.

    # Note: storage starts from INSIDE 'media' folder, because that's 
    #       the media url provided in the settings. 

    # print("...storage.exists? {}".format(storage.listdir(""))) # I have NO idea where I am
    # print("...converting url: {}".format(rel_url))

    rel_obj_path = get_rel_path(rel_url, "frame_images")
    print("------ rel_obj_path: {}".format(rel_obj_path))
    abs_obj_path = ''
    store_path = ''
    if rel_obj_path and storage.exists(str(rel_obj_path)):
        print("EXISTS! Object at {}".format(rel_obj_path))
        # print("Path check: {}".format(storage.path(str(rel_obj_path)) )) # fails here on heroku

        if hasattr(settings, 'USE_S3') and settings.USE_S3 == True:
            # Generate path for S3
            if hasattr(settings, 'MEDIA_URL'):
                store_path = "{}{}".format(settings.MEDIA_URL, rel_obj_path)
            else:
                return JsonResponse({
                    'status':'false',
                    'message': "Cannot get S3 media url from settings."
                    }, status=500
                )
        else:
            store_path = PurePosixPath(storage.path(str(rel_obj_path)))
            # abs_obj_path = store_path.joinpath(rel_obj_path) # no need?

        abs_obj_path = store_path
        print("------- joined path = {}".format(store_path))
    else:
        print("ERROR: Could not find object at {}".format(rel_obj_path))
        # This does not reject request. It returns "" as URL.

    # Response
    return JsonResponse({'url': str(abs_obj_path)})
Example #35
def image_registration(request, pk):
    if request.method == 'POST':
        image = request.POST['autentication_image']
        user = get_object_or_404(Student, pk=pk)
        user_image = convert64toImage(image, user.user.first_name)
        temppath = default_storage.save('temp.png', content=user_image)
        temp_filepath = default_storage.path(temppath)
        face_accepted = encoded_face(temp_filepath)
        if face_accepted:
            user.image = user_image
            user.save()
            default_storage.delete(temppath)
            return redirect('course:user_dashboard')
        else:
            default_storage.delete(temppath)
            messages.warning(request, 'Face not detected.')

        default_storage.delete(temppath)
    return render(request, 'auth_image.html')
Example #36
 def _get_video_info(self):
     """
     Returns basic information about the video as dictionary.
     """
     if not hasattr(self, '_info_cache'):
         encoding_backend = get_backend()
         try:
             _path = getattr(self, 'path', self.name)
         except NotImplementedError:
             _path = self.name
         try:
             path = default_storage.path(_path)
         except NotImplementedError:
             path = default_storage.url(_path)
         try:
             self._info_cache = encoding_backend.get_media_info(path)
         except FFmpegError:
             self._info_cache = {}
     return self._info_cache
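This snippet, like the pywb examples above, relies on the fact that FileSystemStorage implements .path() while remote backends raise NotImplementedError, in which case the code falls back to .url(). A minimal standalone sketch of that fallback pattern (the helper name local_path_or_url is an assumption):

from django.core.files.storage import default_storage

def local_path_or_url(name):
    # FileSystemStorage returns an absolute filesystem path;
    # remote backends (e.g. S3) raise NotImplementedError, so fall back to a URL.
    try:
        return default_storage.path(name)
    except NotImplementedError:
        return default_storage.url(name)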
Example #37
    def _test_logfile_upload(self, user, uuid):
        # Upload crashreport
        device_local_id = self.upload_crashreport(user, uuid)

        # Upload a logfile for the crashreport
        self.upload_logfile(user, uuid, device_local_id)

        logfile_instance = (Device.objects.get(uuid=uuid).crashreports.get(
            device_local_id=device_local_id).logfiles.last())
        uploaded_logfile_path = crashreport_file_name(
            logfile_instance,
            os.path.basename(Dummy.DEFAULT_LOG_FILE_PATHS[0]))

        self.assertTrue(default_storage.exists(uploaded_logfile_path))
        # The files are not 100% equal, because the server adds some extra
        # bytes. However, we mainly care that the contents are equal:
        self._assert_zip_file_contents_equal(
            default_storage.path(uploaded_logfile_path),
            Dummy.DEFAULT_LOG_FILE_PATHS[0],
        )
Example #38
def upload_drive_task(filename, tmp_storage_file, submission_obj, parent):
    print(filename, tmp_storage_file, submission_obj, parent)

    meta = {"name": filename, "parents": [parent]}

    file_path = default_storage.path(tmp_storage_file)

    print(file_path)

    try:
        creds = Credentials(token=DRIVE_TOKENS.get('access'))
        service = build('drive', 'v3', credentials=creds)
        media_body = MediaFileUpload(filename=file_path,
                                     chunksize=1024 * 1024 * 50,
                                     resumable=True)

        file = service.files().create(body=meta,
                                      media_body=media_body).execute()
    except RefreshError:
        refresh_token()

        creds = Credentials(token=DRIVE_TOKENS.get('access'))
        service = build('drive', 'v3', credentials=creds)
        media_body = MediaFileUpload(filename=file_path,
                                     chunksize=1024 * 1024 * 50,
                                     resumable=True)

        file = service.files().create(body=meta,
                                      media_body=media_body).execute()

    url = 'https://drive.google.com/file/d/{}/view'.format(file.get('id'))

    # submission_obj = Submission.objects.get(id=submission_id)
    submission_obj.submission_url = url
    submission_obj.task_submitted = True
    submission_obj.save()

    if not media_body._fd.closed:
        media_body._fd.close()

    default_storage.delete(tmp_storage_file)
Example #39
def upload_session_replay_to_external_storage(session_id):
    logger.info(f'Start upload session to external storage: {session_id}')
    session = Session.objects.filter(id=session_id).first()
    if not session:
        logger.error(f'Session db item not found: {session_id}')
        return
    local_path, foobar = find_session_replay_local(session)
    if not local_path:
        logger.error(f'Session replay not found, may be upload error: {local_path}')
        return
    abs_path = default_storage.path(local_path)
    remote_path = session.get_relative_path_by_local_path(abs_path)
    ok, err = server_replay_storage.upload(abs_path, remote_path)
    if not ok:
        logger.error(f'Session replay upload to external error: {err}')
        return
    try:
        default_storage.delete(local_path)
    except:
        pass
    return
Example #40
    def initial_input(self, *args):
        directory = args[0]
        initial = Collective.objects.create(community=self, schema={})
        directories, files = default_storage.listdir(directory)
        for file in files:
            full_name = directory + '/' + file
            size = default_storage.size(full_name)
            if size < 3 * 1024:
                continue

            properties = {
                "size": size,
                "name": file,
                "file": default_storage.path(full_name),
                "url": default_storage.url(full_name)
            }
            Individual.objects.create(community=self,
                                      collective=initial,
                                      properties=properties,
                                      schema={})
        return initial
Example #41
def prepare(request):
    keys = list(request.POST.keys())
    if len(keys) != 1:
        return 'msg', 'Invalid request'

    testName = keys[0]
    if testName in tests:
        dialog_id = MH.create_dialog()
        ls = tests[testName]
        ls[0].to_pickle(default_storage.path(str(dialog_id)))
        for r in ls[1:-1]:
            if r[0] == 'Q':
                MH.append_query(dialog_id, r[1])
            elif r[0] == 'G':
                MH.append_graph(dialog_id, r[1])
            elif r[0] == 'P':
                MH.set_predicate(dialog_id, r[1])
        MH.set_target_graph_json(dialog_id, ls[ls[-1]][1])
        return 'url', str(dialog_id)

    return 'msg', 'Invalid request'
Example #42
 def process_file(path):
     """Process single file"""
     # failsafe copy of file
     copy = default_storage.open(path, 'rb')
     default_storage.save(path + timestamp, copy)
     copy.close()
     try:
         path = default_storage.path(path)
         squeeze(path)
     except NotImplementedError:
         if path[-1:] != os.sep:
             pf = default_storage.open(path, 'rwb')
             image = pf.read()
             tmpfilehandle, tmpfilepath = tempfile.mkstemp()
             tmpfilehandle = os.fdopen(tmpfilehandle, 'wb')
             tmpfilehandle.write(image)
             tmpfilehandle.close()
             squeeze(tmpfilepath)
             tmpfilehandle = open(tmpfilepath)
             pf.close()
             default_storage.save(path, tmpfilehandle)
             os.remove(tmpfilepath)
Example #43
class TestSapelliLoader(TestCase):
    def setUp(self):
        self.user = UserFactory.create()

    def tearDown(self):
        # delete project(s):
        for sapelli_project in SapelliProject.objects.filter(sapelli_id__in=[
                horniman_sapelli_project_info['sapelli_id'], 1337
        ]):
            try:
                sapelli_project.geokey_project.delete(
                )  # will also delete sapelli_project
            except BaseException:
                pass
        # delete sapelli/user folder
        try:
            shutil.rmtree(
                join(default_storage.path('sapelli'),
                     slugify(str(self.user.id) + '_' + self.user.display_name),
                     ''))
        except BaseException:
            pass
Example #44
 def __init__(self):
     super(Giedo, self).__init__(settings.GIEDO_SOCKET)
     self.last_sync_ts = 0
     self.daan = WhimClient(settings.DAAN_SOCKET)
     self.cilia = WhimClient(settings.CILIA_SOCKET)
     self.mirte = mirte.get_a_manager()
     self.threadPool = self.mirte.get_a('threadPool')
     self.operation_lock = threading.Lock()
     self.push_changes_event = threading.Event()
     self.threadPool.execute(self.run_change_pusher)
     if default_storage.exists("villanet.pem"):
         self.villanet_key = RSA.load_pub_key(
             default_storage.path("villanet.pem"))
     self.ss_actions = (('postfix', self.daan, self._gen_postfix),
                        ('postfix-slm', self.daan, self._gen_postfix_slm),
                        ('mailman', self.daan, self._gen_mailman),
                        ('forum', self.daan, self._gen_forum),
                        ('unix', self.cilia, self._gen_unix),
                        ('wiki', self.daan,
                         self._gen_wiki), ('ldap', self.daan,
                                           self._gen_ldap))
     self.push_changes_event.set()
Example #45
    def handle(self, *args, **options):

        webp_path = default_storage.path(WEBP_CONVERTER_PREFIX)

        if options['interactive']:
            confirm_message = (
                'This will delete all files in {webp_path}\n\n'
                'Are you sure you want to do this?\n\n'
                "Type 'yes' to continue, or 'no' to cancel: ").format(
                    webp_path=webp_path)
            if input(confirm_message) != 'yes':
                raise CommandError("Clearing webp cache cancelled.")
        self.stdout.write("Deleting cache entries...")
        for webp_image in WebPImage.objects.all().iterator():
            key = make_image_key(webp_image.static_path, webp_image.quality)
            cache.delete(key)
        self.stdout.write("Deleting WebPImage models...")
        WebPImage.objects.all().delete()
        self.stdout.write("Deleting images...")
        if os.path.exists(webp_path):
            shutil.rmtree(webp_path)
        self.stdout.write("Successfully cleared cache")
Example #46
    def handle(self, *args, **options):
        for partner in Partner.objects.all():
            self.stdout.write("Process files in the outbox directory for "
                              'partner "%s".' % partner.as2_name)
            for org in Organization.objects.all():
                if settings.DATA_DIR:
                    outbox_folder = os.path.join(
                        settings.DATA_DIR,
                        "messages",
                        partner.as2_name,
                        "outbox",
                        org.as2_name,
                    )
                else:
                    outbox_folder = os.path.join("messages", partner.as2_name,
                                                 "outbox", org.as2_name)

                # Check of the directory exists and if not create it
                try:
                    _, pending_files = default_storage.listdir(outbox_folder)
                except FileNotFoundError:
                    pending_files = []
                    os.makedirs(default_storage.path(outbox_folder))

                # For each file found call send message to send it to the server
                pending_files = filter(lambda x: x != ".", pending_files)
                for pending_file in pending_files:
                    pending_file = os.path.join(outbox_folder, pending_file)
                    self.stdout.write(
                        'Sending file "%s" from organization "%s" to partner '
                        '"%s".' %
                        (pending_file, org.as2_name, partner.as2_name))
                    call_command(
                        "sendas2message",
                        org.as2_name,
                        partner.as2_name,
                        pending_file,
                        delete=True,
                    )
Example #47
def create_perma_pywb_app(config):
    """
        Configure server.
    """
    query_handler = QueryHandler.init_from_config(settings.CDX_SERVER_URL)

    # Get root storage location for warcs.
    # archive_path should be the location pywb can find warcs, like 'file://generated/' or 'http://perma.s3.amazonaws.com/generated/'
    # We can get it by requesting the location of a blank file from default_storage.
    # default_storage may use disk or network storage depending on config, so we look for either a path() or url()
    try:
        archive_path = 'file://' + default_storage.path('') + '/'
    except NotImplementedError:
        archive_path = default_storage.url('/')
        archive_path = archive_path.split('?', 1)[0]  # remove query params

    # use util func to create the handler
    wb_handler = create_wb_handler(query_handler,
                                   dict(archive_paths=[archive_path],
                                        wb_handler_class=Handler,
                                        buffer_response=True,

                                        head_insert_html='ui/head_insert.html',
                                        template_globals={'static_path': 'static/js'},

                                        redir_to_exact=False))

    # Finally, create wb router
    return archivalrouter.ArchivalRouter(
        {
            Route(r'([a-zA-Z0-9\-]+)', wb_handler)
        },
        # Specify hostnames that pywb will be running on
        # This will help catch occasionally missed rewrites that fall-through to the host
        # (See archivalrouter.ReferRedirect)
        hostpaths=['http://localhost:8000/'],
        port=8000
    )
Example #48
def load_from_sap(sap_file, user):
    """
    Loads & saves a SapelliProject from the given SAP file.

    Parameters
    ----------
    sap_file : django.core.files.File
        Uploaded (suspected) SAP file.
    user : geokey.users.models.User
        User who uploaded the project.

    Returns
    -------
    SapelliProject:
        SapelliProject instance for the parsed project.

    Raises
    ------
    SapelliException:
        In case of a configuration problem.
    SapelliSAPException:
        When project loading fails.
    SapelliDuplicateException:
        When the project has already been uploaded.
    """
    # Check if we got a file at all:
    if sap_file is None:
        raise SapelliSAPException('No file provided.')

    # Store copy of file on disk (as it probably is an "in memory" file uploaded in an HTTP request):
    try:
        filename, extension = os.path.splitext(os.path.basename(sap_file.name))
        relative_sap_file_path = default_storage.save(
            os.path.join(get_sapelli_dir_path(user), 'SAPs', '') + filename +
            extension, ContentFile(sap_file.read()))
        sap_file_path = default_storage.path(relative_sap_file_path)
    except BaseException as e:
        raise SapelliSAPException('Failed to store uploaded file: ' + str(e))
Example #49
def serve_image_or_thumb(request, image):
    assert isinstance(image, models.Image)

    file_obj = image.get_image_or_thumbnail_file()
    full_filename = default_storage.path(file_obj.path)
    filesize = os.path.getsize(full_filename)
    content_type = image.content_type

    # TODO: send in chunks to avoid loading the file in memory
    # from django.core.servers.basehttp import FileWrapper
    #    with open(full_filename) as f:
    #        fw = FileWrapper(f)
    #        response = HttpResponse(fw, content_type=content_type)
    #        response['Content-Length'] = filesize
    #        response['Content-Disposition'] = 'attachment; filename="{0}"'.format(
    #            filename_to_user)
    #        return response

    with open(full_filename, mode='r+b') as f:
        file_contents = f.read()
    response = HttpResponse(file_contents, content_type=content_type)
    response['Content-Length'] = filesize
    return response
Example #50
    def retrieve(self, request, pk, *args, **kwargs):
        throttle = throttling.ImportDumpModeRateThrottle()

        if not throttle.allow_request(request, self):
            self.throttled(request, throttle.wait())

        project = get_object_or_404(self.get_queryset(), pk=pk)
        self.check_permissions(request, 'export_project', project)

        if settings.CELERY_ENABLED:
            task = tasks.dump_project.delay(request.user, project)
            tasks.delete_project_dump.apply_async(
                (project.pk, project.slug), countdown=settings.EXPORTS_TTL)
            return response.Accepted({"export_id": task.id})

        path = "exports/{}/{}-{}.json".format(project.pk, project.slug,
                                              uuid.uuid4().hex)
        storage_path = default_storage.path(path)
        with default_storage.open(storage_path, mode="w") as outfile:
            service.render_project(project, outfile)

        response_data = {"url": default_storage.url(path)}
        return response.Ok(response_data)
Example #51
def dump_project(self, user, project):
    path = "exports/{}/{}-{}.json".format(project.pk, project.slug,
                                          self.request.id)
    storage_path = default_storage.path(path)

    try:
        url = default_storage.url(path)
        with default_storage.open(storage_path, mode="w") as outfile:
            render_project(project, outfile)

    except Exception:
        # Error
        ctx = {
            "user": user,
            "error_subject": _("Error generating project dump"),
            "error_message": _("Error generating project dump"),
            "project": project
        }
        email = mail_builder.export_error(user, ctx)
        email.send()
        logger.error('Error generating dump %s (by %s)',
                     project.slug,
                     user,
                     exc_info=sys.exc_info())
    else:
        # Success
        deletion_date = timezone.now() + datetime.timedelta(
            seconds=settings.EXPORTS_TTL)
        ctx = {
            "url": url,
            "project": project,
            "user": user,
            "deletion_date": deletion_date
        }
        email = mail_builder.dump_project(user, ctx)
        email.send()
Example #52
def handle_command(dialog_id, command):
    j, cmds = __get(dialog_id)
    MH.append_command(dialog_id, command)
    if __is_draw_command(command):
        cmds.append(command)
        success, err_msgs = j.handle_next(command)
        for msg in err_msgs:
            MH.append_query(dialog_id, msg)
        if success:
            MH.append_query(dialog_id, 'Here is the graph I generated...')
            MH.append_graph(dialog_id, j.get().to_json())
            if __is_graph_equal_to_target(dialog_id, j.get().to_json()):
                MH.append_query(dialog_id, 'Congratulations! You have generated the target graph.')
            MH.append_query(dialog_id, 'What else would you like to do?')
    else:
        command = command.lower()
        if command == 'undo':
            # Rebuild state by re-running every stored command except the
            # last one, then cache the shortened command list.
            j = __init_jarvis(dialog_id)
            cmds = cmds[:-1]
            print(cmds)
            for c in cmds:
                j.handle_next(c)
            __cache[dialog_id] = j, cmds
            MH.append_query(dialog_id, 'Undo done!')
            MH.append_graph(dialog_id, j.get().to_json())
        elif command == 'reset':
            j = __init_jarvis(dialog_id)
            __cache[dialog_id] = j, []
            MH.append_query(dialog_id, 'Reset done!')
            MH.append_graph(dialog_id, j.get().to_json())
        elif command == 'sample':
            MH.append_query(dialog_id, 'Here are samples of the data...')
            MH.append_query(dialog_id, str(pd.read_pickle(default_storage.path(str(dialog_id))).sample(n=5, random_state=1)))
        if __is_graph_equal_to_target(dialog_id, j.get().to_json()):
            MH.append_query(dialog_id, 'Congratulations! You have generated the target graph.')
        MH.append_query(dialog_id, 'What else would you like to do?')
Example #53
    def setUp(self):
        self.site = Site.objects.get_current()

        #
        # setup the protected dir; since we're using the default storage class,
        # this will point to
        #
        #   /path/to/static/protected/
        #
        # where "/path/to/static/" is your settings.MEDIA_ROOT and "protected"
        # is your PRODUCT.PROTECTED_DIR setting.
        #
        self.protected_dir = default_storage.path(
            config_value('PRODUCT', 'PROTECTED_DIR')
        )
        if not os.path.exists(self.protected_dir):
            os.makedirs(self.protected_dir)

        # setup a temporary file in the protected dir: this is the file that
        # django will use during this test, but we won't use it; close and
        # remove it.
        _file, _abs_path = mkstemp(dir=self.protected_dir)
        os.close(_file)
        os.remove(_abs_path)
        self.file_name = os.path.basename(_abs_path)

        # setup a temporary source dir and source file, using the same file name
        # generated earlier.
        self.dir = mkdtemp()
        self.file = open(os.path.join(self.dir, self.file_name), "w")

        # a fake SHA
        self.key = "".join(["12abf" for i in range(8)])

        # setup a contact
        c, _created = Contact.objects.get_or_create(
            first_name="Jim",
            last_name="Tester",
            email="*****@*****.**",
        )
        ad, _created = AddressBook.objects.get_or_create(
            contact=c, description="home",
            street1="test", state="OR", city="Portland",
            country=Country.objects.get(iso2_code__iexact='US'),
            is_default_shipping=True,
            is_default_billing=True,
        )

        # setup a order
        o, _created = Order.objects.get_or_create(
            contact=c, shipping_cost=Decimal('6.00'), site=self.site
        )

        # setup download
        self.product, _created = DownloadableProduct.objects.get_or_create(
            product=Product.objects.get(slug='dj-rocks'),
            file=File(self.file),
            num_allowed_downloads=3,
            expire_minutes=1,
        )
        self.product_link, _created = DownloadLink.objects.get_or_create(
            downloadable_product=self.product,
            order=o, key=self.key, num_attempts=0,
            time_stamp=timezone.now()
        )

        # setup client
        self.domain = 'satchmoserver'
        self.client = Client(SERVER_NAME=self.domain)

        # go through the verification step
        self.pd_url = urlresolvers.reverse(
            'satchmo_download_send', kwargs={'download_key': self.key}
        )
        pd_process_url = urlresolvers.reverse(
            'satchmo_download_process', kwargs={'download_key': self.key}
        )

        # first, hit the url.
        response = self.client.get(self.pd_url)
        self.assertEqual(response['Location'],
            'http://%s%s' % (self.domain, pd_process_url)
        )

        # follow the redirect to "process" the key.
        response = self.client.get(response['Location'])
        self.assertEqual(self.client.session.get('download_key', None), self.key)
Example #54
def create_document_thumbnail(self, object_id):
    """
    Create thumbnail for a document.
    """
    logger.debug("Generating thumbnail for document #{}.".format(object_id))

    try:
        document = Document.objects.get(id=object_id)
    except Document.DoesNotExist:
        logger.error("Document #{} does not exist.".format(object_id))
        return

    image_path = None
    image_file = None

    if document.is_image:
        if not os.path.exists(storage.path(document.doc_file.name)):
            from shutil import copyfile
            copyfile(document.doc_file.path,
                     storage.path(document.doc_file.name))
        image_file = storage.open(document.doc_file.name, 'rb')
    elif document.is_video or document.is_audio:
        image_file = open(document.find_placeholder(), 'rb')
    elif document.is_file:
        try:
            document_location = storage.path(document.doc_file.name)
        except NotImplementedError as e:
            logger.debug(e)
            document_location = storage.url(document.doc_file.name)

        try:
            image_path = render_document(document_location)
            if image_path is not None:
                try:
                    image_file = open(image_path, 'rb')
                except Exception as e:
                    logger.debug(e)
                    logger.debug(
                        "Failed to render document #{}".format(object_id))
            else:
                logger.debug("Failed to render document #{}".format(object_id))
        except ConversionError as e:
            logger.debug("Could not convert document #{}: {}.".format(
                object_id, e))
        except NotImplementedError as e:
            logger.debug("Failed to render document #{}: {}".format(
                object_id, e))

    thumbnail_content = None
    try:
        try:
            thumbnail_content = generate_thumbnail_content(image_file)
        except Exception as e:
            logger.error(
                "Could not generate thumbnail, falling back to 'placeholder': {}"
                .format(e))
            thumbnail_content = generate_thumbnail_content(
                document.find_placeholder())
    except Exception as e:
        logger.error("Could not generate thumbnail: {}".format(e))
        return
    finally:
        if image_file is not None:
            image_file.close()

        if image_path is not None:
            os.remove(image_path)

    if not thumbnail_content:
        logger.warning("Thumbnail for document #{} empty.".format(object_id))
    filename = 'document-{}-thumb.png'.format(document.uuid)
    document.save_thumbnail(filename, thumbnail_content)
    logger.debug("Thumbnail for document #{} created.".format(object_id))
Example #55
    def _storage_filename(wfm_id):
        fname = f'{wfm_id}-{uuid.uuid1()}-fetch.dat'
        return default_storage.path(fname)
Example #56
def download(request, resourceid, sender=Layer):

    instance = resolve_object(
        request,
        sender, {'pk': resourceid},
        permission='base.download_resourcebase',
        permission_msg=_(
            "You are not permitted to save or edit this resource."))

    if isinstance(instance, Layer):
        try:
            upload_session = instance.get_upload_session()
            layer_files = [
                item for idx, item in enumerate(
                    LayerFile.objects.filter(upload_session=upload_session))
            ]

            # Create Target Folder
            dirpath = tempfile.mkdtemp()
            dir_time_suffix = get_dir_time_suffix()
            target_folder = os.path.join(dirpath, dir_time_suffix)
            if not os.path.exists(target_folder):
                os.makedirs(target_folder)

            # Copy all Layer related files into a temporary folder
            for l in layer_files:
                if storage.exists(l.file):
                    geonode_layer_path = storage.path(l.file)
                    base_filename, original_ext = os.path.splitext(
                        geonode_layer_path)
                    shutil.copy2(geonode_layer_path, target_folder)

            # Let's check for associated SLD files (if any)
            try:
                for s in instance.styles.all():
                    sld_file_path = os.path.join(target_folder,
                                                 "".join([s.name, ".sld"]))
                    sld_file = open(sld_file_path, "w")
                    sld_file.write(s.sld_body.strip())
                    sld_file.close()

                    try:
                        sld_file = open(sld_file_path, "r")
                        response = requests.get(s.sld_url, timeout=TIMEOUT)
                        sld_remote_content = response.text
                        sld_file_path = os.path.join(
                            target_folder, "".join([s.name, "_remote.sld"]))
                        sld_file = open(sld_file_path, "w")
                        sld_file.write(sld_remote_content.strip())
                        sld_file.close()
                    except BaseException:
                        traceback.print_exc()
                        tb = traceback.format_exc()
                        logger.debug(tb)

            except BaseException:
                traceback.print_exc()
                tb = traceback.format_exc()
                logger.debug(tb)

            # Let's dump metadata
            target_md_folder = os.path.join(target_folder, ".metadata")
            if not os.path.exists(target_md_folder):
                os.makedirs(target_md_folder)

            try:
                links = Link.objects.filter(resource=instance.resourcebase_ptr)
                for link in links:
                    link_name = custom_slugify(link.name)
                    link_file = os.path.join(
                        target_md_folder,
                        "".join([link_name, ".%s" % link.extension]))
                    if link.link_type in ('data',):
                        # Skipping 'data' download links
                        continue
                    elif link.link_type in ('metadata', 'image'):
                        # Dumping metadata files and images
                        link_file = open(link_file, "wb")
                        try:
                            response = requests.get(link.url,
                                                    stream=True,
                                                    timeout=TIMEOUT)
                            response.raw.decode_content = True
                            shutil.copyfileobj(response.raw, link_file)
                        except BaseException:
                            traceback.print_exc()
                            tb = traceback.format_exc()
                            logger.debug(tb)
                        finally:
                            link_file.close()
                    elif link.link_type.startswith('OGC'):
                        # Dumping OGC/OWS links
                        link_file = open(link_file, "w")
                        link_file.write(link.url.strip())
                        link_file.close()
            except BaseException:
                traceback.print_exc()
                tb = traceback.format_exc()
                logger.debug(tb)

            # ZIP everything and return
            target_file_name = "".join([instance.name, ".zip"])
            target_file = os.path.join(dirpath, target_file_name)
            zip_dir(target_folder, target_file)
            response = HttpResponse(content=open(target_file, 'rb'),
                                    status=200,
                                    content_type="application/zip")
            response[
                'Content-Disposition'] = 'attachment; filename="%s"' % target_file_name
            return response
        except NotImplementedError:
            traceback.print_exc()
            tb = traceback.format_exc()
            logger.debug(tb)
            return HttpResponse(json.dumps({'error': 'file_not_found'}),
                                status=404,
                                content_type="application/json")

    return HttpResponse(json.dumps({'error': 'unauthorized_request'}),
                        status=403,
                        content_type="application/json")
Example #57
def handle_uploaded_file(f):
    save_path = os.path.join('uploads', f.name)
    path = default_storage.save(save_path, f)

    return default_storage.path(path)
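A hedged usage sketch for handle_uploaded_file above; the view and the 'file' form field name are assumptions for illustration only.
from django.http import JsonResponse

def upload_view(request):
    # Hypothetical wiring: accept a single uploaded file and return the
    # filesystem path produced by handle_uploaded_file.
    if request.method == 'POST' and 'file' in request.FILES:
        return JsonResponse({'path': handle_uploaded_file(request.FILES['file'])})
    return JsonResponse({'error': 'no file provided'}, status=400)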
Example #58
    def test_generate_filtered_attachments_zip_export(self):
        """Test media zip file export filters attachments"""
        filenames = [
            'OSMWay234134797.osm',
            'OSMWay34298972.osm',
        ]
        osm_fixtures_dir = os.path.realpath(
            os.path.join(os.path.dirname(api_tests.__file__), 'fixtures',
                         'osm'))
        paths = [
            os.path.join(osm_fixtures_dir, filename) for filename in filenames
        ]
        xlsform_path = os.path.join(osm_fixtures_dir, 'osm.xlsx')
        self._publish_xls_file_and_set_xform(xlsform_path)
        submission_path = os.path.join(osm_fixtures_dir, 'instance_a.xml')
        count = Attachment.objects.filter(extension='osm').count()
        self._make_submission_w_attachment(submission_path, paths)
        self._make_submission_w_attachment(submission_path, paths)
        self.assertTrue(
            Attachment.objects.filter(extension='osm').count() > count)

        options = {
            "extension": Export.ZIP_EXPORT,
            "query": u'{"_submission_time": {"$lte": "2019-01-13T00:00:00"}}'
        }
        filter_query = options.get("query")
        instance_ids = query_data(self.xform,
                                  fields='["_id"]',
                                  query=filter_query)

        export = generate_attachments_zip_export(Export.ZIP_EXPORT,
                                                 self.user.username,
                                                 self.xform.id_string, None,
                                                 options)

        self.assertTrue(export.is_successful)

        temp_dir = tempfile.mkdtemp()
        zip_file = zipfile.ZipFile(default_storage.path(export.filepath), "r")
        zip_file.extractall(temp_dir)
        zip_file.close()

        filtered_attachments = Attachment.objects.filter(
            instance__xform_id=self.xform.pk).filter(
                instance_id__in=[i_id['_id'] for i_id in instance_ids])

        self.assertNotEqual(Attachment.objects.count(),
                            filtered_attachments.count())

        for a in filtered_attachments:
            self.assertTrue(
                os.path.exists(os.path.join(temp_dir, a.media_file.name)))
        shutil.rmtree(temp_dir)

        # export with no query
        options.pop('query')
        export1 = generate_attachments_zip_export(Export.ZIP_EXPORT,
                                                  self.user.username,
                                                  self.xform.id_string, None,
                                                  options)

        self.assertTrue(export1.is_successful)

        temp_dir = tempfile.mkdtemp()
        zip_file = zipfile.ZipFile(default_storage.path(export1.filepath), "r")
        zip_file.extractall(temp_dir)
        zip_file.close()

        for a in Attachment.objects.all():
            self.assertTrue(
                os.path.exists(os.path.join(temp_dir, a.media_file.name)))
        shutil.rmtree(temp_dir)
Example #59
    def get_context_data(self, *args, **kwargs):
        ctx = super(PDFReportView, self).get_context_data(*args, **kwargs)

        r = self.request
        randomizer = self.request.GET.get('r') or ''
        ctx['app'] = app = self.get_app()
        ctx['kwargs'] = k = self.kwargs
        report_uri = app.url_for('index')
        client_kwargs = k.copy()
        client_kwargs.pop('app', None)
        context = self.get_context_url(_full=True, **k)
        ctx['context'] = {
            'url': context,
            'parts': self.get_further_resources_inputs(**k)
        }
        fr_map = self.get_further_resources(inputs=ctx['context']['parts'],
                                            **k)
        further_resources = []
        for fr_key, fr_list in fr_map.items():
            for fr_item in fr_list:
                # we could do it with set(), but we want to preserve order
                if fr_item in further_resources:
                    continue
                further_resources.append(fr_item)
        ctx['context']['further_resources'] = further_resources

        ctx['risk_analysis'] = risk_analysis = RiskAnalysis.objects.get(
            id=k['an'])

        def p(val):
            # for test we need full fs path
            if settings.TEST:
                return default_storage.path(val)
            # otherwise, we need nice absolute url
            _path = default_storage.url(val)
            return r.build_absolute_uri(_path)

        ctx['paths'] = {
            'map':
            p(
                os.path.join(
                    context, 'map_{}.png'.format(randomizer)
                    if randomizer else 'map.png')),
            'charts': [],
            'legend':
            p(
                os.path.join(
                    context, 'legend_{}.png'.format(randomizer)
                    if randomizer else 'legend.png'))
        }

        for cidx in range(0, 4):
            chart_path = os.path.join(
                context, 'chart_{}_{}.png'.format(cidx, randomizer)
                if randomizer else 'chart_{}.png'.format(cidx))
            if not os.path.exists(default_storage.path(chart_path)):
                continue
            chart_f = p(chart_path)
            ctx['paths']['charts'].append(chart_f)

        ctx['resources'] = {}
        ctx['resources']['permalink'] = '{}?init={}'.format(
            r.build_absolute_uri(report_uri),
            self.get_client_url(app, **client_kwargs))

        for resname in (
                'permalink',
                'dims',
                'dimsVal',
        ):
            _fname = os.path.join(
                context, '{}_{}.txt'.format(resname, randomizer)
                if randomizer else '{}.txt'.format(resname))
            fname = default_storage.path(_fname)
            if os.path.exists(fname):
                with open(fname, 'rt') as f:
                    data = json.loads(f.read())
                    ctx['resources'][resname] = data

        ctx['dimensions'] = self.get_dimensions(risk_analysis,
                                                ctx['resources'])
        return ctx
Example #60
def get_arp_root():
    if not default_storage.exists(settings.ARP_ROOT):
        os.mkdir(default_storage.path(settings.ARP_ROOT))
    return settings.ARP_ROOT
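A small hedged sketch of how get_arp_root might be combined with default_storage.save; the helper name and the 'arp.dat' file name are assumptions, not part of the original snippet.
from django.core.files.base import ContentFile

def save_arp_snapshot(data):
    # Hypothetical helper: make sure the ARP root exists, then write the raw
    # bytes beneath it through the default storage backend and return the
    # stored (possibly de-duplicated) relative path.
    root = get_arp_root()
    return default_storage.save(os.path.join(root, 'arp.dat'), ContentFile(data))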