Example 1
    def check_availability(self):
        """
        Perform check against Default Storage.
        """
        try:
            name = default_storage.get_valid_name('Informer Storage')

            # Save data.
            content = ContentFile('File used by StorageInformer checking.')
            path = default_storage.save(name, content)

            # Check properties.
            default_storage.size(path)
            default_storage.url(path)
            default_storage.path(path)
            default_storage.modified_time(path)
            default_storage.created_time(path)

            # And remove file.
            default_storage.delete(path)

            storage = default_storage.__class__.__name__
        except Exception as error:
            raise InformerException(
                'An error occurred when trying to access your storage: %s' % error)
        else:
            return True, 'Your %s is operational.' % storage
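Example 1 exercises the storage timestamp API that predates Django 1.10. A minimal sketch of the same availability check against the newer methods (get_modified_time() and get_created_time(), which replaced modified_time() and created_time() and return timezone-aware datetimes when USE_TZ = True); everything apart from the storage calls is illustrative:

from django.core.files.base import ContentFile
from django.core.files.storage import default_storage

def check_availability_modern():
    # Save a small probe file, read back its properties, then remove it.
    name = default_storage.get_valid_name('Informer Storage')
    path = default_storage.save(name, ContentFile('storage probe'))
    try:
        default_storage.size(path)
        default_storage.get_modified_time(path)  # replaces modified_time()
        default_storage.get_created_time(path)   # replaces created_time()
    finally:
        default_storage.delete(path)
    return True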
Example 2
def thumbnail(file, size='200x200'):
    # defining the size
    x, y = [int(x) for x in size.split('x')]
    # defining the filename and the miniature filename
    filehead, filetail = os.path.split(file.name)
    basename, format = os.path.splitext(filetail)
    miniature = basename + '_' + size + format
    filename = file.name
    miniature_filename = os.path.join(filehead, miniature)
    filehead, filetail = os.path.split(file.url)
    miniature_url = filehead + '/' + miniature

    thumbnail_exist = False
    if default_storage.exists(miniature_filename):
        mt_filename = default_storage.modified_time(filename)
        mt_miniature_filename = default_storage.modified_time(
                                                miniature_filename)
        if mt_filename > mt_miniature_filename:
            # remove the miniature
            default_storage.delete(miniature_filename)
        else:
            thumbnail_exist = True

    # if the image wasn't already resized, resize it
    if not thumbnail_exist:
        #image = Image.open(filename)
        image = Image.open(default_storage.open(filename))
        image.thumbnail([x, y], Image.ANTIALIAS)

        f = default_storage.open(miniature_filename, 'wb')  # binary mode for image data
        image.save(f, image.format, quality=90, optimize=1)
        f.close()

    return miniature_url
Example 3
def thumbnail(file, size="200x200"):
    # defining the size
    x, y = [int(x) for x in size.split("x")]
    # defining the filename and the miniature filename
    filehead, filetail = os.path.split(file.name)
    basename, format = os.path.splitext(filetail)
    miniature = basename + "_" + size + format
    filename = file.name
    miniature_filename = os.path.join(filehead, miniature)
    filehead, filetail = os.path.split(file.url)
    miniature_url = filehead + "/" + miniature

    thumbnail_exist = False
    if default_storage.exists(miniature_filename):
        mt_filename = default_storage.modified_time(filename)
        mt_miniature_filename = default_storage.modified_time(miniature_filename)
        if mt_filename > mt_miniature_filename:
            # remove the miniature
            default_storage.delete(miniature_filename)
        else:
            thumbnail_exist = True

    # if the image wasn't already resized, resize it
    if not thumbnail_exist:
        # image = Image.open(filename)
        image = Image.open(default_storage.open(filename))
        image.thumbnail([x, y], Image.ANTIALIAS)

        f = default_storage.open(miniature_filename, "wb")  # binary mode for image data
        image.save(f, image.format, quality=90, optimize=1)
        f.close()

    return miniature_url
Example 4
def serve(request, path):
    """
    Serve static files below a given point in the directory structure.

    To use, put a URL pattern such as::

        (r'^(?P<path>.*)$', 'django.views.static.serve', {'document_root' : '/path/to/my/files/'})
    """
    # file_record = File.objects.filter(path=path).first()

    if not default_storage.exists(path):
        raise Http404(u'页面不存在')  # 'Page not found'

    #  TODO  if not was_modified_since(request.META.get('HTTP_IF_MODIFIED_SINCE'),
    #                             statobj.st_mtime, statobj.st_size):
    #        return HttpResponseNotModified()
    last_modified = formatdate(
        time.mktime(default_storage.modified_time(path).timetuple()))
    content_type, encoding = mimetypes.guess_type(path)
    content_type = content_type or 'application/octet-stream'

    file_data = default_storage.open(path).read()
    response = HttpResponse(file_data, content_type=content_type)

    response["Last-Modified"] = last_modified
    response["Content-Length"] = default_storage.size(path)

    if encoding:
        response["Content-Encoding"] = encoding
    print('serving file %s %s' % (len(file_data), path))
    return response
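The TODO in Example 4 points at Django's was_modified_since() helper. A hedged sketch of how that conditional-response branch could be filled in, assuming the pre-Django-4.1 signature that still takes a size argument; maybe_not_modified() is an illustrative helper, not part of the original view:

import time

from django.core.files.storage import default_storage
from django.http import HttpResponseNotModified
from django.views.static import was_modified_since

def maybe_not_modified(request, path):
    # Return a 304 response when the client's If-Modified-Since header is still current.
    mtime = time.mktime(default_storage.modified_time(path).timetuple())
    size = default_storage.size(path)
    if not was_modified_since(request.META.get('HTTP_IF_MODIFIED_SINCE'), mtime, size):
        return HttpResponseNotModified()
    return None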
Example 5
 def _date(self):
     if self._date_stored is not None:
         return self._date_stored
     if self.exists():
         self._date_stored = time.mktime(default_storage.modified_time(self.path).timetuple())
         return self._date_stored
     return None
Example 6
def cleanup_extracted_file():
    log.info('Removing extracted files for file viewer.')
    root = os.path.join(settings.TMP_PATH, 'file_viewer')
    # Local storage uses local time for file modification. S3 uses UTC time.
    now = datetime.utcnow if storage_is_remote() else datetime.now
    for path in storage.listdir(root)[0]:
        full = os.path.join(root, path)
        age = now() - storage.modified_time(os.path.join(full,
                                                         'manifest.webapp'))
        if age.total_seconds() > (60 * 60):
            log.debug('Removing extracted files: %s, %dsecs old.' %
                      (full, age.total_seconds()))
            for subroot, dirs, files in walk_storage(full):
                for f in files:
                    storage.delete(os.path.join(subroot, f))
            # Nuke out the file and diff caches when the file gets removed.
            id = os.path.basename(path)
            try:
                int(id)
            except ValueError:
                continue

            key = hashlib.md5()
            key.update(str(id))
            cache.delete('%s:memoize:%s:%s' % (settings.CACHE_PREFIX,
                                               'file-viewer', key.hexdigest()))
Example 7
def story_download(request, story_id, filename, extension):
    from django.core.files.storage import default_storage as storage
    from ..downloads import get_format
    
    debug = settings.DEBUG and 'debug' in request.META['QUERY_STRING']
    
    story = get_object_or_404(Story, pk=story_id)
    fmt = get_format(extension)
    if fmt is None:
        raise Http404
    
    url = fmt.url(story)
    if url != request.path:
        return redirect(url)
    filepath = 'stories/%s/%s.%s' % (story_id, filename, extension)
    
    if (not storage.exists(filepath) or 
        storage.modified_time(filepath) < story.updated or
        debug):
        
        data = fmt.render(
            story=story,
            filename=filename,
            extension=extension,
            debug=debug,
        )
        if not debug:
            storage.save(filepath, ContentFile(data))
        
    if not debug:
        return redirect(storage.url(filepath))
    else:
        response = HttpResponse(data)
        response['Content-Type'] = fmt.debug_content_type
        return response
Example 8
 def _date(self):
     if self._date_stored is not None:
         return self._date_stored
     if self.exists():
         self._date_stored = time.mktime(default_storage.modified_time(self.path).timetuple())
         return self._date_stored
     return None
Example 9
 def test_modified_time(self):
     with self.save_file():
         modified_time = default_storage.modified_time("test.txt")
         logging.info("modified time: %s", modified_time)
         self.assertTrue(is_naive(modified_time))
         self.assertLess(abs(modified_time - make_naive(timezone.now(), utc)), timedelta(seconds=10))
         self.assertEqual(default_storage.accessed_time("test.txt"), modified_time)
         self.assertEqual(default_storage.created_time("test.txt"), modified_time)
Example 10
 def testModifiedTime(self):
     with self.save_file():
         modified_time = default_storage.modified_time("foo.txt")
         # Check that the timestamps are roughly equal.
         self.assertLess(abs(modified_time - make_naive(timezone.now(), utc)), timedelta(seconds=10))
         # All other timestamps are slaved to modified time.
         self.assertEqual(default_storage.accessed_time("foo.txt"), modified_time)
         self.assertEqual(default_storage.created_time("foo.txt"), modified_time)
Example 11
    def handle(self, *args, **options):
        for exported_file in default_storage.listdir(EXPORTS_SAVE_PATH)[1]:
            if exported_file == 'DONOTDELTETHISFOLDER':
                continue

            exported_file = '%s%s' % (EXPORTS_SAVE_PATH, exported_file)
            modified_time_delta = datetime.datetime.now() - default_storage.modified_time(exported_file)
            if modified_time_delta > datetime.timedelta(days=1):
                default_storage.delete(exported_file)
Example 12
        def compress_files(directories, dirtree, new_only=False):
            compressed_flag = settings.DIET_FLAG_PROCESSED_FILE

            for f in directories[1]:  # files from listdir

                if f and f.lower().endswith(('.png', '.jpg', '.jpeg',
                                             '.gif')):  # sometimes if == [u'']
                    dir_path = os.sep.join(dirtree)
                    path = os.path.join(dir_path, f)
                    flagged_file_name = '.%s.%s' % (f, compressed_flag)
                    flag_path = os.path.join(dir_path, flagged_file_name)
                    print("Processing %s" % path)
                    if new_only:
                        should_process_file = False

                        if not default_storage.exists(flag_path):
                            should_process_file = True
                        else:
                            file_mt = default_storage.modified_time(path)
                            try:
                                flag_mt = default_storage.modified_time(
                                    flag_path)
                            except AttributeError:
                                flag_mt = datetime.datetime.fromtimestamp(0)
                            if flag_mt < file_mt:
                                should_process_file = True

                        if should_process_file:
                            process_file(path)
                    else:
                        process_file(path)

                    # Add the flag for all files. It is only checked when the "new_only" option is used.
                    if default_storage.exists(flag_path):
                        default_storage.delete(flag_path)
                    default_storage.save(flag_path, ContentFile(""))

            for d in directories[0]:  # directories from list_dir
                dirtree.append(d)
                d = default_storage.listdir(os.sep.join(dirtree))
                compress_files(d, dirtree, new_only)
                dirtree.pop()  # remove last item, not needed anymore
Example 13
 def test_create_thumbnails_command(self):
     call_command("create_image_thumbnails")
     created_times = {}
     for attachment in Attachment.objects.filter(instance=self.instance):
         filename = attachment.media_file.name.replace('.jpg', '')
         for size in settings.THUMB_CONF.keys():
             thumbnail = '%s-%s.jpg' % (filename, size)
             self.assertTrue(default_storage.exists(thumbnail))
             created_times[size] = default_storage.modified_time(thumbnail)
     # replace or regenerate thumbnails if they exist
     call_command("create_image_thumbnails", force=True)
     for attachment in Attachment.objects.filter(instance=self.instance):
         filename = attachment.media_file.name.replace('.jpg', '')
         for size in settings.THUMB_CONF.keys():
             thumbnail = '%s-%s.jpg' % (filename, size)
             self.assertTrue(default_storage.exists(thumbnail))
             self.assertTrue(
                 default_storage.modified_time(thumbnail) >
                 created_times[size])
             default_storage.delete(thumbnail)
Example 14
    def _get_files(self):
        all_files, res = [], OrderedDict()

        # Not using os.path.walk so we get just the right order.
        def iterate(path):
            path_dirs, path_files = storage.listdir(path)
            for dirname in sorted(path_dirs):
                full = os.path.join(path, dirname)
                all_files.append(full)
                iterate(full)

            for filename in sorted(path_files):
                full = os.path.join(path, filename)
                all_files.append(full)

        iterate(self.dest)

        for path in all_files:
            filename = smart_unicode(os.path.basename(path), errors='replace')
            short = smart_unicode(path[len(self.dest) + 1:], errors='replace')
            mime, encoding = mimetypes.guess_type(filename)
            if not mime and filename == 'manifest.webapp':
                mime = 'application/x-web-app-manifest+json'
            if storage_is_remote():
                # S3 doesn't have directories, so we check for names with this
                # prefix and call it a directory if there are some.
                subdirs, subfiles = storage.listdir(path)
                directory = bool(subdirs or subfiles)
            else:
                directory = os.path.isdir(path)

            res[short] = {
                'binary': self._is_binary(mime, path),
                'depth': short.count(os.sep),
                'directory': directory,
                'filename': filename,
                'full': path,
                'md5': get_md5(path) if not directory else '',
                'mimetype': mime or 'application/octet-stream',
                'syntax': self.get_syntax(filename),
                'modified': (
                    time.mktime(storage.modified_time(path).timetuple())
                    if not directory else 0),
                'short': short,
                'size': storage.size(path) if not directory else 0,
                'truncated': self.truncate(filename),
                'url': reverse('mkt.files.list',
                               args=[self.file.id, 'file', short]),
                'url_serve': reverse('mkt.files.redirect',
                                     args=[self.file.id, short]),
                'version': self.file.version.version,
            }

        return res
Example 15
def get_file_modified_time_utc(file_path):
    """
    Gets the UTC timezone-aware modified time of a file at the given file path
    """
    file_timezone = (
        # time.tzname returns a 2 element tuple:
        #   (local non-DST timezone, e.g.: 'EST', local DST timezone, e.g.: 'EDT')
        pytz.timezone(time.tzname[0]) if settings.DEFAULT_FILE_STORAGE
        == 'django.core.files.storage.FileSystemStorage' else pytz.utc)
    return file_timezone.localize(
        default_storage.modified_time(file_path)).astimezone(pytz.utc)
Example 16
        def compress_files(data, dirtree, new_only=False):
            django_basic_cms_compressed_flag = "dbc_compressed"

            for f in data[1]:  # files from listdir

                if f and f.lower().endswith(('.png', '.jpg', '.jpeg',
                                             '.gif')):  # sometimes if == [u'']
                    dir_path = os.sep.join(dirtree)
                    path = os.path.join(dir_path, f)
                    flagged_file_name = '.%s.%s' % (
                        f, django_basic_cms_compressed_flag)
                    flag_path = os.path.join(dir_path, flagged_file_name)
                    print("Processing %s" % path)
                    if new_only:
                        should_process_file = False

                        if not default_storage.exists(flag_path):
                            should_process_file = True
                        else:
                            file_mt = default_storage.modified_time(path)
                            flag_mt = default_storage.modified_time(flag_path)
                            if flag_mt < file_mt:
                                should_process_file = True

                        if should_process_file:
                            process_file(path)
                    else:
                        process_file(path)

                    # Add the flag for all files. It is only checked when the "new_only" option is used.
                    if default_storage.exists(flag_path):
                        default_storage.delete(flag_path)
                    default_storage.save(flag_path, ContentFile(""))

            for d in data[0]:  # directories from list_dir
                dirtree.append(d)
                d = default_storage.listdir(os.sep.join(dirtree))
                compress_files(d, dirtree, new_only)
                dirtree.pop()  # remove last item, not needed anymore
Example 17
def clean_old_signed(seconds=60 * 60):
    """Clean out apps signed for reviewers."""
    log.info('Removing old apps signed for reviewers')
    root = settings.SIGNED_APPS_REVIEWER_PATH
    # Local storage uses local time for file modification. S3 uses UTC time.
    now = datetime.utcnow if storage_is_remote() else datetime.now
    for nextroot, dirs, files in walk_storage(root):
        for fn in files:
            full = os.path.join(nextroot, fn)
            age = now() - storage.modified_time(full)
            if age.total_seconds() > seconds:
                log.debug('Removing signed app: %s, %dsecs old.' % (
                    full, age.total_seconds()))
                storage.delete(full)
Example 18
def get_file_modified_time_utc(file_path):
    """
    Gets the UTC timezone-aware modified time of a file at the given file path
    """
    file_timezone = (
        # time.tzname returns a 2 element tuple:
        #   (local non-DST timezone, e.g.: 'EST', local DST timezone, e.g.: 'EDT')
        pytz.timezone(time.tzname[0])
        if settings.DEFAULT_FILE_STORAGE == 'django.core.files.storage.FileSystemStorage'
        else pytz.utc
    )
    return file_timezone.localize(
        default_storage.modified_time(file_path)
    ).astimezone(
        pytz.utc
    )
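On Django 1.10 and later the helper in Example 18 gets simpler: get_modified_time() already returns a timezone-aware datetime when USE_TZ = True, so there is no need to guess the filesystem's local timezone. A minimal sketch under that assumption:

import pytz

from django.core.files.storage import default_storage

def get_file_modified_time_utc(file_path):
    # Assumes Django >= 1.10 and USE_TZ = True: the storage returns an
    # aware datetime, which only needs converting to UTC.
    return default_storage.get_modified_time(file_path).astimezone(pytz.utc)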
Example 19
def directory_cleanup(dir_path, ndays):
    """
    Delete the files that are older than 'ndays' in the directory 'dir_path'
    The 'dir_path' should be a relative path. We cannot use os.walk.
    """
    foldernames, filenames = default_storage.listdir(dir_path)
    for filename in filenames:
        if not filename:
            continue
        file_path = os.path.join(dir_path, filename)
        modified_dt = default_storage.modified_time(file_path)
        if modified_dt + timedelta(days=ndays) < datetime.now():
            # the file is older than ndays, delete it
            default_storage.delete(file_path)
    for foldername in foldernames:
        folder_path = os.path.join(dir_path, foldername)
        directory_cleanup(folder_path, ndays)
Example 20
def directory_cleanup(dir_path, ndays):
    """
    Delete the files that are older than 'ndays' in the directory 'dir_path'
    The 'dir_path' should be a relative path. We cannot use os.walk.
    """
    foldernames, filenames = default_storage.listdir(dir_path)
    for filename in filenames:
        if not filename:
            continue
        file_path = os.path.join(dir_path, filename)
        modified_dt = default_storage.modified_time(file_path)
        if modified_dt + timedelta(days=ndays) < datetime.now():
            # the file is older than ndays, delete it
            default_storage.delete(file_path)
    for foldername in foldernames:
        folder_path = os.path.join(dir_path, foldername)
        directory_cleanup(folder_path, ndays)
Example 21
 def test_create_thumbnails_command(self):
     call_command("create_image_thumbnails")
     for attachment in Attachment.objects.filter(instance=self.instance):
         filename = attachment.media_file.name.replace('.jpg', '')
         for size in ['small', 'medium', 'large']:
             thumbnail = '%s-%s.jpg' % (filename, size)
             self.assertTrue(default_storage.exists(thumbnail))
     check_datetime = datetime.now()
     # replace or regenerate thumbnails if they exist
     call_command("create_image_thumbnails", force=True)
     for attachment in Attachment.objects.filter(instance=self.instance):
         filename = attachment.media_file.name.replace('.jpg', '')
         for size in ['small', 'medium', 'large']:
             thumbnail = '%s-%s.jpg' % (filename, size)
             self.assertTrue(default_storage.exists(thumbnail))
             self.assertTrue(
                 default_storage.modified_time(thumbnail) > check_datetime)
             default_storage.delete(thumbnail)
Example 22
 def test_create_thumbnails_command(self):
     call_command("create_image_thumbnails")
     for attachment in Attachment.objects.filter(instance=self.instance):
         filename = attachment.media_file.name.replace('.jpg', '')
         for size in ['small', 'medium', 'large']:
             thumbnail = '%s-%s.jpg' % (filename, size)
             self.assertTrue(
                 default_storage.exists(thumbnail))
     check_datetime = datetime.now()
     # replace or regenerate thumbnails if they exist
     call_command("create_image_thumbnails", force=True)
     for attachment in Attachment.objects.filter(instance=self.instance):
         filename = attachment.media_file.name.replace('.jpg', '')
         for size in ['small', 'medium', 'large']:
             thumbnail = '%s-%s.jpg' % (filename, size)
             self.assertTrue(
                 default_storage.exists(thumbnail))
             self.assertTrue(
                 default_storage.modified_time(thumbnail) > check_datetime)
             default_storage.delete(thumbnail)
Example 23
def get_archive_list(force=False, from_date=None):
	"""
	Gets a list of APOD items from the APOD archive page
	Only downloads the file if out of date

	Returns a list of dicts with publish_date and title keys
	"""
	cache_file = '%s/archive.html' % CACHE_FOLDER

	if storage.exists(cache_file) and (force or storage.modified_time(cache_file).date() < datetime.date.today()):
		storage.delete(cache_file)

	# Download archive HTML if needed
	if not storage.exists(cache_file):
		f = urllib2.urlopen(settings.APOD_ARCHIVE_URL)
		html = f.read()

		# Write it to disk
		storage.save(cache_file, ContentFile(html))

	# Read HTML
	with storage.open(cache_file) as f:
		html = f.read()

	# Parse HTML
	html_soup = BSoup(html)
	archive_links = html_soup.b.findAll("a")

	for link in archive_links:
		publish_date = dateparser.parse(link.previous.strip().strip(':')).date()
		if from_date and publish_date <= from_date:
			return

		yield {
			'publish_date': publish_date,
			'title': unicode(link.next),
		}
Example 24
def get_course_report_download_details(course_id, report_name):
    """
    Determine the path that the report file should be located at,
    then return metadata sufficient for downloading it.
    """
    report_location_template = getattr(
        settings,
        'COURSE_REPORT_FILE_LOCATION_TEMPLATE',
        '{course_id}_{report_name}.csv'
    )
    # Course IDs contain characters that may not be valid in various
    # filesystems; here we remove them before looking for the file or
    # creating the downloadable filename.
    course_id = get_filename_safe_course_id(course_id)
    report_location = report_location_template.format(
        course_id=course_id,
        report_name=report_name
    )
    try:
        if not default_storage.exists(report_location):
            raise ReportFileNotFoundError(course_id=course_id, report_name=report_name)
    except (
            AttributeError,
            NotImplementedError,
            ImportError,
            SuspiciousFileOperation,
            SuspiciousOperation
    ):
        # Error out if:
        # - We don't have a method to determine file existence
        # - Such a method isn't implemented
        # - We can't import the specified storage class
        # - We don't have privileges for the specified file location
        raise CannotCreateReportDownloadLinkError

    try:
        last_modified = default_storage.modified_time(report_location)
    except (NotImplementedError, AttributeError):
        last_modified = None

    try:
        download_size = default_storage.size(report_location)
    except (NotImplementedError, AttributeError):
        download_size = None

    download_filename = '{}-{}-{}.csv'.format(
        course_id,
        report_name,
        # We need a date for the filename; if we don't know when it was last modified,
        # use the current date and time to stamp the filename.
        (last_modified or datetime.datetime.utcnow()).strftime('%Y%m%dT%H%M%SZ')
    )
    url, expiration_date = get_file_object_url(report_location, download_filename)

    details = {
        'course_id': course_id,
        'report_name': report_name,
        'download_url': url
    }
    # These are all optional items that aren't guaranteed. The URL isn't guaranteed
    # either, but we'll raise an exception earlier if we don't have it.
    if last_modified is not None:
        details.update({'last_modified': last_modified.strftime(settings.DATETIME_FORMAT)})
    if expiration_date is not None:
        details.update({'expiration_date': expiration_date.strftime(settings.DATETIME_FORMAT)})
    if download_size is not None:
        details.update({'file_size': download_size})
    return details
Example 25
    def _get_files(self):
        all_files, res = [], OrderedDict()

        # Not using os.path.walk so we get just the right order.
        def iterate(path):
            path_dirs, path_files = storage.listdir(path)
            for dirname in sorted(path_dirs):
                full = os.path.join(path, dirname)
                all_files.append(full)
                iterate(full)

            for filename in sorted(path_files):
                full = os.path.join(path, filename)
                all_files.append(full)

        iterate(self.dest)

        for path in all_files:
            filename = smart_unicode(os.path.basename(path), errors='replace')
            short = smart_unicode(path[len(self.dest) + 1:], errors='replace')
            mime, encoding = mimetypes.guess_type(filename)
            if not mime and filename == 'manifest.webapp':
                mime = 'application/x-web-app-manifest+json'
            if storage_is_remote():
                # S3 doesn't have directories, so we check for names with this
                # prefix and call it a directory if there are some.
                subdirs, subfiles = storage.listdir(path)
                directory = bool(subdirs or subfiles)
            else:
                directory = os.path.isdir(path)

            res[short] = {
                'binary': self._is_binary(mime, path),
                'depth': short.count(os.sep),
                'directory': directory,
                'filename': filename,
                'full': path,
                'md5': get_md5(path) if not directory else '',
                'mimetype': mime or 'application/octet-stream',
                'syntax': self.get_syntax(filename),
                'modified': (
                    time.mktime(storage.modified_time(path).timetuple())
                    if not directory else 0),
                'short': short,
                'size': storage.size(path) if not directory else 0,
                'truncated': self.truncate(filename),
                'url': reverse('mkt.files.list',
                               args=[self.file.id, 'file', short]),
                'url_serve': reverse('mkt.files.redirect',
                                     args=[self.file.id, short]),
                'version': self.file.version.version,
            }

        return res
Example 26
 def mtime(self):
     try:
         return default_storage.modified_time(self.get_local_path())
     except FileNotFoundError:
         return None
Example 27
def get_course_report_download_details(course_id, report_name):
    """
    Determine the path that the report file should be located at,
    then return metadata sufficient for downloading it.
    """
    report_location_template = getattr(settings,
                                       'COURSE_REPORT_FILE_LOCATION_TEMPLATE',
                                       '{course_id}_{report_name}.csv')
    # Course IDs contain characters that may not be valid in various
    # filesystems; here we remove them before looking for the file or
    # creating the downloadable filename.
    course_id = get_filename_safe_course_id(course_id)
    report_location = report_location_template.format(course_id=course_id,
                                                      report_name=report_name)
    try:
        if not default_storage.exists(report_location):
            raise ReportFileNotFoundError(course_id=course_id,
                                          report_name=report_name)
    except (AttributeError, NotImplementedError, ImportError,
            SuspiciousFileOperation, SuspiciousOperation):
        # Error out if:
        # - We don't have a method to determine file existence
        # - Such a method isn't implemented
        # - We can't import the specified storage class
        # - We don't have privileges for the specified file location
        raise CannotCreateReportDownloadLinkError

    try:
        last_modified = default_storage.modified_time(report_location)
    except (NotImplementedError, AttributeError):
        last_modified = None

    try:
        download_size = default_storage.size(report_location)
    except (NotImplementedError, AttributeError):
        download_size = None

    download_filename = '{}-{}-{}.csv'.format(
        course_id,
        report_name,
        # We need a date for the filename; if we don't know when it was last modified,
        # use the current date and time to stamp the filename.
        (last_modified
         or datetime.datetime.utcnow()).strftime('%Y%m%dT%H%M%SZ'))
    url, expiration_date = get_file_object_url(report_location,
                                               download_filename)

    details = {
        'course_id': course_id,
        'report_name': report_name,
        'download_url': url
    }
    # These are all optional items that aren't guaranteed. The URL isn't guaranteed
    # either, but we'll raise an exception earlier if we don't have it.
    if last_modified is not None:
        details.update(
            {'last_modified': last_modified.strftime(settings.DATETIME_FORMAT)})
    if expiration_date is not None:
        details.update(
            {'expiration_date': expiration_date.strftime(settings.DATETIME_FORMAT)})
    if download_size is not None:
        details.update({'file_size': download_size})
    return details
Example 28
 def _get_key(image):
     if order_titles:
         return image['title']
     return default_storage.modified_time(image['src'])
Example 29
 def test_mtime(self):
     """ Ensure we can get the modified time """
     mtime = default_storage.modified_time(self.filepath)
     self.assertIsNotNone(mtime)
Example 30
    def generate(self):
        if hasattr(self.dest, 'write'):
            self._do_generate()
        else:
            do_generate = False
            if self.cache_dir is not None:
                if isinstance(self.source_image, FieldFile) or \
                   isinstance(self.source_image, File):
                    source_image = force_unicode(self.source_image)
                elif not isinstance(self.source_image, basestring):
                    source_image = pickle.dumps(self.source_image.read())
                    self.source_image.seek(0)
                else:
                    source_image = smart_str(force_unicode(self.source_image))

                source_image = os.path.join(self.cache_dir,
                                      md5_constructor(source_image).hexdigest())
                if not os.path.exists(source_image):
                    path = os.path.split(source_image)[0]
                    if not os.path.exists(path):
                        os.makedirs(path)
                    open(source_image, 'w').close()
                if not isinstance(self.dest, basestring):
                    dest = pickle.dumps(self.dest.read())
                    self.dest.seek(0)
                else:
                    dest = smart_str(force_unicode(self.dest))
                dest = os.path.join(self.cache_dir,
                                      md5_constructor(dest).hexdigest())
            else:
                source_image = force_unicode(self.source_image)
                dest = self.dest

            # If the destination file does not exist then generate it
            if not os.path.exists(dest):
                do_generate = True
            else:
                # otherwise do this hodge podge of time comparisons
                if hasattr(default_storage, 'modified_time') and not self.cache_dir:

                    do_generate = default_storage.modified_time(source_image) > \
                            default_storage.modified_time(dest)

                elif hasattr(default_storage, 'getmtime') and not self.cache_dir:
                    # An old custom method from before Django supported
                    # modified_time(). Kept around for backwards compatibility.
                    do_generate = default_storage.getmtime(source_image) > \
                            default_storage.getmtime(dest)
                else:
                    if not self.cache_dir:
                        source_image_cache = os.path.join(settings.MEDIA_ROOT, source_image)
                        dest_cache = os.path.join(settings.MEDIA_ROOT, dest)
                    else:
                        source_image_cache, dest_cache = source_image, dest
                    try:
                        do_generate = os.path.getmtime(source_image_cache) > \
                                os.path.getmtime(dest_cache)
                    except OSError:
                        do_generate = True

            if do_generate:
                if self.cache_dir is not None:
                    path = os.path.split(dest)[0]
                    if not os.path.exists(path):
                        os.makedirs(path)
                    open(dest, 'w').close()
                try:
                    self._do_generate()
                except:
                    if self.cache_dir is not None:
                        if os.path.exists(dest):
                            os.remove(dest)
                    raise
Example 31
    def generate(self):
        if hasattr(self.dest, 'write'):
            self._do_generate()
        else:
            do_generate = False
            if self.cache_dir is not None:
                if isinstance(self.source, FieldFile) or \
                   isinstance(self.source, File):
                    source = force_unicode(self.source)
                elif not isinstance(self.source, basestring):
                    source = pickle.dumps(self.source.read())
                    self.source.seek(0)
                else:
                    source = smart_str(force_unicode(self.source))
                source = os.path.join(self.cache_dir,
                                      hashlib.md5(source).hexdigest())
                if not os.path.exists(source):
                    path = os.path.split(source)[0]
                    if not os.path.exists(path):
                        os.makedirs(path)
                    open(source, 'w').close()
                if not isinstance(self.dest, basestring):
                    dest = pickle.dumps(self.dest.read())
                    self.dest.seek(0)
                else:
                    dest = smart_str(force_unicode(self.dest))
                dest = os.path.join(self.cache_dir,
                                      hashlib.md5(dest).hexdigest())
            else:
                source = force_unicode(self.source)
                dest = self.dest

            if hasattr(default_storage, 'modified_time') and not self.cache_dir:
                try:
                    source_mod_time = default_storage.modified_time(source)
                except EnvironmentError:
                    # Means the source file doesn't exist, so nothing can be
                    # done.
                    do_generate = False
                else:
                    try:
                        dest_mod_time = default_storage.modified_time(dest)
                    except EnvironmentError:
                        # Means the destination file doesn't exist so it must be
                        # generated.
                        do_generate = True
                    else:
                        do_generate = source_mod_time > dest_mod_time
            else:
                if not self.cache_dir:
                    source_cache = os.path.join(settings.MEDIA_ROOT, source)
                    dest_cache = os.path.join(settings.MEDIA_ROOT, dest)
                else:
                    source_cache, dest_cache = source, dest
                try:
                    do_generate = os.path.getmtime(source_cache) > \
                            os.path.getmtime(dest_cache)
                except OSError:
                    do_generate = True

            if do_generate:
                if self.cache_dir is not None:
                    path = os.path.split(dest)[0]
                    if not os.path.exists(path):
                        os.makedirs(path)
                    open(dest, 'w').close()
                try:
                    self._do_generate()
                except:
                    if self.cache_dir is not None:
                        if os.path.exists(dest):
                            os.remove(dest)
                    raise
Example 32
 def date(self):
     if self.exists:
         return time.mktime(
             default_storage.modified_time(self.path).timetuple())
     return None
Example 33
 def date(self):
     if self.exists:
         return time.mktime(
             default_storage.modified_time(self.path).timetuple())
     return None