def collect_files(self):
    """Find and write static files into ``<BUILD_DIR>/static/``.

    Temporarily removes the "compiled" entry from
    ``settings.STATICFILES_DIRS`` (if present) so compiled sources are
    skipped, then copies every input file into the build storage.
    """
    write_storage = StaticFilesStorage(self.BUILD_DIR + "/static/")
    original_dirs = settings.STATICFILES_DIRS
    # Drop the "compiled" directory so its contents are not collected.
    settings.STATICFILES_DIRS = [s for s in original_dirs if s != 'compiled']
    try:
        for prefixed_path, source_file in self._input_files():
            write_storage.save(prefixed_path, source_file)
    finally:
        # Always restore the global setting, even when a save raises;
        # the original left the mutated setting behind on error.
        settings.STATICFILES_DIRS = original_dirs
def inner(request, dir_):
    """Render a single entry found under ``dir_`` of the static storage.

    ``listdir`` returns a ``(directories, files)`` pair; this picks the
    first name from the last non-empty of those two lists and passes it
    to the template as an image path.
    NOTE(review): only one entry is ever shown — presumably intentional;
    confirm against ``main/inner.html``.
    """
    storage = StaticFilesStorage()
    listing = list(storage.listdir(dir_))
    chosen = listing  # mirrors original fallback (crashes below if both lists empty)
    for entries in listing:
        if len(entries) > 0:
            chosen = entries[0]
    fullpath = os.path.join(dir_, chosen)
    ctx = {'img': fullpath}
    return render(request, 'main/inner.html', ctx)
def handle(self, *args, **options):
    """Sync local static files to S3, skipping pipeline-compiled sources."""
    local_storage = StaticFilesStorage()
    # Honour project-level ignore patterns, if configured.
    ignore_patterns = getattr(settings, 'SYNCSTATIC_IGNORE_PATTERNS', None)
    files = set(utils.get_files(local_storage, ignore_patterns=ignore_patterns))
    # Remove any files that went into compilation.
    files -= set(settings.PIPELINE_JS['main']['source_filenames'])
    files -= set(settings.PIPELINE_CSS['main']['source_filenames'])
    for name in files:
        print('syncing to s3: %s' % name)
        # Close each source handle after upload; the original leaked them.
        source = local_storage.open(name, 'r')
        try:
            staticfiles_storage.save(name, source)
        finally:
            source.close()
def handle(self, *args, **options):
    """Sync local static files to S3, skipping pipeline-compiled sources."""
    local_storage = StaticFilesStorage()
    # Honour project-level ignore patterns, if configured.
    ignore_patterns = getattr(settings, 'SYNCSTATIC_IGNORE_PATTERNS', None)
    files = set(utils.get_files(local_storage, ignore_patterns=ignore_patterns))
    # Remove any files that went into compilation.
    files -= set(settings.PIPELINE_JS['main']['source_filenames'])
    files -= set(settings.PIPELINE_CSS['main']['source_filenames'])
    for name in files:
        print('syncing to s3: %s' % name)
        # Close each source handle after upload; the original leaked them.
        source = local_storage.open(name, 'r')
        try:
            staticfiles_storage.save(name, source)
        finally:
            source.close()
def list(self, ignore_patterns):
    """Generate bempy assets and list them for collectstatic.

    Builds bempy.css / bempy.js from every view that declares a
    ``block`` attribute and yields the generated files with a storage
    rooted at ``settings.BEMPY_STATIC_DIR``.  ``ignore_patterns`` is
    accepted for finder-API compatibility but is not used here.
    """
    from django.conf import settings
    urlconf = __import__(settings.ROOT_URLCONF, {}, {}, [''])
    views = extract_views_from_urlpatterns(urlconf.urlpatterns)
    # Only views declared with a bempy block contribute assets.
    views = [view for view in views if getattr(view[0], 'block', None)]

    ensure_dir(settings.BEMPY_STATIC_DIR)

    css = get_css_for_pages(views)
    css_filename = os.path.join(settings.BEMPY_STATIC_DIR, 'bempy.css')
    # Binary mode: the content is explicitly encoded to UTF-8 bytes,
    # which fails in text mode on Python 3.
    with open(css_filename, 'wb') as f:
        f.write(css.encode('utf-8'))

    js = get_js_for_pages(views)
    js_filename = os.path.join(settings.BEMPY_STATIC_DIR, 'bempy.js')
    with open(js_filename, 'wb') as f:
        f.write(js.encode('utf-8'))

    storage = StaticFilesStorage(settings.BEMPY_STATIC_DIR)
    return [
        ('bempy.css', storage),
        ('bempy.js', storage),
    ]
def helppages_index(request):
    # Build an index of help pages discovered in the static storage.
    s = StaticFilesStorage()
    pages = list(get_files(s, location='helppages'))
    # Strip the leading "helppages/" (10 chars) and what is presumably a
    # ".html" suffix (5 chars) to get slugs -- TODO confirm the filename
    # layout against the actual helppages directory.
    slugs = [x[10:-5] for x in pages]
    # Leading part before '_' is taken as the pk; the [10:-1] slice drops
    # only the final character -- NOTE(review): looks suspicious, verify.
    pks = [x[10:-1].split('_')[0] for x in pages]
    # locals() is the template context, so the variable names above
    # (pages, slugs, pks) are part of the template contract.
    return render(request, 'article_index.html', locals())
def test_get_file_hash(case):
    # type: (TestCase) -> None
    """The local-file hash is a 32-character alphanumeric digest."""
    strategy = Strategy()
    local_storage = StaticFilesStorage()
    with tempfile.NamedTemporaryFile(dir=local_storage.base_location) as f:
        f.write(b"spam")
        # Flush so the bytes are on disk before hashing; without this the
        # hash is computed over an empty, still-buffered file.
        f.flush()
        hash_ = strategy.get_local_file_hash(f.name, local_storage)
    # [A-Za-z0-9]: the original [A-z] also matched '[', ']', '^', '_', '`'.
    case.assertTrue(re.fullmatch(r"^[A-Za-z0-9]{32}$", hash_) is not None)
def main(request):
    """Render the top-level static directory listing.

    ``listdir('.')`` returns a ``(directories, files)`` pair; the last
    non-empty of the two lists is what gets rendered.
    """
    storage = StaticFilesStorage()
    listing = list(storage.listdir('.'))
    entries = []
    for group in listing:
        if len(group) > 0:
            entries = group
    # NOTE(review): the original raised NameError when both groups were
    # empty; an empty listing now renders an empty page instead.
    context = {'dirs': entries}
    return render(request, 'main/main.html', context)
def invoke_collect(self, layer, path):
    """Run one collect pass for ``layer``.

    Resets all per-run bookkeeping, binds ``self.storage`` to the
    layer's target STATIC_ROOT, and delegates to ``self.collect()``.
    """
    self.stdout.write("Collecting layer %s to path %s\n" % (layer, path))
    self.storage = StaticFilesStorage(path)
    # A storage that cannot resolve filesystem paths is remote.
    is_local = True
    try:
        self.storage.path('')
    except NotImplementedError:
        is_local = False
    self.local = is_local
    # Fresh bookkeeping for this pass.
    self.copied_files, self.symlinked_files = [], []
    self.unmodified_files, self.post_processed_files = [], []
    self.layer = layer
    return self.collect()
def test_get_file_hash(case):
    """An ETag hash is a 34-char double-quoted string of hash characters."""
    # Disable this test on AppVeyor/Windows until the file-permissions
    # issue with NamedTemporaryFile is solved.
    if platform.system() == 'Windows':
        return
    storage = StaticFilesStorage()
    with tempfile.NamedTemporaryFile(dir=storage.base_location) as f:
        f.write(b'spam')
        # Flush so the hash is computed over the written bytes rather
        # than an empty, still-buffered file.
        f.flush()
        h = etag.get_file_hash(storage, f.name)
    case.assertEqual(len(h), 34)
    case.assertTrue(h.startswith('"'))
    case.assertTrue(h.endswith('"'))
    for c in h[1:-1]:
        case.assertIn(c, hash_characters)
class CvTemplateForm(forms.Form):
    """Form for choosing a CV template among the .docx files under 'collab'."""
    s = StaticFilesStorage()
    # Build the select choices at class-definition time from the static
    # storage, skipping anything inside the assets subtree.
    repertoire = []
    repertoire_pas_propre = list(get_files(s, location='collab'))
    for elt in repertoire_pas_propre:
        if "assets" in elt:
            continue
        # NOTE: the backslash prefix assumes Windows-style listing paths.
        nomDoc = elt.replace("collab\\", '')
        if ".docx" in nomDoc:
            valeur = (nomDoc, nomDoc)
            repertoire.append(valeur)

    template = forms.CharField(label='Quel template voulez-vous ?',
                               widget=forms.Select(choices=repertoire))
    collabid = forms.CharField(widget=forms.HiddenInput(), required=True)
def test_templatetag(template_string, filename):
    """Rendering the tag for an existing file emits its <link> and no HTML comments."""
    storage = StaticFilesStorage(location=settings.STATICFILES_TEST_DIR)
    try:
        css_body = ContentFile("body { background: red; }")
        storage.save(name=filename, content=css_body)
        rendered = T(template_string).render(CTX)
        lines = rendered.strip().split("\n")
        link_tag = '<link href="{}{}" rel="stylesheet" type="text/css" />'.format(
            settings.STATIC_URL, filename)
        assert link_tag in lines
        assert "<!--" not in lines
        assert "-->" not in lines
    finally:
        # Always remove the fixture file, even on assertion failure.
        storage.delete(filename)
def test_should_copy_file(case):
    # type: (TestCase) -> None
    """A file is copied iff its local hash differs from the remote hash."""
    strategy = Strategy()
    local_storage = StaticFilesStorage()
    remote_hash = "foo"
    with mock.patch.object(
        strategy, "get_remote_file_hash", mock.MagicMock(return_value=remote_hash)
    ):
        # Identical hashes: nothing to copy.
        matching = mock.MagicMock(return_value=remote_hash)
        with mock.patch.object(strategy, "get_local_file_hash", matching):
            case.assertFalse(
                strategy.should_copy_file("path", "prefixed_path", local_storage)
            )
        # Diverging hashes: the file must be copied.
        differing = mock.MagicMock(return_value="bar")
        with mock.patch.object(strategy, "get_local_file_hash", differing):
            case.assertTrue(
                strategy.should_copy_file("path", "prefixed_path", local_storage)
            )
def test_templatetag_multiple_parts_of_path():
    """Each path segment yields a cumulative, hyphen-joined CSS <link> line."""
    filenames = ('css/level1.css', 'css/level1-level2.css',
                 'css/level1-level2-level3.css')
    storage = StaticFilesStorage(location=settings.STATICFILES_TEST_DIR)
    try:
        for name in filenames:
            storage.save(name=name,
                         content=ContentFile("body { background: red; }"))
        rendered = T(
            '{% load path2css %}{% css4path "/level1/level2/level3/" %}'
        ).render(CTX).strip()
        expected = [
            '<link href="{}{}" rel="stylesheet" type="text/css" />'.format(
                settings.STATIC_URL, name)
            for name in filenames
        ]
        assert rendered.split("\n") == expected
    finally:
        # Clean up every fixture file regardless of the outcome.
        for name in filenames:
            storage.delete(name)
def test_templatetag_assignment(request_path, filenames):
    """The `as VAR` form exposes (part, exists) pairs for template loops."""
    storage = StaticFilesStorage(location=settings.STATICFILES_TEST_DIR)
    try:
        for name in filenames:
            storage.save(name=name,
                         content=ContentFile("body { background: red; }"))
        resp = T('''
        {% load path2css %}
        {% css4path path as GOOSE %}
        {% for part, exists in GOOSE %}
        {% if exists %}{{ part }}{% endif %}
        {% endfor %}
        ''').render(Context({'path': request_path})).strip()
        # Only non-blank rendered lines are meaningful.
        parts = [line.strip() for line in resp.split("\n") if line.strip()]
        expected_output = [
            "{}{}".format(settings.STATIC_URL, name) for name in filenames
        ]
        assert parts == expected_output
    finally:
        for name in filenames:
            storage.delete(name)
def xmlfiles(mylocation="xml"):
    """Return absolute STATIC_ROOT paths for every file under ``mylocation``."""
    storage = StaticFilesStorage()
    discovered = list(get_files(storage, location=mylocation))
    return [os.path.join(settings.STATIC_ROOT, name) for name in discovered]
def __init__(self, *args, **kwargs):
    """Initialise the wrapped storage, then attach the local cache storage.

    The cache is a plain filesystem storage rooted at
    CONVOY_LOCAL_CACHE_ROOT.
    """
    super(S3LocalCachedMixin, self).__init__(*args, **kwargs)
    # Local mirror used by the mixin's read/write methods.
    self._local = StaticFilesStorage(location=CONVOY_LOCAL_CACHE_ROOT)
class Command(BaseCommand):
    """
    Do a 'collectstatic' for each LAYER defined in the settings (or default
    to collectstatic behaviour).

    This, unfortunately, means copy / repeating some original collectstatic
    code.
    """
    option_list = BaseCommand.option_list

    def __init__(self, *args, **kwargs):
        """ Make sure there's always a self.layer """
        self.layer = ""
        super(Command, self).__init__(*args, **kwargs)

    def handle_noargs(self, **options):
        """ Handle the invocation similarly to collectstatic """
        self.set_options(**options)
        # Warn before doing anything more.
        if (isinstance(self.storage, FileSystemStorage) and
                self.storage.location):
            destination_path = self.storage.location
            destination_display = ':\n\n %s' % destination_path
        else:
            destination_path = None
            destination_display = '.'
        if self.clear:
            clear_display = 'This will DELETE EXISTING FILES!'
        else:
            clear_display = 'This will overwrite existing files!'
        if self.interactive:
            # Python 2: raw_input for the interactive confirmation prompt.
            confirm = raw_input(u"""
You have requested to collect static files at the destination
location as specified in your settings%s

%s
Are you sure you want to do this?

Type 'yes' to continue, or 'no' to cancel: """ % (destination_display, clear_display))
            if confirm != 'yes':
                raise CommandError("Collecting static files cancelled.")

        layers = getattr(settings, "LAYERS", {})
        modified_count = 0
        unmodified_count = 0
        post_processed_count = 0

        ## .. but iterate over the layers
        if layers:
            # One collect pass per configured layer; totals are summed.
            for layer, path in layers.iteritems():
                collected = self.invoke_collect(layer, path)
                modified_count += len(collected['modified'])
                unmodified_count += len(collected['unmodified'])
                post_processed_count += len(collected['post_processed'])
        else:
            # No layers configured: behave like plain collectstatic.
            collected = self.collect()
            modified_count = len(collected['modified'])
            unmodified_count = len(collected['unmodified'])
            post_processed_count = len(collected['post_processed'])

        if self.verbosity >= 1:
            template = ("\n%(modified_count)s %(identifier)s %(action)s"
                        "%(destination)s%(unmodified)s%(post_processed)s.\n")
            summary = template % {
                'modified_count': modified_count,
                'identifier': 'static file' + (modified_count != 1 and 's' or ''),
                'action': self.symlink and 'symlinked' or 'copied',
                'destination': (destination_path and
                                " to '%s'" % destination_path or ''),
                'unmodified': (collected['unmodified'] and
                               ', %s unmodified' % unmodified_count or ''),
                'post_processed': (collected['post_processed'] and
                                   ', %s post-processed' % post_processed_count or ''),
            }
            self.stdout.write(smart_str(summary))

    def invoke_collect(self, layer, path):
        """
        Invoke collect, reset all instance storage first and initialize a
        self.storage that's bound to the layers target STATIC_ROOT
        """
        self.stdout.write("Collecting layer %s to path %s\n" % (layer, path))
        self.storage = StaticFilesStorage(path)
        # A storage that cannot resolve filesystem paths is remote.
        try:
            self.storage.path('')
        except NotImplementedError:
            self.local = False
        else:
            self.local = True
        # Fresh bookkeeping for this pass.
        self.copied_files = []
        self.symlinked_files = []
        self.unmodified_files = []
        self.post_processed_files = []
        self.layer = layer
        return self.collect()

    def collect(self):
        """
        Copied from collectstatic's Command.collect() with a tiny
        storage-layer check ..
        """
        if self.symlink:
            if sys.platform == 'win32':
                raise CommandError("Symlinking is not supported by this "
                                   "platform (%s)." % sys.platform)
            if not self.local:
                raise CommandError("Can't symlink to a remote destination.")
        if self.clear:
            self.clear_dir('')
        if self.symlink:
            handler = self.link_file
        else:
            handler = self.copy_file
        found_files = SortedDict()
        for finder in finders.get_finders():
            # Layer-aware finders are told which layer is being collected.
            if isinstance(finder, AppLayerFinder):
                lister = lambda: finder.list(self.ignore_patterns, self.layer)
            else:
                lister = lambda: finder.list(self.ignore_patterns)
            for path, storage in lister():
                ## .. is the storage part of the current layer?
                if hasattr(storage, 'layer') and storage.layer != self.layer:
                    continue
                # Prefix the relative path if the source storage contains it
                if getattr(storage, 'prefix', None):
                    prefixed_path = os.path.join(storage.prefix, path)
                else:
                    prefixed_path = path
                if prefixed_path not in found_files:
                    found_files[prefixed_path] = (storage, path)
                    handler(path, prefixed_path, storage)
        # Here we check if the storage backend has a post_process
        # method and pass it the list of modified files.
        if self.post_process and hasattr(self.storage, 'post_process'):
            processor = self.storage.post_process(found_files,
                                                  dry_run=self.dry_run)
            for original_path, processed_path, processed in processor:
                if processed:
                    self.log(u"Post-processed '%s' as '%s" %
                             (original_path, processed_path), level=1)
                    self.post_processed_files.append(original_path)
                else:
                    self.log(u"Skipped post-processing '%s'" % original_path)
        return {
            'modified': self.copied_files + self.symlinked_files,
            'unmodified': self.unmodified_files,
            'post_processed': self.post_processed_files,
        }
from collectfast.strategies.base import HashStrategy
from collectfast.tests.utils import make_test


class Strategy(HashStrategy[FileSystemStorage]):
    """Minimal concrete HashStrategy over a local FileSystemStorage."""

    def __init__(self) -> None:
        super().__init__(FileSystemStorage())

    def get_remote_file_hash(self, prefixed_path: str) -> None:
        # No remote backend here; tests patch this method with mocks.
        pass


@make_test
def test_get_file_hash(case: TestCase) -> None:
    # Hash of a local temp file should be a 32-char alphanumeric digest.
    strategy = Strategy()
    local_storage = StaticFilesStorage()
    with tempfile.NamedTemporaryFile(dir=local_storage.base_location) as f:
        f.write(b"spam")
        # NOTE(review): no flush() before hashing — the bytes may still be
        # buffered, so the hash is presumably of an empty file; verify.
        hash_ = strategy.get_local_file_hash(f.name, local_storage)
    case.assertTrue(re.fullmatch(r"^[A-z0-9]{32}$", hash_) is not None)


@make_test
def test_should_copy_file(case: TestCase) -> None:
    # NOTE(review): this chunk appears truncated — the patch object is
    # built but never entered; the full test body is not visible here.
    strategy = Strategy()
    local_storage = StaticFilesStorage()
    remote_hash = "foo"
    mock_remote_hash = mock.patch.object(
        strategy, "get_remote_file_hash", mock.MagicMock(return_value=remote_hash))
def default_header():
    """Return the bundled default header image as an ImageFile.

    NOTE: the file handle is opened here and not closed — ImageFile
    consumes it.
    """
    header_path = StaticFilesStorage().path('defaults/header.jpg')
    return ImageFile(open(header_path, 'rb'))
def get_file():
    """List every static file found under the 'adj' location."""
    return list(get_files(StaticFilesStorage(), location='adj'))
def default_avatar(variation=0):
    """Return one of the ten bundled default avatar images.

    ``variation`` is wrapped modulo 10, selecting avatar_0 .. avatar_9.
    NOTE: the file handle is opened here and not closed — ImageFile
    consumes it.
    """
    index = variation % 10
    avatar_name = 'defaults/avatar_{0}.jpg'.format(index)
    full_path = StaticFilesStorage().path(avatar_name)
    return ImageFile(open(full_path, 'rb'))
def find_static_files():
    """Return STATIC_URL-prefixed URLs for every discoverable static file."""
    storage = StaticFilesStorage()
    static_url = getattr(settings, 'STATIC_URL')
    urls = []
    for name in get_files(storage):
        urls.append('{}{}'.format(static_url, name))
    return urls
class S3LocalCachedMixin(object):
    """
    Mixin that adds local filesystem caching to an S3 storage backend.

    Writes go to both S3 and a local mirror; reads are served from the
    mirror when present and lazily cached on a miss.
    """

    def __init__(self, *args, **kwargs):
        super(S3LocalCachedMixin, self).__init__(*args, **kwargs)
        # Local mirror rooted at CONVOY_LOCAL_CACHE_ROOT.
        self._local = StaticFilesStorage(location=CONVOY_LOCAL_CACHE_ROOT)

    def save(self, name, content, *args, **kwargs):
        """Save ``content`` under ``name``, wrapping raw data in a File."""
        if not hasattr(content, 'chunks'):
            content = ContentFile(content)
        sname = self._save(name, content, *args, **kwargs)
        return sname

    def _save(self, name, content, *args, **kwargs):
        # Some configurations of the s3 backend mutate the content in
        # place, esp. when AWS_IS_GZIPPED = True.  Keep a pre-mutation
        # copy for the local cache so we don't save garbage to disk.
        orig_content = copy.copy(content)
        super(S3LocalCachedMixin, self)._save(name, content, *args, **kwargs)
        if self._local.exists(name):
            self._local.delete(name)
        self._local._save(name, orig_content, *args, **kwargs)
        return name

    def delete(self, *args, **kwargs):
        """Delete from the local cache (if present), then from S3."""
        if self._local.exists(*args, **kwargs):
            self._local.delete(*args, **kwargs)
        return super(S3LocalCachedMixin, self).delete(*args, **kwargs)

    def open(self, name, *args, **kwargs):
        """Open ``name``, preferring the local cache; cache it on a miss."""
        if self._local.exists(name):
            return self._local.open(name, *args, **kwargs)
        the_file = super(S3LocalCachedMixin, self).open(name, *args, **kwargs)
        # Cache miss: keep a local copy for future reads, then rewind so
        # the caller still sees the stream from the start.
        self._local.save(name, the_file)
        if hasattr(the_file, "seek"):
            the_file.seek(0)
        return the_file

    def local_path(self, *args, **kwargs):
        """Resolve a name to its absolute path inside the local cache."""
        return self._local.path(*args, **kwargs)