def get_resources(self, soup):
    """
    Extract every ``<resource>`` tag from the parsed document, validate its
    attributes, and delegate to the module-level ``get_resources`` helper.

    :param soup: Parsed document tree (BeautifulSoup-style: has ``find_all``;
        tags expose ``.attrs`` and ``.extract()``).
    :return: The result of the module-level ``get_resources`` with the first
        element dropped (``[1:]``).
    :raise InvalidDocumentError: If a tag has an unknown ``type`` or an
        attribute that is not a keyword-only parameter of ``Resource.__init__``.
    """
    # One bucket of attribute-dicts per known resource type.
    conf = {nm: [] for nm in self.resource_types}
    resource_tags = soup.find_all('resource')
    # Attributes are only legal if Resource.__init__ accepts them as
    # keyword-only parameters.
    allowed_attrs = {
        param.name
        for param in signature(Resource.__init__).parameters.values()
        if param.kind == param.KEYWORD_ONLY
    }
    for resource_tag in resource_tags:
        resource = copy(resource_tag.attrs)
        # 'static' is the default when no type attribute is given.
        resource_type = resource.pop('type', 'static')
        if resource_type not in self.resource_types:
            raise InvalidDocumentError(
                '<resource> type must be one of {0:} if specified'.format(
                    ', '.join(self.resource_types.keys())))
        if resource.keys() - allowed_attrs:
            raise InvalidDocumentError((
                'did not recognize these attributes given to <resource>: [{0:s}]; '
                'known arguments are [{1:s}]').format(
                    ', '.join(resource.keys() - allowed_attrs),
                    ', '.join(allowed_attrs)))
        conf[resource_type].append(resource)
        # Remove the tag from the document now that it has been consumed.
        resource_tag.extract()
    # Bugfix: ':8s' was a minimum-width pad (a no-op on a full-length hash);
    # '.8s' truncates to 8 characters, matching the hash_str(...)[:8] and
    # '{0:.8s}' usage elsewhere in this file.
    section_name = 'section_{0:.8s}'.format(hash_str(self.path))
    section_dir = dirname(self.path)
    return get_resources(
        group_name=section_name,
        path=section_dir,
        logger=self.logger,
        cache=self.cache,
        compile_conf=self.compile_conf,
        style_conf=conf['styles'],
        script_conf=conf['scripts'],
        static_conf=conf['static'],
        note='from section {0:s}'.format(basename(self.path)),
    )[1:]
def get_file_cache_key(url, func, rzip, *, dependencies=(), extra=''):
    """
    Generate a string to use as a key for caching this object.

    :param url: Remote URL the cached object was fetched from (or falsy).
    :param func: Callable the cached object was produced by (or falsy).
    :param rzip: Path to an archive the cached object was extracted from
        (or falsy). Precisely one of url/func/rzip must be truthy.
    :param dependencies: A list of file paths that this result depends on;
        their last-modified time will be included in the key (so they
        expire when their dependencies change).
    :param extra: Extra information to add in the key; if this changes, the
        old cache value expires (cannot be found anymore).
    :return: A cache key of at most 24 characters.
    :raise AssertionError: If not exactly one of url/func/rzip is set.
    """
    # Bugfix: validate with an explicit raise rather than `assert`, so the
    # check still runs under `python -O` (asserts are stripped there).
    if bool(url) + bool(func) + bool(rzip) != 1:
        raise AssertionError('precisely one of url, func or rzip should be set')
    for dependency in dependencies:
        try:
            extra += '.' + hash_int(getmtime(dependency))
        except FileNotFoundError:
            # A missing dependency still contributes a separator, so the
            # key changes if the file later (re)appears.
            extra += '.'
    if url:
        key = 'dfc_url_{0:s}'.format(url)
    elif func:
        key = 'dfc_func_{0:s}'.format(
            DogpileAndFileCache.get_func_str(func))
    else:
        # rzip branch (guaranteed by the exactly-one check above).
        tsstr = hash_int(getmtime(rzip)) if exists(rzip) else '0'
        key = 'dfc_rzip_{0:s}+{1:s}'.format(rzip, tsstr)
    if extra:
        key += '.' + extra
    # Keep keys bounded: anything longer than 24 chars becomes a short hash.
    if len(key) > 24:
        return hash_str(key)[:24]
    return key
def setup_compiler(self):
    """
    Parse the command line, create the shared singletons, prepare a
    per-input 'live' output directory, chdir into it and compile.
    """
    # Only the pre-parse options are used here; the remaining CLI
    # arguments (`rest_args` in the original) were unused, so discard them.
    pre_opts, _ = pre_parse(argv[1:])
    self.logger, self.compile_conf, self.document_conf, self.cache, \
        self.loader = setup_singletons(opts=pre_opts)
    self.compile_file = pre_opts.input
    # One output directory per input file, keyed by a short (8-char) hash.
    self.output_dir = join(self.compile_conf.TMP_DIR, 'live',
        hash_str(self.compile_file)[:8])
    self.logger.info('live directory is {0:s}'.format(self.output_dir),
        level=1)
    # 0o700: directory is private to the current user.
    makedirs(self.output_dir, exist_ok=True, mode=0o700)
    chdir(self.output_dir)  # todo: is there a better way?
    self.do_compile()
def _make_offline_from_file(self):
    """
    Fetch a single remote file through the cache and link/copy it into the
    resource directory, recording where it came from in ``self.notes``.
    """
    self.logger.info(' making file available offline: {0:}'.format(
        self.remote_path), level=2)
    # A short hash prefix keeps local names unique per (group, remote path).
    name_prefix = hash_str('{0:s}.{1:s}'.format(self.group_name,
                                                self.remote_path))
    bare_path, self.local_params = self.split_params(self.remote_path)
    self.local_path = '{0:.6s}{1:s}'.format(name_prefix, basename(bare_path))
    # Download (or reuse) the cached copy, then place it under resource_dir.
    cached_file = self.cache.get_or_create_file(url=self.remote_path)
    link_or_copy(
        src=cached_file,
        dst=join(self.resource_dir, self.local_path),
        exist_ok=True,
    )
    self.notes.append('downloaded from "{0:s}"'.format(self.remote_path))
def _make_offline_from_archive(self):
    """
    Fetch a remote archive through the cache, extract it, and link/copy the
    extracted directory into the resource directory.
    """
    self.logger.info(' making archive available offline: {0:}'.format(
        self.download_archive), level=2)
    prefix = hash_str('{0:s}.{1:s}'.format(self.group_name,
        self.download_archive))
    # Directory name: 8-char hash prefix + archive base name sans extension.
    self.archive_dir = '{0:.8s}_{1:s}'.format(prefix,
        splitext(basename(self.split_params(self.download_archive)[0]))[0])
    archive = self.cache.get_or_create_file(url=self.download_archive)
    # Renamed from `dir`, which shadowed the `dir` builtin.
    extracted_dir = self.cache.get_or_create_file(rzip=archive)
    link_or_copy(extracted_dir, join(self.resource_dir, self.archive_dir),
        exist_ok=True)
    # NOTE(review): `self.downloaded_path` is not assigned in this method —
    # presumably set by the caller before this runs; confirm.
    self.local_path, self.local_params = self.split_params(
        self.downloaded_path)
def setup_singletons(opts):
    """
    Configure some singleton configuration classes.

    :param opts: Pre-parsed command-line options; ``opts.verbosity`` and
        ``opts.input`` (the input file path) are read here.
    :return: Tuple ``(logger, compile_conf, document_conf, cache, loader)``.
    """
    # Ticker measures elapsed time between the timing log messages below.
    tick = Ticker()
    logger = BasicLogger(verbosity=opts.verbosity)
    logger.info('created logger ({0:.0f}ms)'.format(tick()), level=2)
    # Session is keyed on the input path, so different documents get
    # distinct compile sessions.
    session_hash = hash_str(opts.input)
    compile_conf = CompileSettings(logger=logger, opts=opts,
        session=session_hash)
    logger.info('load compile settings ({0:.0f}ms)'.format(tick()), level=2)
    document_conf = DocumentSettings(logger=logger, opts=opts)
    logger.info('load document settings ({0:.0f}ms)'.format(tick()), level=2)
    cache = DogpileAndFileCache(cache_dir=join(compile_conf.TMP_DIR,
        'filecache'))
    logger.info('created cache binding ({0:.0f}ms)'.format(tick()), level=2)
    # The loader resolves sources relative to the input file's directory.
    loader = SourceLoader(dir_paths=(dirname(realpath(opts.input)),))
    logger.info('create file loader ({0:.0f}ms)'.format(tick()), level=2)
    # (Removed commented-out dead code: `# parser = LXML_Parser()`.)
    return logger, compile_conf, document_conf, cache, loader
def get_signature(self):
    """Return a hash of the combined file signatures.

    Used on installing, not all the time.
    """
    combined = self.get_file_signatures_string()
    return hash_str(combined)