def _set_up_import_dir(self):
    """ Copy this package into PACKAGE_DIR, replacing any existing copy whose stored version does not match. """
    imp_dir = join(self.compile_conf.PACKAGE_DIR, self.name)
    if exists(imp_dir):
        # read the version that was recorded when the package was copied
        try:
            with open(join(self.compile_conf.PACKAGE_DIR,
                    '{0:s}.version'.format(self.name)), 'r') as fh:
                stored_version = fh.read()
        except IOError:
            stored_version = None
        if self.version != stored_version:
            self.logger.info('removing wrong version {2:} of package {0:s} from "{1:s}"'
                .format(self.name, imp_dir, stored_version), level=3)
            # `imp_dir` may be a file/symlink or a real directory
            try:
                remove(imp_dir)
            except IsADirectoryError:
                rmtree(imp_dir)
    if not exists(imp_dir):
        self.logger.info('copy package {0:} to "{1:}"'.format(
            self.name, imp_dir), level=3)
        link_or_copy(self.path, join(self.compile_conf.PACKAGE_DIR, self.name),
            exist_ok=True, allow_linking=True)
        with open(join(self.compile_conf.PACKAGE_DIR,
                '{0:s}.version'.format(self.name)), 'w+') as fh:
            fh.write(self.version)
def _do_process(self, func, name='res_proc'):
    """ Run `func` on the local resource file and point `processed_path` at the cached result; skipped when minification is disabled. """
    if not self.allow_minify:
        return
    self.notes = []
    if self.local_path is None:
        return
    if self.processed_path is None:
        self.processed_path = self.full_file_path
    src = self.processed_path
    makedirs(self.resource_dir, exist_ok=True, mode=0o700)
    self.processed_path = join(self.compile_conf.TMP_DIR, name,
        self.group_name, self.local_path)
    if self.logger.get_level() >= 3:
        self.logger.info('  processing {0:s} {1:s} -> {2:}'.format(
            self.__class__.__name__, src, self.processed_path), level=3)
    else:
        self.logger.info('  processing {0:s} {1:}'.format(
            self.__class__.__name__, self.processed_path), level=2)
    # the cache only runs `func(src)` when no up-to-date result is stored for `src`
    link_or_copy(src=self.cache.get_or_create_file(func=partial(func, src),
            dependencies=(src,)),
        dst=self.processed_path, exist_ok=False, allow_overwrite=True)
def _make_offline_from_file(self):
    """ Download the remote file into `resource_dir` so it is available offline. """
    self.logger.info('  making file available offline: {0:}'.format(
        self.remote_path), level=2)
    # prefix the filename with a short hash to avoid collisions between groups
    prefix = hash_str('{0:s}.{1:s}'.format(self.group_name, self.remote_path))
    pth, self.local_params = self.split_params(self.remote_path)
    self.local_path = '{0:.6s}{1:s}'.format(prefix, basename(pth))
    link_or_copy(
        src=self.cache.get_or_create_file(url=self.remote_path),
        dst=join(self.resource_dir, self.local_path),
        exist_ok=True,
    )
    self.notes.append('downloaded from "{0:s}"'.format(self.remote_path))
def _make_offline_from_archive(self):
    """ Download and unpack the remote archive into `resource_dir` so it is available offline. """
    self.logger.info('  making archive available offline: {0:}'.format(
        self.download_archive), level=2)
    prefix = hash_str('{0:s}.{1:s}'.format(self.group_name, self.download_archive))
    self.archive_dir = '{0:.8s}_{1:s}'.format(prefix,
        splitext(basename(self.split_params(self.download_archive)[0]))[0])
    archive = self.cache.get_or_create_file(url=self.download_archive)
    # ask the cache for the unpacked contents of the downloaded archive
    unpacked_dir = self.cache.get_or_create_file(rzip=archive)
    link_or_copy(unpacked_dir, join(self.resource_dir, self.archive_dir), exist_ok=True)
    self.local_path, self.local_params = self.split_params(self.downloaded_path)
def copy(self, to, allow_symlink=False):
    """ Copy necessary files to `to` if they are local. """
    self.logger.info('  {0:} {2:} for {1:s}'.format(self.__class__.__name__,
        self.group_name, id(self) % 100000), level=3)
    if self.local_path is None:
        return
    if self.processed_path is None:
        self.processed_path = self.full_file_path
    if self.copy_map:
        allow_symlink = False  # this may be too aggressive
    else:
        self.copy_map = {None: self.local_path}
    for src, dst in self.copy_map.items():
        if src:
            assert '*' not in src, '{0:}: wildcards not allowed in copy_map'.format(self)
            assert self.resource_dir is not None, \
                'local resources should have resource_dir specified'
            srcpth = join(self.resource_dir, self.archive_dir or '', src)
        else:
            srcpth = self.processed_path
        dstpth = join(to, dst)
        if self.logger.get_level() >= 3:
            self.logger.info('  copying {0:s} {1:s} -> {2:}'.format(
                self.__class__.__name__, srcpth, dstpth), level=3)
        else:
            self.logger.info('  copying {0:s} {1:}'.format(
                self.__class__.__name__, dstpth), level=2)
        if exists(dstpth) and getmtime(dstpth) >= getmtime(srcpth):
            self.logger.info('  {0:s} {1:s} seems unchanged'.format(
                self.__class__.__name__, dstpth), level=3)
        else:
            link_or_copy(src=srcpth, dst=dstpth, follow_symlinks=True,
                allow_linking=allow_symlink, create_dirs=True, exist_ok=True)