def __init__(self, app, name, url, filename):
    self.name = name
    self.url = url
    self.filename = filename
    self.md5 = get_md5_from_file(filename)
    self.sha256 = get_sha256_from_file(filename)
    self.archive_filename = '%s.%s' % (app.name, name)
def _read_index_json(self, cache_dir, json_apps):
    files_to_download = []
    something_changed = False
    files_in_json_file = []
    for appname, appinfo in json_apps.iteritems():
        for appfile, appfileinfo in appinfo.iteritems():
            filename = os.path.basename('%s.%s' % (appname, appfile))
            remote_md5sum = appfileinfo['md5']
            remote_url = appfileinfo['url']
            # compare with local cache
            cached_filename = os.path.join(cache_dir, filename)
            files_in_json_file.append(cached_filename)
            local_md5sum = get_md5_from_file(cached_filename)
            if remote_md5sum != local_md5sum:
                # ask to re-download this file
                files_to_download.append((remote_url, filename, remote_md5sum))
                something_changed = True
    # remove those files that apparently do not exist on server anymore
    for cached_filename in glob(os.path.join(cache_dir, '*')):
        if os.path.basename(cached_filename).startswith('.'):
            continue
        if os.path.isdir(cached_filename):
            continue
        if cached_filename not in files_in_json_file:
            self.log('Deleting obsolete %s' % cached_filename)
            something_changed = True
            os.unlink(cached_filename)
    return files_to_download, something_changed
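# A rough sketch of the index.json structure _read_index_json expects:
# json_apps maps an app name to its files, and each file entry carries the
# remote 'md5' and 'url'. The app name, checksums, and URLs below are made
# up purely for illustration.
#
#   json_apps = {
#       'myapp': {
#           'ini': {'md5': 'd41d8cd98f00b204e9800998ecf8427e',
#                   'url': 'https://appcenter.example.com/meta-inf/4.4/myapp.ini'},
#           'logo': {'md5': '0cc175b9c0f1b6a831c399e269772661',
#                    'url': 'https://appcenter.example.com/meta-inf/4.4/myapp.logo'},
#       },
#   }
#
# _read_index_json(cache_dir, json_apps) would then return a list of
# (url, 'myapp.ini', md5) tuples for every file whose cached copy is missing
# or has a different checksum, plus a flag whether anything changed.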
def _extract_local_archive(self, app_cache):
    local_archive = self._get_local_archive(app_cache)
    if not local_archive:
        # no local archive available for this cache
        return False
    if any(not fname.startswith('.') for fname in os.listdir(app_cache.get_cache_dir())):
        # we already have a cache. our archive is just outdated...
        return False
    self.log('Filling the App Center file cache from our local archive %s!' % local_archive)
    try:
        archive = tarfile.open(local_archive, 'r:*')
    except (tarfile.TarError, IOError) as e:
        self.warn('Error while reading %s: %s' % (local_archive, e))
        return False
    try:
        for member in archive.getmembers():
            filename = member.name
            if os.path.sep in filename:
                # just some paranoia
                continue
            self.debug('Extracting %s' % filename)
            archive.extract(filename, path=app_cache.get_cache_dir())
            self._files_downloaded[filename] = get_md5_from_file(os.path.join(app_cache.get_cache_dir(), filename))
    finally:
        archive.close()
    return True
def _update_local_files(self):
    self.debug('Updating app files...')
    if container_mode():
        self.debug('do not update files in container mode...')
        return
    update_files = {
        'inst': lambda x: self._get_joinscript_path(x, unjoin=False),
        'schema': lambda x: x.get_share_file('schema'),
        'univention-config-registry-variables': lambda x: x.get_share_file('univention-config-registry-variables'),
    }
    for app in Apps().get_all_locally_installed_apps():
        for file in update_files:
            src = app.get_cache_file(file)
            dest = update_files[file](app)
            if not os.path.exists(src):
                if app.docker:
                    # remove files that do not exist on server anymore
                    if os.path.exists(dest):
                        self.log('Deleting obsolete app file %s' % dest)
                        os.unlink(dest)
            else:
                # update local files if downloaded
                component_file = '%s.%s' % (app.component_id, file)
                if component_file not in self._files_downloaded:
                    continue
                src_md5 = self._files_downloaded[component_file]
                dest_md5 = None
                if os.path.exists(dest):
                    dest_md5 = get_md5_from_file(dest)
                if dest_md5 is None or src_md5 != dest_md5:
                    self.log('Copying %s to %s' % (src, dest))
                    shutil.copy2(src, dest)
                    if file == 'inst':
                        os.chmod(dest, 0o755)
    # some variables could change UCR templates
    # e.g. Name, Description
    self._update_conffiles()
def _download_directly(self, app_cache, files_to_download):
    for filename_url, filename, remote_md5sum in files_to_download:
        # don't forget to quote: 'foo & bar.ini' -> 'foo%20&%20bar.ini'
        # but don't quote https:// -> https%3A//
        path = quote(urlsplit(filename_url).path)
        filename_url = '%s%s' % (app_cache.get_server(), path)
        cached_filename = os.path.join(app_cache.get_cache_dir(), filename)
        self.debug('Downloading %s' % filename_url)
        try:
            urlcontent = urlopen(filename_url)
        except Exception as e:
            self.fatal('Error downloading %s: %s' % (filename_url, e))
        else:
            with open(cached_filename, 'wb') as f:
                f.write(urlcontent.read())
            local_md5sum = get_md5_from_file(cached_filename)
            if local_md5sum != remote_md5sum:
                self.fatal('Checksum for %s should be %r but was %r! Rather removing this file...' % (filename, remote_md5sum, local_md5sum))
                os.unlink(cached_filename)
            self._files_downloaded[filename] = remote_md5sum
def _download_archive(self, app_cache, files_to_download):
    # a lot of files to download? Do not download them
    # one at a time. Download the full archive!
    files_still_to_download = []
    archive_url = os.path.join(app_cache.get_server(), 'meta-inf', app_cache.get_ucs_version(), 'all.tar.gz')
    try:
        self.log('Downloading "%s"...' % archive_url)
        # for some reason saving this in memory is flawed.
        # using StringIO and GZip objects has issues
        # with "empty" files in tar.gz archives, i.e.
        # doublets like .png logos
        with open(os.path.join(app_cache.get_cache_dir(), 'all.tar.gz'), 'wb') as f:
            f.write(urlopen(archive_url).read())
        archive = tarfile.open(f.name, 'r:*')
        try:
            for filename_url, filename, remote_md5sum in files_to_download:
                self.debug('Extracting %s' % filename)
                try:
                    archive.extract(filename, path=app_cache.get_cache_dir())
                    absolute_filename = os.path.join(app_cache.get_cache_dir(), filename)
                    os.chown(absolute_filename, 0, 0)
                    os.chmod(absolute_filename, 0o664)
                    local_md5sum = get_md5_from_file(absolute_filename)
                    if local_md5sum != remote_md5sum:
                        self.warn('Checksum for %s should be %r but was %r! Download manually' % (filename, remote_md5sum, local_md5sum))
                        raise KeyError(filename)
                    self._files_downloaded[filename] = remote_md5sum
                except KeyError:
                    self.warn('%s not found in archive!' % filename)
                    files_still_to_download.append((filename_url, filename, remote_md5sum))
        finally:
            archive.close()
            os.unlink(f.name)
        return files_still_to_download
    except Exception as exc:
        self.fatal('Could not read "%s": %s' % (archive_url, exc))
        return files_to_download
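# A minimal sketch (not part of the methods above) of how these helpers could
# be chained during an update run; the json_apps and app_cache names outside
# this snippet are assumptions for illustration:
#
#   files_to_download, something_changed = self._read_index_json(app_cache.get_cache_dir(), json_apps)
#   if files_to_download:
#       # try the bundled all.tar.gz first, then fetch whatever is left per file
#       files_to_download = self._download_archive(app_cache, files_to_download)
#       self._download_directly(app_cache, files_to_download)
#   self._update_local_files()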