def librsyncPatchFile(oldfile, deltafile, newfile):
    """
    Apply the librsync delta in `deltafile` to `oldfile`, writing the
    patched result to `newfile`.

    :raises ValueError: if any two of the three paths are the same file.
    :raises RuntimeError: if the patch operation fails.
    """
    logger.debug(u"Librsync patch: old file {!r}, delta file {!r}, new file {!r}", oldfile, deltafile, newfile)

    oldfile = forceFilename(oldfile)
    newfile = forceFilename(newfile)
    deltafile = forceFilename(deltafile)

    # In-place patching is not supported: all three files must differ.
    if oldfile == newfile:
        raise ValueError(u"Oldfile and newfile are the same file")
    if deltafile == newfile:
        raise ValueError(u"deltafile and newfile are the same file")
    if deltafile == oldfile:
        raise ValueError(u"oldfile and deltafile are the same file")

    chunkSize = 1024 * 1024  # copy in 1 MiB chunks
    try:
        with open(oldfile, "rb") as baseStream:
            with open(deltafile, "rb") as deltaStream:
                with open(newfile, "wb") as resultStream:
                    with closing(librsync.PatchedFile(baseStream, deltaStream)) as patchedStream:
                        while True:
                            chunk = patchedStream.read(chunkSize)
                            resultStream.write(chunk)
                            if not chunk:
                                break
    except Exception as patchError:
        logger.debug("Patching {!r} with delta {!r} into {!r} failed: {}", oldfile, deltafile, newfile, patchError)
        raise RuntimeError(u"Failed to patch file %s: %s" % (oldfile, forceUnicode(patchError)))
def _copy(src, dst, copySrcContent=False, fileCount=0, totalFiles=0, totalSize=0, progressSubject=None):
    """
    Recursive worker behind :func:`copy`.

    Copies a file or directory tree from `src` to `dst`, optionally
    reporting progress via `progressSubject`.

    :param copySrcContent: If True and `src` is a directory, copy its
        contents into `dst` instead of creating `src`'s basename below it.
    :param fileCount: Running number of files copied so far (for messages).
    :param totalFiles: Total number of files expected (for messages).
    :param totalSize: Total byte size expected (unused here, passed through).
    :returns: The updated running file count.
    """
    src = forceFilename(src)
    dst = forceFilename(dst)

    if os.path.isfile(src):
        fileCount += 1
        size = 0
        if not os.path.exists(dst):
            # Make sure the target's parent directory exists.
            parent = os.path.dirname(dst)
            if not os.path.isdir(parent):
                os.makedirs(parent)

        if progressSubject:
            # Right-align the counter to the width of the total count.
            countLen = len(str(totalFiles))
            countLenFormat = '%' + str(countLen) + 's'
            size = os.path.getsize(src)
            if size > 1024 * 1024:
                sizeString = "%0.2f MByte" % (float(size) / (1024 * 1024))
            elif size > 1024:
                sizeString = "%0.2f kByte" % (float(size) / 1024)
            else:
                sizeString = "%d Byte" % size
            progressSubject.setMessage(u"[%s/%s] %s (%s)" % (countLenFormat % fileCount, totalFiles, os.path.basename(src), sizeString))

        try:
            shutil.copy2(src, dst)
        except OSError as error:
            logger.debug(error)
            # Operation not permitted (errno 1) is tolerated, e.g. when
            # metadata cannot be preserved; everything else is re-raised.
            if error.errno != 1:
                raise

        if progressSubject:
            progressSubject.addToState(size)
    elif os.path.isdir(src):
        if not os.path.isdir(dst):
            os.makedirs(dst)
        elif not copySrcContent:
            # Existing target dir: copy src as a subdirectory of dst.
            dst = os.path.join(dst, os.path.basename(src))

        for element in os.listdir(src):
            # Children are always copied content-wise (copySrcContent=True).
            fileCount = _copy(os.path.join(src, element), os.path.join(dst, element), True, fileCount, totalFiles, totalSize, progressSubject)

    return fileCount
def copy(src, dst, progressSubject=None):
    '''
    Copy from `src` to `dst`.

    The copy process will follow these rules:
    * src = file, dst = file: overwrite dst
    * src = file, dst = dir: copy into dst
    * src = file, dst = not existent: create dst directories, copy src to dst
    * src = dir, dst = file: Exception
    * src = dir, dst = dir: copy src dir into dst
    * src = dir, dst = not existent: create dst, copy content of src into dst
    * src = dir/*, dst = dir/not existent: create dst if not exists, copy content of src into dst
    '''
    # Give registered hooks a chance to rewrite the arguments first.
    for hook in hooks:
        (src, dst, progressSubject) = hook.pre_copy(src, dst, progressSubject)

    try:
        src = forceFilename(src)
        dst = forceFilename(dst)

        # A trailing wildcard means "copy the directory's content".
        copySrcContent = False
        if src.endswith(('/*.*', '\\*.*')):
            src = src[:-4]
            copySrcContent = True
        elif src.endswith(('/*', '\\*')):
            src = src[:-2]
            copySrcContent = True

        if copySrcContent and not os.path.isdir(src):
            raise IOError(u"Source directory '%s' not found" % src)

        logger.info(u"Copying from '%s' to '%s'" % (src, dst))

        (count, size) = (0, 0)
        if progressSubject:
            progressSubject.reset()
            # Pre-compute totals so progress can be reported as a fraction.
            (count, size) = getCountAndSize(src)
            progressSubject.setEnd(size)

        _copy(src, dst, copySrcContent, 0, count, size, progressSubject)
        logger.info(u'Copy done')
        if progressSubject:
            progressSubject.setState(size)
    except Exception as e:
        # Notify hooks of the failure before propagating it.
        for hook in hooks:
            hook.error_copy(src, dst, progressSubject, e)
        raise

    for hook in hooks:
        hook.post_copy(src, dst, progressSubject)
def create(self, fileList, baseDir='.', dereference=False):
    """
    Create a cpio archive at ``self._filename`` containing `fileList`,
    with paths taken relative to `baseDir`.

    :param dereference: If True, archive the files symlinks point to.
    :raises RuntimeError: if archive creation fails.
    """
    try:
        fileList = forceUnicodeList(fileList)
        baseDir = os.path.abspath(forceFilename(baseDir))
        dereference = forceBool(dereference)

        if not os.path.isdir(baseDir):
            raise IOError(u"Base dir '%s' not found" % baseDir)

        # Assemble the shell pipeline: cpio, optional compressor, output file.
        parts = [u'%s --create --quiet --verbose --format crc' % System.which('cpio')]
        if dereference:
            parts.append(' --dereference')

        if self._compression == 'gzip':
            # Prefer pigz (parallel gzip) when it was detected.
            compressor = System.which('pigz') if self.pigz_detected else System.which('gzip')
            parts.append(' | %s --rsyncable' % compressor)
        elif self._compression == 'bzip2':
            parts.append(' | %s' % System.which('bzip2'))

        parts.append(' > "%s"' % self._filename)

        self._create(fileList, baseDir, u''.join(parts))
    except Exception as e:
        raise RuntimeError(u"Failed to create archive '%s': %s" % (self._filename, e))
def Archive(filename, format=None, compression=None, progressSubject=None):
    """
    Factory returning a :class:`TarArchive` or :class:`CpioArchive`
    instance for `filename`.

    The archive type is taken from `format` if given, otherwise guessed
    from the file's magic type or, failing that, its filename suffix.

    :raises ValueError: if `format` is given but unsupported.
    :raises RuntimeError: if the archive type cannot be determined.
    """
    filename = forceFilename(filename)

    archiveClass = None
    if format:
        format = forceUnicodeLower(format)
        if format == 'tar':
            archiveClass = TarArchive
        elif format == 'cpio':
            archiveClass = CpioArchive
        else:
            raise ValueError(u"Unsupported format '%s'" % format)
    elif os.path.exists(filename):
        # Try the file magic first, then fall back to the name suffix.
        fileType = getFileType(filename).lower()
        lowerName = filename.lower()
        if 'tar archive' in fileType:
            archiveClass = TarArchive
        elif 'cpio archive' in fileType:
            archiveClass = CpioArchive
        elif lowerName.endswith(('tar', 'tar.gz')):
            archiveClass = TarArchive
        elif lowerName.endswith(('cpio', 'cpio.gz')):
            archiveClass = CpioArchive

    if not archiveClass:
        raise RuntimeError(u"Failed to guess archive type of '%s'" % filename)

    return archiveClass(filename=filename, compression=compression, progressSubject=progressSubject)
def getCountAndSize(path):
    """
    Recursively determine the number of files below `path` and their
    accumulated size, running the registered hooks around the work.

    :returns: Tuple ``(count, size)`` with size in bytes.
    """
    path = os.path.abspath(forceFilename(path))
    for hook in hooks:
        path = hook.pre_getCountAndSize(path)

    count = 0
    size = 0
    try:
        if os.path.isfile(path):
            logger.debug2(u"Is file: {0}", path)
            count = 1
            size = os.path.getsize(path)
        elif os.path.isdir(path):
            logger.debug2(u"Is dir: {0}", path)
            logger.debug(u"Counting and getting sizes of files in dir {0!r}", path)
            for entry in os.listdir(path):
                subCount, subSize = getCountAndSize(os.path.join(path, entry))
                count += subCount
                size += subSize
    except Exception as error:
        # Report the partial result to the error hooks, then re-raise.
        for hook in hooks:
            hook.error_getCountAndSize(path, (count, size), error)
        raise

    for hook in hooks:
        (count, size) = hook.post_getCountAndSize(path, (count, size))

    return (count, size)
def __init__(self, packageFile, tempDir=None):
    """
    Prepare handling of an opsi product package file.

    :param packageFile: Path to the package file; must exist.
    :param tempDir: Working directory for unpacking; defaults to
        DEFAULT_TMP_DIR. Must be an existing directory.
    :raises IOError: if the package file or the temp dir is missing.
    """
    self.packageFile = os.path.abspath(forceFilename(packageFile))
    if not os.path.exists(self.packageFile):
        raise IOError(u"Package file '%s' not found" % self.packageFile)

    tempDir = tempDir or DEFAULT_TMP_DIR
    self.tempDir = os.path.abspath(forceFilename(tempDir))
    if not os.path.isdir(self.tempDir):
        raise IOError(u"Temporary directory '%s' not found" % self.tempDir)

    # Target dir for client data; set later via setClientDataDir().
    self.clientDataDir = None
    # Randomized unpack dir to avoid collisions between concurrent runs.
    self.tmpUnpackDir = os.path.join(self.tempDir, u'.opsi.unpack.%s' % randomString(5))
    # Parsed control file and unpacked file list; filled by later calls.
    self.packageControlFile = None
    self.clientDataFiles = []
def librsyncDeltaFile(filename, signature, deltafile):
    """
    Write a librsync delta for `filename` against the given `signature`
    into `deltafile`.

    :raises RuntimeError: if the delta file cannot be written.
    """
    chunkSize = 1024 * 1024  # stream in 1 MiB chunks
    filename = forceFilename(filename)
    deltafile = forceFilename(deltafile)
    logger.debug("Creating deltafile {!r} on base of {!r}", deltafile, filename)

    try:
        with open(filename, "rb") as sourceStream:
            with open(deltafile, "wb") as deltaStream:
                with closing(librsync.DeltaFile(signature, sourceStream)) as delta:
                    while True:
                        chunk = delta.read(chunkSize)
                        deltaStream.write(chunk)
                        if not chunk:
                            break
    except Exception as e:
        raise RuntimeError(u"Failed to write delta file %s: %s" % (deltafile, forceUnicode(e)))
def getFileType(filename):
    """
    Return the libmagic file-type description of `filename`,
    following symlinks.

    :raises NotImplementedError: on Windows, where libmagic is not available.
    """
    if os.name == 'nt':
        raise NotImplementedError(u"getFileType() not implemented on windows")

    filename = forceFilename(filename)
    # closing() ensures the magic cookie is released even on errors.
    with closing(magic.open(magic.MAGIC_SYMLINK)) as ms:
        ms.load()
        return ms.file(filename)
def extract(self, targetPath='.', patterns=None):
    """
    Extract the cpio archive into `targetPath` (created if missing).

    :param patterns: Optional list of shell-style patterns; used both to
        build the cpio include list and to count the files that will be
        extracted for progress tracking. None/empty extracts everything.
    :raises RuntimeError: if extraction fails.
    """
    try:
        targetPath = os.path.abspath(forceFilename(targetPath))
        # Default to None instead of a shared mutable [] default argument.
        patterns = forceUnicodeList(patterns or [])

        if not os.path.isdir(targetPath):
            try:
                os.mkdir(targetPath)
            except Exception as e:
                raise IOError(u"Failed to create target dir '%s': %s" % (targetPath, e))

        # Choose the decompressor feeding the cpio pipeline.
        cat = System.which('cat')
        if self._compression == 'gzip':
            if self.pigz_detected:
                cat = u'%s --stdout --decompress' % (System.which('pigz'), )
            else:
                cat = System.which('zcat')
        elif self._compression == 'bzip2':
            cat = System.which('bzcat')

        fileCount = 0
        for filename in self.content():
            match = False
            if not patterns:
                match = True
            else:
                for pattern in patterns:
                    try:
                        # Translate the shell-style '*' to a regex wildcard.
                        pattern = pattern.replace('*', '.*')
                        if re.search(pattern, filename):
                            match = True
                            break
                    except Exception as e:
                        raise ValueError(u"Bad pattern '%s': %s" % (pattern, e))

            if match:
                # Count only files that will actually be extracted. The
                # previous code also incremented the counter once per
                # non-matching pattern, inflating the progress total.
                fileCount += 1

        include = ' '.join('"%s"' % pattern for pattern in patterns)

        curDir = os.path.abspath(os.getcwd())
        os.chdir(targetPath)
        try:
            command = u'%s "%s" | %s --quiet --extract --make-directories --unconditional --preserve-modification-time --verbose --no-preserve-owner %s' % (cat, self._filename, System.which('cpio'), include)
            self._extract(command, fileCount)
        finally:
            # Always restore the working directory.
            os.chdir(curDir)
    except Exception as e:
        raise RuntimeError(u"Failed to extract archive '%s': %s" % (self._filename, e))
def librsyncSignature(filename, base64Encoded=True):
    """
    Compute the librsync signature of `filename`.

    :param base64Encoded: If True (default), return the signature
        base64-encoded (line-wrapped, as produced by ``encodebytes``).
    :raises RuntimeError: if the signature cannot be computed.
    """
    filename = forceFilename(filename)
    try:
        with open(filename, 'rb') as f:
            with closing(librsync.SigFile(f)) as sf:
                sig = sf.read()
                if base64Encoded:
                    # base64.encodestring was deprecated in Python 3.1 and
                    # removed in 3.9; encodebytes produces identical output.
                    try:
                        sig = base64.encodebytes(sig)
                    except AttributeError:
                        # Python 2 only knows encodestring.
                        sig = base64.encodestring(sig)
                return sig
    except Exception as sigError:
        raise RuntimeError(u"Failed to get librsync signature from %s: %s" % (filename, forceUnicode(sigError)))
def extract(self, targetPath='.', patterns=None):
    """
    Extract the tar archive into `targetPath` (created if missing).

    :param patterns: Optional list of shell-style patterns; files not
        matching any pattern are excluded from extraction. None/empty
        extracts everything.
    :raises RuntimeError: if extraction fails.
    """
    try:
        targetPath = os.path.abspath(forceFilename(targetPath))
        # Default to None instead of a shared mutable [] default argument.
        patterns = forceUnicodeList(patterns or [])

        if not os.path.isdir(targetPath):
            try:
                os.mkdir(targetPath)
            except Exception as e:
                raise IOError(u"Failed to create target dir '%s': %s" % (targetPath, e))

        # Decompression options for tar.
        options = u''
        if self._compression == 'gzip':
            if self.pigz_detected:
                options += u'--use-compress-program=pigz'
            else:
                options += u'--gunzip'
        elif self._compression == 'bzip2':
            options += u'--bzip2'

        fileCount = 0
        for filename in self.content():
            match = False
            if not patterns:
                match = True
            else:
                for pattern in patterns:
                    try:
                        # Translate the shell-style '*' to a regex wildcard.
                        pattern = pattern.replace('*', '.*')
                        if re.search(pattern, filename):
                            match = True
                            break
                    except Exception as e:
                        raise ValueError(u"Bad pattern '%s': %s" % (pattern, e))

            if match:
                # Count only files that will actually be extracted. The
                # previous code also incremented the counter once per
                # non-matching pattern, inflating the progress total.
                fileCount += 1
            else:
                options += u' --exclude="%s"' % filename

        command = u'%s %s --directory "%s" --extract --verbose --file "%s"' % (System.which('tar'), options, targetPath, self._filename)
        self._extract(command, fileCount)
    except Exception as e:
        raise RuntimeError(u"Failed to extract archive '%s': %s" % (self._filename, e))
def __init__(self, **kwargs):
    """
    Set up an SQLite connection wrapper.

    Recognized keyword arguments (matched case-insensitively):
    ``database`` (path or ":memory:"), ``synchronous`` (bool),
    ``databasecharset`` (str).
    """
    # Defaults: in-memory database, synchronous writes, UTF-8 charset.
    self._database = ":memory:"
    self._synchronous = True
    self._databaseCharset = 'utf8'

    for name, val in kwargs.items():
        key = name.lower()
        if key == 'database':
            self._database = forceFilename(val)
        elif key == 'synchronous':
            self._synchronous = forceBool(val)
        elif key == 'databasecharset':
            self._databaseCharset = str(val)

    # Connection and cursor are opened lazily.
    self._connection = None
    self._cursor = None
    logger.debug(u'SQLite created: %s' % self)
def __init__(self, filename, compression=None, progressSubject=None):
    """
    Base initialization for archive handlers.

    :param compression: 'gzip' or 'bzip2'; when omitted and the file
        exists, the compression is detected from the file's magic type.
    :raises ValueError: for an unsupported compression name.
    """
    self._filename = forceFilename(filename)
    self._progressSubject = progressSubject
    self._compression = None

    if compression:
        compression = forceUnicodeLower(compression)
        if compression not in ('gzip', 'bzip2'):
            raise ValueError(u"Compression '%s' not supported" % compression)
        self._compression = compression
    elif os.path.exists(self._filename):
        # Auto-detect compression from the file magic.
        fileType = getFileType(self._filename).lower()
        if fileType.startswith('gzip compressed data'):
            self._compression = u'gzip'
        elif fileType.startswith('bzip2 compressed data'):
            self._compression = u'bzip2'
        else:
            self._compression = None
def mkdir(newDir, mode=0o750):
    """
    Create a new directory.

    Completes silently if `newDir` already is a directory. Missing
    parent directories are created as well, each with `mode`.

    :raises OSError: if a regular file with the same name exists.
    """
    newDir = os.path.abspath(forceFilename(newDir))

    if os.path.isdir(newDir):
        # Already there - nothing to do.
        return

    if os.path.isfile(newDir):
        raise OSError(u"A file with the same name as the desired dir, '%s', already exists." % newDir)

    head, tail = os.path.split(newDir)
    if head and not os.path.isdir(head):
        # Create missing parents first (each gets the same mode).
        mkdir(head, mode=mode)
    if tail:
        os.mkdir(newDir)
        # chmod explicitly so the umask cannot weaken the requested mode.
        os.chmod(newDir, mode)
def countFiles(path):
    """
    Recursively count the regular files below `path`; symlinks are
    skipped. Registered hooks run before, after, and on error.
    """
    path = os.path.abspath(forceFilename(path))
    for hook in hooks:
        path = hook.pre_countFiles(path)

    count = 0
    try:
        if os.path.islink(path):
            # Skip links to avoid double counting and cycles.
            pass
        elif os.path.isfile(path):
            count = 1
        elif os.path.isdir(path):
            logger.debug(u"Counting files in dir '%s'" % path)
            for entry in os.listdir(path):
                count += countFiles(os.path.join(path, entry))
    except Exception as error:
        # Hand the partial count to the error hooks, then re-raise.
        for hook in hooks:
            hook.error_countFiles(path, count, error)
        raise

    for hook in hooks:
        count = hook.post_countFiles(path, count)

    return count
def getSize(path):
    """
    Recursively sum the sizes (in bytes) of the regular files below
    `path`; symlinks are skipped. Registered hooks run before, after,
    and on error.
    """
    path = os.path.abspath(forceFilename(path))
    for hook in hooks:
        path = hook.pre_getSize(path)

    size = 0
    try:
        if os.path.islink(path):
            # Skip links to avoid double counting and cycles.
            pass
        elif os.path.isfile(path):
            size = os.path.getsize(path)
        elif os.path.isdir(path):
            logger.debug(u"Getting size of files in dir '%s'" % path)
            for entry in os.listdir(path):
                size += getSize(os.path.join(path, entry))
    except Exception as error:
        # Hand the partial size to the error hooks, then re-raise.
        for hook in hooks:
            hook.error_getSize(path, size, error)
        raise

    for hook in hooks:
        size = hook.post_getSize(path, size)

    return size
def getDirectorySize(path):
    """
    Return the accumulated size (in bytes) of all regular files inside
    directory `path`, descending into subdirectories; symlinks are
    skipped. Registered hooks run before, after, and on error.
    """
    path = os.path.abspath(forceFilename(path))
    for hook in hooks:
        path = hook.pre_getDirectorySize(path)

    size = 0
    try:
        for entry in os.listdir(path):
            entryPath = os.path.join(path, entry)
            if os.path.islink(entryPath):
                # Skip links to avoid double counting and cycles.
                continue
            if os.path.isfile(entryPath):
                size += os.path.getsize(entryPath)
            elif os.path.isdir(entryPath):
                size += getDirectorySize(entryPath)
    except Exception as error:
        # Hand the partial size to the error hooks, then re-raise.
        for hook in hooks:
            hook.error_getDirectorySize(path, size, error)
        raise

    for hook in hooks:
        size = hook.post_getDirectorySize(path, size)

    return size
def setClientDataDir(self, clientDataDir):
    """
    Set the depot directory the package's client data will be placed
    in, normalized to an absolute path.
    """
    self.clientDataDir = os.path.abspath(forceFilename(clientDataDir))
    logger.info(u"Client data dir set to '%s'" % self.clientDataDir)
def __init__(self, packageSourceDir, tempDir=None, customName=None, customOnly=False, packageFileDestDir=None, format='cpio', compression='gzip', dereference=False):
    """
    Prepare packing of an opsi product from a source directory.

    :param packageSourceDir: Directory containing the product source;
        must exist.
    :param tempDir: Working directory; defaults to DEFAULT_TMP_DIR.
    :param customName: Optional custom extension name (OPSI.<name>).
    :param customOnly: If True, pack only the custom parts.
    :param packageFileDestDir: Where to write the resulting .opsi file;
        defaults to the source directory.
    :param format: 'cpio' (default) or 'tar'.
    :param compression: 'gzip' (default), 'bzip2' or falsy for none.
    :param dereference: Follow symlinks when packing.
    :raises IOError: if a required directory does not exist.
    :raises ValueError: for an unsupported format or compression.
    """
    self.packageSourceDir = os.path.abspath(forceFilename(packageSourceDir))
    if not os.path.isdir(self.packageSourceDir):
        raise IOError(u"Package source directory '%s' not found" % self.packageSourceDir)

    tempDir = tempDir or DEFAULT_TMP_DIR
    self.tempDir = os.path.abspath(forceFilename(tempDir))
    if not os.path.isdir(self.tempDir):
        raise IOError(u"Temporary directory '%s' not found" % self.tempDir)

    self.customName = None
    if customName:
        self.customName = forcePackageCustomName(customName)

    self.customOnly = forceBool(customOnly)

    if format:
        if format not in (u'cpio', u'tar'):
            raise ValueError(u"Format '%s' not supported" % format)
        self.format = format
    else:
        self.format = u'cpio'

    if not compression:
        self.compression = None
    else:
        if compression not in (u'gzip', u'bzip2'):
            raise ValueError(u"Compression '%s' not supported" % compression)
        self.compression = compression

    self.dereference = forceBool(dereference)

    if not packageFileDestDir:
        packageFileDestDir = self.packageSourceDir
    packageFileDestDir = os.path.abspath(forceFilename(packageFileDestDir))
    if not os.path.isdir(packageFileDestDir):
        raise IOError(u"Package destination directory '%s' not found" % packageFileDestDir)

    # Prefer the custom control file (OPSI.<customName>/control) if present.
    packageControlFile = os.path.join(self.packageSourceDir, u'OPSI', u'control')
    if customName and os.path.exists(os.path.join(self.packageSourceDir, u'OPSI.%s' % customName, u'control')):
        packageControlFile = os.path.join(self.packageSourceDir, u'OPSI.%s' % customName, u'control')
    self.packageControlFile = PackageControlFile(packageControlFile)
    self.packageControlFile.parse()

    # The custom name becomes a '~<name>' suffix in the package filename.
    customName = u''
    if self.customName:
        customName = u'~%s' % self.customName

    # Package file name: <productId>_<productVersion>-<packageVersion><custom>.opsi
    self.packageFile = os.path.join(packageFileDestDir, u"%s_%s-%s%s.opsi" % (self.packageControlFile.getProduct().id, self.packageControlFile.getProduct().productVersion, self.packageControlFile.getProduct().packageVersion, customName))
    # Randomized pack dir to avoid collisions between concurrent runs.
    self.tmpPackDir = os.path.join(self.tempDir, u'.opsi.pack.%s' % randomString(5))
def unpackSource(self, destinationDir=u'.', newProductId=None, progressSubject=None):
    """
    Extract the package source archives into `destinationDir`.

    Each sub-archive (e.g. CLIENT_DATA, SERVER_DATA) is extracted into
    a directory named after the archive. If `newProductId` is given,
    product scripts are renamed accordingly and a new control file is
    generated.

    :raises RuntimeError: if extraction fails (temp files are cleaned up).
    """
    logger.info(u"Extracting package source from '%s'" % self.packageFile)
    if progressSubject:
        progressSubject.setMessage(_(u"Extracting package source from '%s'") % self.packageFile)
    try:
        destinationDir = forceFilename(destinationDir)
        if newProductId:
            newProductId = forceUnicode(newProductId)

        # First extract the outer package archive into the temp unpack dir.
        archive = Archive(filename=self.packageFile, progressSubject=progressSubject)
        logger.debug(u"Extracting source from package '%s' to: '%s'" % (self.packageFile, destinationDir))
        if progressSubject:
            progressSubject.setMessage(_(u'Extracting archives'))
        archive.extract(targetPath=self.tmpUnpackDir)

        # Then extract every contained sub-archive into its own directory.
        for f in os.listdir(self.tmpUnpackDir):
            logger.info(u"Processing file '%s'" % f)
            archiveName = u''
            if f.endswith('.cpio.gz'):
                archiveName = f[:-8]
            elif f.endswith('.cpio'):
                archiveName = f[:-5]
            elif f.endswith('.tar.gz'):
                archiveName = f[:-7]
            elif f.endswith('.tar'):
                archiveName = f[:-4]
            elif f.startswith('OPSI'):
                # Control data is handled separately, not an archive here.
                continue
            else:
                logger.warning(u"Unknown content in archive: %s" % f)
                continue
            archive = Archive(filename=os.path.join(self.tmpUnpackDir, f), progressSubject=progressSubject)
            if progressSubject:
                progressSubject.setMessage(_(u'Extracting archive %s') % archiveName)
            archive.extract(targetPath=os.path.join(destinationDir, archiveName))

        if newProductId:
            # Rename product scripts that carry the old product id and
            # regenerate the control file with the new id.
            self.getMetaData()
            product = self.packageControlFile.getProduct()
            for scriptName in (u'setupScript', u'uninstallScript', u'updateScript', u'alwaysScript', u'onceScript', u'customScript'):
                script = getattr(product, scriptName)
                if not script:
                    continue
                newScript = script.replace(product.id, newProductId)
                if not os.path.exists(os.path.join(destinationDir, u'CLIENT_DATA', script)):
                    logger.warning(u"Script file '%s' not found" % os.path.join(destinationDir, u'CLIENT_DATA', script))
                    continue
                os.rename(os.path.join(destinationDir, u'CLIENT_DATA', script), os.path.join(destinationDir, u'CLIENT_DATA', newScript))
                setattr(product, scriptName, newScript)
            product.setId(newProductId)
            self.packageControlFile.setProduct(product)
            self.packageControlFile.setFilename(os.path.join(destinationDir, u'OPSI', u'control'))
            self.packageControlFile.generate()
        logger.debug(u"Finished extracting package source")
    except Exception as e:
        logger.logException(e, LOG_INFO)
        # Remove temporary unpack data before propagating the error.
        self.cleanup()
        raise RuntimeError(u"Failed to extract package source from '%s': %s" % (self.packageFile, e))
def installPackage(self, filename, force=False, propertyDefaultValues=None, tempDir=None, forceProductId=None, suppressPackageContentFileGeneration=False):
    """
    Install an opsi product package on this depot.

    :param filename: Path to the .opsi package file.
    :param force: Install even if the product is locked on the depot.
    :param propertyDefaultValues: Mapping propertyId -> default values to
        apply to the depot's product property states.
    :param tempDir: Optional working directory for unpacking.
    :param forceProductId: Install the package under this product id
        instead of the id from the control file.
    :param suppressPackageContentFileGeneration: Skip creating the
        package content file after unpacking.
    :raises BackendError: wrapping any failure during installation.
    """

    @contextmanager
    def productPackageFile(filename, tempDir, depotId):
        # Resolve the depot and derive the client data dir from its local URL.
        try:
            depots = self._depotBackend._context.host_getObjects(id=depotId)
            depot = depots[0]
            del depots
        except IndexError:
            raise BackendMissingDataError(u"Depot '%s' not found in backend" % depotId)

        depotLocalUrl = depot.getDepotLocalUrl()
        if not depotLocalUrl.startswith(u'file:///'):
            raise BackendBadValueError(u"Value '%s' not allowed for depot local url (has to start with 'file:///')" % depotLocalUrl)
        clientDataDir = depotLocalUrl[7:]

        ppf = ProductPackageFile(filename, tempDir=tempDir)
        ppf.setClientDataDir(clientDataDir)
        ppf.getMetaData()

        try:
            yield ppf
            ppf.setAccessRights()
        finally:
            # Best-effort cleanup of temporary unpack data.
            try:
                ppf.cleanup()
            except Exception as cleanupError:
                logger.error("Cleanup failed: {0!r}", cleanupError)

    @contextmanager
    def lockProduct(backend, product, depotId, forceInstallation):
        # Lock the product on the depot for the duration of the install;
        # the lock stays in place if installation fails.
        productId = product.getId()
        logger.debug("Checking for locked product '{}' on depot '{}'", productId, depotId)
        productOnDepots = backend.productOnDepot_getObjects(depotId=depotId, productId=productId)
        try:
            if productOnDepots[0].getLocked():
                logger.notice(u"Product '{0}' currently locked on depot '{1}'", productId, depotId)
                if not forceInstallation:
                    raise BackendTemporaryError(u"Product '{}' currently locked on depot '{}'".format(productId, depotId))
                logger.warning(u"Installation of locked product forced")
        except IndexError:
            pass  # Product not yet on depot - nothing to check.

        logger.notice(u"Locking product '{0}' on depot '{1}'", productId, depotId)
        productOnDepot = ProductOnDepot(
            productId=productId,
            productType=product.getType(),
            productVersion=product.getProductVersion(),
            packageVersion=product.getPackageVersion(),
            depotId=depotId,
            locked=True
        )
        logger.info(u"Creating product on depot {0}", productOnDepot)
        backend.productOnDepot_createObjects(productOnDepot)

        try:
            yield productOnDepot
        except Exception as err:
            logger.warning("Installation error. Not unlocking product '{}' on depot '{}'.", productId, depotId)
            raise err

        logger.notice(u"Unlocking product '{0}' {1}-{2} on depot '{3}'", productOnDepot.getProductId(), productOnDepot.getProductVersion(), productOnDepot.getPackageVersion(), depotId)
        productOnDepot.setLocked(False)
        backend.productOnDepot_updateObject(productOnDepot)

    @contextmanager
    def runPackageScripts(productPackageFile, depotId):
        # preinst before the body, postinst after it.
        logger.info(u"Running preinst script")
        for line in productPackageFile.runPreinst(({'DEPOT_ID': depotId})):
            logger.info(u"[preinst] {0}", line)

        yield

        logger.info(u"Running postinst script")
        for line in productPackageFile.runPostinst({'DEPOT_ID': depotId}):
            logger.info(u"[postinst] {0}", line)

    def cleanUpProducts(backend, productId):
        # Remove product versions that are no longer present on any depot.
        productIdents = set()
        for productOnDepot in backend.productOnDepot_getObjects(productId=productId):
            productIdent = u"%s;%s;%s" % (productOnDepot.productId, productOnDepot.productVersion, productOnDepot.packageVersion)
            productIdents.add(productIdent)

        deleteProducts = set(
            product
            for product in backend.product_getObjects(id=productId)
            if product.getIdent(returnType='unicode') not in productIdents
        )

        if deleteProducts:
            backend.product_deleteObjects(deleteProducts)

    def cleanUpProductPropertyStates(backend, productProperties, depotId, productOnDepot):
        # For non-editable properties with a fixed value set, drop or fix
        # client property states whose values are no longer possible.
        productPropertiesToCleanup = {}
        for productProperty in productProperties:
            if productProperty.editable or not productProperty.possibleValues:
                continue
            productPropertiesToCleanup[productProperty.propertyId] = productProperty

        if productPropertiesToCleanup:
            clientIds = set(
                clientToDepot['clientId']
                for clientToDepot in backend.configState_getClientToDepotserver(depotIds=depotId)
            )

            if clientIds:
                deleteProductPropertyStates = []
                updateProductPropertyStates = []
                states = backend.productPropertyState_getObjects(
                    objectId=clientIds,
                    productId=productOnDepot.getProductId(),
                    propertyId=list(productPropertiesToCleanup.keys())
                )

                for productPropertyState in states:
                    changed = False
                    newValues = []
                    for value in productPropertyState.values:
                        productProperty = productPropertiesToCleanup[productPropertyState.propertyId]
                        if value in productProperty.possibleValues:
                            newValues.append(value)
                            continue

                        if productProperty.getType() == 'BoolProductProperty' and forceBool(value) in productProperty.possibleValues:
                            # Value matches after boolean coercion.
                            newValues.append(forceBool(value))
                            changed = True
                            continue
                        elif productProperty.getType() == 'UnicodeProductProperty':
                            # Try a case-insensitive match against the
                            # possible values.
                            newValue = None
                            for possibleValue in productProperty.possibleValues:
                                if forceUnicodeLower(possibleValue) == forceUnicodeLower(value):
                                    newValue = possibleValue
                                    break

                            if newValue is not None:
                                newValues.append(newValue)
                                changed = True
                                continue

                        changed = True

                    if changed:
                        if not newValues:
                            logger.debug(u"Properties changed: marking productPropertyState {0} for deletion", productPropertyState)
                            deleteProductPropertyStates.append(productPropertyState)
                        else:
                            productPropertyState.setValues(newValues)
                            logger.debug(u"Properties changed: marking productPropertyState {0} for update", productPropertyState)
                            updateProductPropertyStates.append(productPropertyState)

                if deleteProductPropertyStates:
                    backend.productPropertyState_deleteObjects(deleteProductPropertyStates)
                if updateProductPropertyStates:
                    backend.productPropertyState_updateObjects(updateProductPropertyStates)

    depotId = self._depotBackend._depotId
    logger.info(u"=================================================================================================")
    if forceProductId:
        forceProductId = forceProductIdFunc(forceProductId)
        # The format strings previously contained the literal '(unknown)'
        # where the filename placeholder belongs; the filename kwarg was
        # passed but never rendered.
        logger.notice(
            u"Installing package file '{filename}' as '{productId}' on depot '{depotId}'",
            filename=filename, depotId=depotId, productId=forceProductId)
    else:
        logger.notice(
            u"Installing package file '{filename}' on depot '{depotId}'",
            filename=filename, depotId=depotId)

    try:
        filename = forceFilename(filename)
        force = forceBool(force)
        # Default to None instead of a shared mutable {} default argument,
        # which this method goes on to mutate.
        propertyDefaultValues = forceDict(propertyDefaultValues or {})
        for propertyId in propertyDefaultValues:
            if propertyDefaultValues[propertyId] is None:
                propertyDefaultValues[propertyId] = []

        if tempDir:
            tempDir = forceFilename(tempDir)
        else:
            tempDir = None

        if not os.path.isfile(filename):
            raise BackendIOError(u"Package file '{0}' does not exist or can not be accessed.".format(filename))
        if not os.access(filename, os.R_OK):
            raise BackendIOError(u"Read access denied for package file '%s'" % filename)

        try:
            dataBackend = self._depotBackend._context

            with productPackageFile(filename, tempDir, depotId) as ppf:
                product = ppf.packageControlFile.getProduct()
                if forceProductId:
                    logger.info(u"Forcing product id '{0}'", forceProductId)
                    product.setId(forceProductId)
                    ppf.packageControlFile.setProduct(product)

                productId = product.getId()

                logger.info(u"Creating product in backend")
                dataBackend.product_createObjects(product)

                with lockProduct(dataBackend, product, depotId, force) as productOnDepot:
                    logger.info(u"Checking package dependencies")
                    self.checkDependencies(ppf)

                    with runPackageScripts(ppf, depotId):
                        logger.info(u"Deleting old client-data dir")
                        ppf.deleteProductClientDataDir()

                        logger.info(u"Unpacking package files")
                        ppf.extractData()

                        logger.info(u"Updating product dependencies of product %s" % product)
                        currentProductDependencies = {}
                        for productDependency in dataBackend.productDependency_getObjects(productId=productId, productVersion=product.getProductVersion(), packageVersion=product.getPackageVersion()):
                            ident = productDependency.getIdent(returnType='unicode')
                            currentProductDependencies[ident] = productDependency

                        productDependencies = []
                        for productDependency in ppf.packageControlFile.getProductDependencies():
                            if forceProductId:
                                productDependency.productId = productId

                            ident = productDependency.getIdent(returnType='unicode')
                            try:
                                del currentProductDependencies[ident]
                            except KeyError:
                                pass  # Dependency does currently not exist.

                            productDependencies.append(productDependency)

                        dataBackend.productDependency_createObjects(productDependencies)
                        if currentProductDependencies:
                            # Whatever is left was not re-declared: delete it.
                            dataBackend.productDependency_deleteObjects(currentProductDependencies.values())

                        logger.info(u"Updating product properties of product %s" % product)
                        currentProductProperties = {}
                        productProperties = []
                        for productProperty in dataBackend.productProperty_getObjects(productId=productId, productVersion=product.getProductVersion(), packageVersion=product.getPackageVersion()):
                            ident = productProperty.getIdent(returnType='unicode')
                            currentProductProperties[ident] = productProperty

                        for productProperty in ppf.packageControlFile.getProductProperties():
                            if forceProductId:
                                productProperty.productId = productId

                            ident = productProperty.getIdent(returnType='unicode')
                            try:
                                del currentProductProperties[ident]
                            except KeyError:
                                pass  # Property not found - everyhing okay

                            productProperties.append(productProperty)
                        dataBackend.productProperty_createObjects(productProperties)

                        for productProperty in productProperties:
                            # Adjust property default values
                            if productProperty.editable or not productProperty.possibleValues:
                                continue

                            newValues = [
                                value
                                for value in propertyDefaultValues.get(productProperty.propertyId, [])
                                if value in productProperty.possibleValues
                            ]
                            if not newValues and productProperty.defaultValues:
                                newValues = productProperty.defaultValues
                            propertyDefaultValues[productProperty.propertyId] = newValues

                        if currentProductProperties.values():
                            # Whatever is left was not re-declared: delete it.
                            dataBackend.productProperty_deleteObjects(currentProductProperties.values())

                        logger.info(u"Deleting product property states of product %s on depot '%s'" % (productId, depotId))
                        dataBackend.productPropertyState_deleteObjects(
                            dataBackend.productPropertyState_getObjects(productId=productId, objectId=depotId)
                        )

                        logger.info(u"Deleting not needed property states of product %s" % productId)
                        productPropertyStates = dataBackend.productPropertyState_getObjects(productId=productId)
                        baseProperties = dataBackend.productProperty_getObjects(productId=productId)

                        productPropertyIds = None
                        productPropertyStatesToDelete = None
                        productPropertyIds = [productProperty.propertyId for productProperty in baseProperties]
                        productPropertyStatesToDelete = [ppState for ppState in productPropertyStates if ppState.propertyId not in productPropertyIds]
                        logger.debug(u"Following productPropertyStates are marked to delete: '%s'" % productPropertyStatesToDelete)
                        if productPropertyStatesToDelete:
                            dataBackend.productPropertyState_deleteObjects(productPropertyStatesToDelete)

                        logger.info(u"Setting product property states in backend")
                        productPropertyStates = [
                            ProductPropertyState(
                                productId=productId,
                                propertyId=productProperty.propertyId,
                                objectId=depotId,
                                values=productProperty.defaultValues
                            )
                            for productProperty in productProperties
                        ]

                        for productPropertyState in productPropertyStates:
                            if productPropertyState.propertyId in propertyDefaultValues:
                                try:
                                    productPropertyState.setValues(propertyDefaultValues[productPropertyState.propertyId])
                                except Exception as installationError:
                                    logger.error(u"Failed to set default values to {0} for productPropertyState {1}: {2}", propertyDefaultValues[productPropertyState.propertyId], productPropertyState, installationError)
                        dataBackend.productPropertyState_createObjects(productPropertyStates)

                    if not suppressPackageContentFileGeneration:
                        ppf.createPackageContentFile()
                    else:
                        logger.debug("Suppressed generation of package content file.")

            cleanUpProducts(dataBackend, productOnDepot.productId)
            cleanUpProductPropertyStates(dataBackend, productProperties, depotId, productOnDepot)
        except Exception as installingPackageError:
            logger.debug(u"Failed to install the package {!r}", filename)
            logger.logException(installingPackageError, logLevel=LOG_DEBUG)
            raise installingPackageError
    except Exception as installationError:
        logger.logException(installationError)
        raise BackendError(u"Failed to install package '%s' on depot '%s': %s" % (filename, depotId, installationError))
def parse(self, configuration=None):
    """
    Parse the configuration file.

    Reads the main config file section by section ([general],
    [notification], [wol], [installation], [repository_*]) and then any
    additional repository config files found in the configured
    repository config directory.

    :param configuration: Predefined configuration. Contents may be \
overridden based on values in configuration file.
    :raises OSError: If the configuration file does not exist.
    :raises RuntimeError: If reading the main configuration file fails.
    :rtype: dict
    """
    logger.info(u"Reading config file '%s'" % self.configFile)
    if not os.path.isfile(self.configFile):
        raise OSError(u"Configuration file {!r} not found".format(self.configFile))

    # Start from the defaults, let any predefined configuration override
    # them, and always reset the repository list before re-parsing.
    config = DEFAULT_CONFIG.copy()
    if configuration:
        config.update(configuration)
    config['repositories'] = []

    try:
        iniFile = IniFile(filename=self.configFile, raw=True)
        configIni = iniFile.parse()
        for section in configIni.sections():
            if section.lower() == 'general':
                for (option, value) in configIni.items(section):
                    if option.lower() == 'packagedir':
                        config["packageDir"] = forceFilename(value.strip())
                    elif option.lower() == 'logfile':
                        value = forceFilename(value.strip())
                        logger.setLogFile(value)
                    elif option.lower() == 'loglevel':
                        logger.setFileLevel(forceInt(value.strip()))
                    elif option.lower() == 'timeout':
                        # TODO: find a better way!
                        socket.setdefaulttimeout(float(value.strip()))
                    elif option.lower() == 'tempdir':
                        config["tempdir"] = value.strip()
                    elif option.lower() == 'repositoryconfigdir':
                        config["repositoryConfigDir"] = value.strip()
                    elif option.lower() == 'proxy' and value.strip():
                        config["proxy"] = forceUrl(value.strip())
            elif section.lower() == 'notification':
                for (option, value) in configIni.items(section):
                    if option.lower() == 'active':
                        config["notification"] = forceBool(value)
                    elif option.lower() == 'smtphost':
                        config["smtphost"] = forceHostAddress(value.strip())
                    elif option.lower() == 'smtpport':
                        config["smtpport"] = forceInt(value.strip())
                    elif option.lower() == 'smtpuser':
                        config["smtpuser"] = forceUnicode(value.strip())
                    elif option.lower() == 'smtppassword':
                        config["smtppassword"] = forceUnicode(value.strip())
                    elif option.lower() == 'subject':
                        config["subject"] = forceUnicode(value.strip())
                    elif option.lower() == 'use_starttls':
                        config["use_starttls"] = forceBool(value.strip())
                    elif option.lower() == 'sender':
                        config["sender"] = forceEmailAddress(value.strip())
                    elif option.lower() == 'receivers':
                        config["receivers"] = []
                        for receiver in splitAndStrip(value, u","):
                            config["receivers"].append(forceEmailAddress(receiver))
            elif section.lower() == 'wol':
                for (option, value) in configIni.items(section):
                    if option.lower() == 'active':
                        config["wolAction"] = forceBool(value.strip())
                    elif option.lower() == 'excludeproductids':
                        config['wolActionExcludeProductIds'] = []
                        for productId in splitAndStrip(value, u','):
                            config["wolActionExcludeProductIds"].append(forceProductId(productId))
                    elif option.lower() == 'shutdownwanted':
                        config["wolShutdownWanted"] = forceBool(value.strip())
                    elif option.lower() == 'startgap':
                        config["wolStartGap"] = forceInt(value.strip())
                        # A negative gap makes no sense; clamp to zero.
                        if config["wolStartGap"] < 0:
                            config["wolStartGap"] = 0
            elif section.lower() == 'installation':
                for (option, value) in configIni.items(section):
                    if option.lower() == 'windowstart':
                        if not value.strip():
                            continue
                        if not self.TIME_REGEX.search(value.strip()):
                            raise ValueError(u"Start time '%s' not in needed format 'HH:MM'" % value.strip())
                        config["installationWindowStartTime"] = value.strip()
                    elif option.lower() == 'windowend':
                        if not value.strip():
                            continue
                        if not self.TIME_REGEX.search(value.strip()):
                            raise ValueError(u"End time '%s' not in needed format 'HH:MM'" % value.strip())
                        config["installationWindowEndTime"] = value.strip()
                    elif option.lower() == 'exceptproductids':
                        config['installationWindowExceptions'] = []
                        for productId in splitAndStrip(value, ','):
                            config["installationWindowExceptions"].append(forceProductId(productId))
            elif section.lower().startswith('repository'):
                # A broken repository section must not abort parsing of
                # the remaining sections - log and carry on.
                try:
                    repository = self._getRepository(configIni, section, config['forceRepositoryActivation'], config['repositoryName'], config['installAllAvailable'], config['proxy'])
                    config['repositories'].append(repository)
                except MissingConfigurationValueError as mcverr:
                    logger.debug(u"Configuration for {section} incomplete: {error}", error=mcverr, section=section)
                except ConfigurationError as cerr:
                    logger.error(u"Configuration problem in {section}: {error}", error=cerr, section=section)
                except Exception as err:
                    logger.error(u"Can't load repository from {section}: {error}", error=err, section=section)
            else:
                logger.error(u"Unhandled section '%s'" % section)
    except Exception as err:
        raise RuntimeError(u"Failed to read config file '%s': %s" % (self.configFile, err))

    # Additional per-repository config files; errors here are logged but
    # never fatal.
    for configFile in getRepoConfigs(config['repositoryConfigDir']):
        iniFile = IniFile(filename=configFile, raw=True)

        try:
            repoConfig = iniFile.parse()
            for section in repoConfig.sections():
                if not section.lower().startswith('repository'):
                    continue

                try:
                    repository = self._getRepository(repoConfig, section, config['forceRepositoryActivation'], config['repositoryName'], config['installAllAvailable'], proxy=config['proxy'])
                    config['repositories'].append(repository)
                except MissingConfigurationValueError as mcverr:
                    # FIX: restore the {filename} placeholder - the
                    # filename=configFile keyword was passed but the
                    # message text had lost its placeholder.
                    logger.debug(u"Configuration for {section} in {filename} incomplete: {error}", error=mcverr, section=section, filename=configFile)
                except ConfigurationError as cerr:
                    logger.error(u"Configuration problem in {section} in {filename}: {error}", error=cerr, section=section, filename=configFile)
                except Exception as err:
                    logger.error(u"Can't load repository from {section} in {filename}: {error}", error=err, section=section, filename=configFile)
        except Exception as error:
            logger.error("Unable to load repositories from {filename}: {error}", filename=configFile, error=error)

    return config
def _getRepository(self, config, section, forceRepositoryActivation=False, repositoryName=None, installAllAvailable=False, proxy=None):
    """
    Build a ProductRepositoryInfo from one repository INI section.

    The section is scanned twice: a first pass collects the options that
    decide how the repository object is constructed (active, baseUrl,
    opsiDepotId, proxy), a second pass applies all remaining options to
    the constructed object.

    :param config: Parsed INI object containing ``section``.
    :param section: Name of the repository section to read.
    :param forceRepositoryActivation: If True, only the repository whose \
name equals ``repositoryName`` is activated; all others are deactivated.
    :param repositoryName: Repository name used with ``forceRepositoryActivation``.
    :param installAllAvailable: If True, force autoInstall/autoUpdate on \
and clear all excludes on the resulting repository.
    :param proxy: Default proxy URL; a ``proxy`` option inside the \
section overrides it.
    :raises RequiringBackendError: If an opsiDepotId is given but no backend is available.
    :raises ConfigurationError: If the referenced depot or its repository URL is missing.
    :raises MissingConfigurationValueError: If neither baseUrl nor opsiDepotId is set.
    """
    # First pass: options that determine how to construct the repository.
    active = False
    baseUrl = None
    opsiDepotId = None
    for (option, value) in config.items(section):
        option = option.lower()
        value = value.strip()
        if option == 'active':
            active = forceBool(value)
        elif option == 'baseurl':
            if value:
                baseUrl = forceUrl(value)
        elif option == 'opsidepotid':
            if value:
                opsiDepotId = forceHostId(value)
        elif option == 'proxy':
            if value:
                # Section-local proxy wins over the passed-in default.
                proxy = forceUrl(value)

    # The repository name is the section name without its prefix.
    repoName = section.replace('repository_', '', 1)

    if forceRepositoryActivation:
        # Only the explicitly requested repository may stay active.
        if repoName == repositoryName:
            logger.debug("Activation for repository {0} forced.", repoName)
            active = True
        else:
            active = False

    repository = None
    if opsiDepotId:
        # Depot-backed repository: resolve the remote URL via the backend.
        if not self.backend:
            raise RequiringBackendError("Repository section '{0}' supplied an depot ID but we have no backend to check.".format(section))

        depots = self.backend.host_getObjects(type='OpsiDepotserver', id=opsiDepotId)
        if not depots:
            raise ConfigurationError(u"Depot '%s' not found in backend" % opsiDepotId)
        if not depots[0].repositoryRemoteUrl:
            raise ConfigurationError(u"Repository remote url for depot '%s' not found in backend" % opsiDepotId)

        repository = ProductRepositoryInfo(name=repoName, baseUrl=depots[0].repositoryRemoteUrl, dirs=['/'], username=self.depotId, password=self.depotKey, opsiDepotId=opsiDepotId, active=active)
    elif baseUrl:
        if proxy:
            logger.info(u"Repository {} is using proxy {}", repoName, proxy)
        repository = ProductRepositoryInfo(name=repoName, baseUrl=baseUrl, proxy=proxy, active=active)
    else:
        raise MissingConfigurationValueError(u"Repository section '{0}': neither baseUrl nor opsiDepotId set".format(section))

    # Second pass: apply the remaining options to the created repository.
    for (option, value) in config.items(section):
        if option.lower() == 'username':
            repository.username = forceUnicode(value.strip())
        elif option.lower() == 'password':
            repository.password = forceUnicode(value.strip())
            if repository.password:
                # Keep the password out of log output.
                logger.addConfidentialString(repository.password)
        elif option.lower() == 'authcertfile':
            repository.authcertfile = forceFilename(value.strip())
        elif option.lower() == 'authkeyfile':
            repository.authkeyfile = forceFilename(value.strip())
        elif option.lower() == 'autoinstall':
            repository.autoInstall = forceBool(value.strip())
        elif option.lower() == 'autoupdate':
            repository.autoUpdate = forceBool(value.strip())
        elif option.lower() == 'autosetup':
            repository.autoSetup = forceBool(value.strip())
        elif option.lower() == 'onlydownload':
            repository.onlyDownload = forceBool(value.strip())
        elif option.lower() == 'inheritproductproperties':
            # Property inheritance needs a depot as source.
            if not opsiDepotId:
                logger.warning(u"InheritProductProperties not possible with normal http ressource.")
                repository.inheritProductProperties = False
            else:
                repository.inheritProductProperties = forceBool(value.strip())
        elif option.lower() == 'dirs':
            repository.dirs = []
            for directory in splitAndStrip(value, ','):
                repository.dirs.append(forceFilename(directory))
        elif option.lower() == 'excludes':
            repository.excludes = []
            for exclude in splitAndStrip(value, ','):
                repository.excludes.append(re.compile(exclude))
        elif option.lower() == 'includeproductids':
            repository.includes = []
            for include in splitAndStrip(value, ','):
                repository.includes.append(re.compile(include))
        elif option.lower() == 'description':
            repository.description = forceUnicode(value)

    if installAllAvailable:
        # "Install everything" overrides the per-repository filters.
        repository.autoInstall = True
        repository.autoUpdate = True
        repository.excludes = []

    return repository
def __init__(self, host, port, socketTimeout=None, connectTimeout=None, retryTime=0, maxsize=1, block=False, reuseConnection=False, verifyServerCert=False, serverCertFile=None, caCertFile=None, verifyServerCertByCa=False, proxyURL=None):
    """
    Initialize a connection pool for one host/port pair.

    Server certificate verification is only set up when this instance is
    an HTTPSConnectionPool; for plain HTTP all verification flags stay
    disabled. Localhost targets are trusted without verification.
    """
    # Normalized connection parameters.
    self.host = forceUnicode(host)
    self.port = forceInt(port)
    self.socketTimeout = forceInt(socketTimeout or 0)
    self.connectTimeout = forceInt(connectTimeout or 0)
    self.retryTime = forceInt(retryTime)
    self.block = forceBool(block)
    self.reuseConnection = forceBool(reuseConnection)
    self.proxyURL = forceUnicode(proxyURL or u"")

    # Pool bookkeeping and statistics.
    self.pool = None
    self.usageCount = 1
    self.num_connections = 0
    self.num_requests = 0
    self.httplibDebugLevel = 0

    # Verification state: everything off until configured below.
    self.peerCertificate = None
    self.serverVerified = False
    self.verifyServerCert = False
    self.serverCertFile = None
    self.caCertFile = None
    self.verifyServerCertByCa = False

    if isinstance(self, HTTPSConnectionPool):
        connectingToLocalhost = self.host in ('localhost', '127.0.0.1')
        if connectingToLocalhost:
            # Local connections are trusted implicitly.
            logger.debug(u"No host verification for localhost")
            self.serverVerified = True
        else:
            if caCertFile:
                self.caCertFile = forceFilename(caCertFile)

            self.verifyServerCertByCa = forceBool(verifyServerCertByCa)
            if self.verifyServerCertByCa:
                # CA-based verification requires a CA certificate file.
                if not self.caCertFile:
                    raise ValueError(u"Server certificate verfication by CA enabled but no CA cert file given")
                logger.info(u"Server certificate verfication by CA file '%s' enabled for host '%s'" % (self.caCertFile, self.host))
            else:
                # Otherwise fall back to pinning a stored server certificate.
                self.verifyServerCert = forceBool(verifyServerCert)
                if serverCertFile:
                    self.serverCertFile = forceFilename(serverCertFile)
                if self.verifyServerCert:
                    if not self.serverCertFile:
                        raise ValueError(u"Server verfication enabled but no server cert file given")
                    logger.info(u"Server verfication by server certificate enabled for host '%s'" % self.host)

    self.adjustSize(maxsize)
def testforceFilename(path, expected):
    """
    forceFilename must return the expected value as a unicode string.

    Bug fix: the type assertion previously checked ``expected`` (the test
    fixture itself) instead of ``result``, so the unicode return-type
    guarantee of forceFilename was never actually exercised.
    """
    result = forceFilename(path)
    assert expected == result
    assert isinstance(result, unicode)
def __init__(self, address, **kwargs):
    """
    Backend for JSON-RPC access to another opsi service.

    :param address: Address of the remote service; handed to \
``_processAddress`` which fills host, port, protocol and base URL.
    :param compression: Specify compression usage. Can be a boolean \
or the strings 'gzip' or 'deflate' in case a specific compression is desired.
    :type compression: bool or str
    :param deflate: Specify if deflate compression should be used for requests. \
Deprecated: Use keyword 'compression' instead.
    :type deflate: bool
    """
    self._name = 'jsonrpc'

    Backend.__init__(self, **kwargs)

    # Connection / protocol defaults; may be overridden by kwargs below.
    self._application = 'opsi jsonrpc module version %s' % __version__
    self._sessionId = None
    self._compression = False
    self._connectOnInit = True
    self._connected = False
    self._retryTime = 5
    self._host = None
    self._port = None
    self._baseUrl = u'/rpc'
    self._protocol = 'https'
    self._socketTimeout = None
    self._connectTimeout = 30
    self._connectionPoolSize = 2
    self._interface = None
    # RPC id counter is shared between threads, hence the lock.
    self._rpcId = 0
    self._rpcIdLock = threading.Lock()
    self._async = False
    self._rpcQueue = None
    self._rpcQueuePollingTime = 0.01
    self._rpcQueueSize = 10
    # Certificate verification settings (all disabled by default).
    self._serverCertFile = None
    self._caCertFile = None
    self._verifyServerCert = False
    self._verifyServerCertByCa = False
    self._verifyByCaCertsFile = None
    self._proxyURL = None

    # Credentials may have been left unset by Backend.__init__.
    if not self._username:
        self._username = u''
    if not self._password:
        self._password = u''

    # Apply keyword options (case-insensitive); options with value None
    # are ignored where a None would clobber a sensible default.
    retry = True
    for (option, value) in kwargs.items():
        option = option.lower()
        if option == 'application':
            self._application = str(value)
        elif option == 'sessionid':
            self._sessionId = str(value)
        elif option == 'deflate':
            # Deprecated option, mapped onto the compression setting.
            if forceBool(value):
                self.setCompression('deflate')
            else:
                self.setCompression(False)
        elif option == 'compression':
            self._compression = self._parseCompressionValue(value)
        elif option == 'connectoninit':
            self._connectOnInit = forceBool(value)
        elif option == 'connecttimeout' and value is not None:
            self._connectTimeout = forceInt(value)
        elif option == 'connectionpoolsize' and value is not None:
            self._connectionPoolSize = forceInt(value)
        elif option in ('timeout', 'sockettimeout') and value is not None:
            self._socketTimeout = forceInt(value)
        elif option == 'retry':
            retry = forceBool(value)
        elif option == 'retrytime':
            self._retryTime = forceInt(value)
        elif option == 'rpcqueuepollingtime':
            self._rpcQueuePollingTime = forceFloat(value)
        elif option == 'rpcqueuesize':
            self._rpcQueueSize = forceInt(value)
        elif option == 'servercertfile' and value is not None:
            self._serverCertFile = forceFilename(value)
        elif option == 'verifyservercert':
            self._verifyServerCert = forceBool(value)
        elif option == 'cacertfile' and value is not None:
            self._caCertFile = forceFilename(value)
        elif option == 'verifyservercertbyca':
            self._verifyServerCertByCa = forceBool(value)
        elif option == 'proxyurl' and value is not None:
            logger.debug(u"ProxyURL detected: '%s'" % value)
            self._proxyURL = forceUnicode(value)

    # retry=False means no waiting time between attempts at all.
    if not retry:
        self._retryTime = 0

    if self._password:
        # Keep the password out of log output.
        logger.addConfidentialString(self._password)

    # Derive host/port/protocol/baseUrl from the given address.
    self._processAddress(address)

    self._connectionPool = getSharedConnectionPool(
        scheme=self._protocol,
        host=self._host,
        port=self._port,
        socketTimeout=self._socketTimeout,
        connectTimeout=self._connectTimeout,
        retryTime=self._retryTime,
        maxsize=self._connectionPoolSize,
        block=True,
        verifyServerCert=self._verifyServerCert,
        serverCertFile=self._serverCertFile,
        caCertFile=self._caCertFile,
        verifyServerCertByCa=self._verifyServerCertByCa,
        proxyURL=self._proxyURL)

    if self._connectOnInit:
        self.connect()
def _create(self, fileList, baseDir, command):
    """
    Create an archive by feeding file names to an external archiver.

    Runs ``command`` in a shell with cwd set to ``baseDir`` and writes
    one path per line (relative to ``baseDir``) to its stdin.  stdout and
    stderr are switched to non-blocking mode and drained while feeding so
    progress can be reported per processed file.

    :param fileList: Paths of the files to archive; empty entries are skipped.
    :param baseDir: Directory the paths are made relative to.
    :param command: Shell command reading file names from stdin.
    :raises IOError: If ``baseDir`` or one of the files does not exist.
    :raises RuntimeError: If the command exits with a non-zero code.
    """
    # Remember the cwd; it is changed for the subprocess and must be
    # restored even on error (see finally).
    curDir = os.path.abspath(os.getcwd())
    try:
        baseDir = os.path.abspath(forceFilename(baseDir))
        if not os.path.isdir(baseDir):
            raise IOError(u"Base dir '%s' not found" % baseDir)
        os.chdir(baseDir)

        logger.info(u"Executing: %s" % command)
        proc = subprocess.Popen(command, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

        # Encoding used to write file names onto the archiver's stdin.
        encoding = proc.stdin.encoding
        if not encoding:
            encoding = locale.getpreferredencoding()

        # Make both output pipes non-blocking so reads never stall the
        # feeding loop below.
        flags = fcntl.fcntl(proc.stdout, fcntl.F_GETFL)
        fcntl.fcntl(proc.stdout, fcntl.F_SETFL, flags | os.O_NONBLOCK)

        flags = fcntl.fcntl(proc.stderr, fcntl.F_GETFL)
        fcntl.fcntl(proc.stderr, fcntl.F_SETFL, flags | os.O_NONBLOCK)

        if self._progressSubject:
            self._progressSubject.setEnd(len(fileList))
            self._progressSubject.setState(0)

        error = ''
        ret = None
        for filename in fileList:
            if not filename:
                continue
            if not os.path.exists(filename):
                raise IOError(u"File '%s' not found" % filename)
            # python 2.6:
            if filename.startswith(baseDir):
                # f = os.path.relpath(f, baseDir) would do this on 2.6+;
                # here the prefix is stripped manually instead.
                filename = filename[len(baseDir):]
            while filename.startswith('/'):
                filename = filename[1:]
            logger.info(u"Adding file '%s'" % filename)
            proc.stdin.write("%s\n" % filename.encode(encoding))

            # Drain stdout; each newline is taken as one processed file.
            # Non-blocking reads raise when no data is ready - ignored.
            try:
                chunk = proc.stdout.read()
                if chunk:
                    filesAdded = chunk.count('\n')
                    if filesAdded > 0:
                        if self._progressSubject:
                            self._progressSubject.addToState(filesAdded)
            except Exception:
                pass

            # Drain stderr as well; its content is collected for the
            # error message and also counted towards progress.
            try:
                chunk = proc.stderr.read()
                if chunk:
                    error += chunk
                    filesAdded = chunk.count('\n')
                    if filesAdded > 0:
                        if self._progressSubject:
                            self._progressSubject.addToState(filesAdded)
            except Exception:
                # No data ready - back off briefly before continuing.
                time.sleep(0.001)

        # Closing stdin signals EOF; then poll until the archiver exits,
        # draining the remaining pipe output on the way.
        proc.stdin.close()
        while ret is None:
            ret = proc.poll()
            try:
                chunk = proc.stdout.read()
            except Exception:
                pass

            try:
                chunk = proc.stderr.read()
                if chunk:
                    if self._progressSubject:
                        self._progressSubject.addToState(chunk.count('\n'))
                    error += chunk
            except Exception:
                pass

        logger.info(u"Exit code: %s" % ret)
        if ret != 0:
            # Python 2 str -> unicode for the exception message.
            error = error.decode(encoding, 'replace')
            logger.error(error)
            raise RuntimeError(u"Command '%s' failed with code %s: %s" % (command, ret, error))

        if self._progressSubject:
            self._progressSubject.setState(len(fileList))
    finally:
        os.chdir(curDir)
def findFiles(directory, prefix=u'', excludeDir=None, excludeFile=None, includeDir=None, includeFile=None, returnDirs=True, returnLinks=True, followLinks=False, repository=None):
    """
    Recursively collect entries below ``directory``.

    Returned paths are relative, joined onto ``prefix``.  The
    include/exclude filters accept either a pattern string or an already
    compiled regex and are matched against the bare entry name.  When a
    ``repository`` object is given, its ``islink``/``isdir``/``listdir``
    replace the ``os`` equivalents.
    """
    def normalizePattern(pattern):
        # None/empty stays None; strings get compiled; compiled patterns
        # pass through unchanged.
        if not pattern:
            return None
        if isRegularExpressionPattern(pattern):
            return pattern
        return re.compile(forceUnicode(pattern))

    directory = forceFilename(directory)
    prefix = forceUnicode(prefix)
    excludeDir = normalizePattern(excludeDir)
    excludeFile = normalizePattern(excludeFile)
    includeDir = normalizePattern(includeDir)
    includeFile = normalizePattern(includeFile)
    returnDirs = forceBool(returnDirs)
    returnLinks = forceBool(returnLinks)
    followLinks = forceBool(followLinks)

    # Filesystem primitives: repository-provided or plain os.
    islink = repository.islink if repository else os.path.islink
    isdir = repository.isdir if repository else os.path.isdir
    listdir = repository.listdir if repository else os.listdir

    collected = []
    for entry in listdir(directory):
        if isinstance(entry, str):
            # TODO how to handle this with Python 3?
            logger.error(u"Bad filename '%s' found in directory '%s', skipping entry!" % (unicode(entry, 'ascii', 'replace'), directory))
            continue

        relativePath = os.path.join(prefix, entry)
        absolutePath = os.path.join(directory, entry)

        isLink = islink(absolutePath)
        if isLink and not returnLinks and not followLinks:
            continue

        if isdir(absolutePath) and (followLinks or not isLink):
            if excludeDir and re.search(excludeDir, entry):
                logger.debug(u"Excluding dir '%s' and containing files" % entry)
                continue
            if includeDir:
                if not re.search(includeDir, entry):
                    continue
                logger.debug(u"Including dir '%s' and containing files" % entry)

            if returnDirs:
                collected.append(relativePath)
            # Recurse with identical filter settings.
            collected.extend(findFiles(directory=absolutePath, prefix=relativePath, excludeDir=excludeDir, excludeFile=excludeFile, includeDir=includeDir, includeFile=includeFile, returnDirs=returnDirs, returnLinks=returnLinks, followLinks=followLinks, repository=repository))
            continue

        if excludeFile and re.search(excludeFile, entry):
            logger.debug((u"Excluding link '%s'" if isLink else u"Excluding file '%s'") % entry)
            continue

        if includeFile:
            if not re.search(includeFile, entry):
                continue
            logger.debug((u"Including link '%s'" if isLink else u"Including file '%s'") % entry)

        collected.append(relativePath)

    return collected
logger.setConsoleLevel(LOG_DEBUG) exception = None try: parser = argparse.ArgumentParser() parser.add_argument("--host", help="Notification server host", default=u'127.0.0.1') parser.add_argument("-p", "--port", type=int, help="Notification server port", default=0) parser.add_argument("-i", "--id", dest="notificationClientId", help="Notification client id", default=None) parser.add_argument("-l", "--log-file", dest="logFile", help="Log file to use.") args = parser.parse_args() args.port = forceUnicode(args.port) args.notificationClientId = forceUnicode(args.notificationClientId) if args.logFile: logFile = forceFilename(args.logFile) # TODO: logrotate? if os.path.exists(logFile): logger.notice("Deleting old log file: %s" % logFile) os.unlink(logFile) logger.notice("Setting log file: %s" % logFile) logger.setLogFile(logFile) logger.setFileLevel(LOG_DEBUG) w = OpsiDialogWindow() except Exception as err: # pylint: disable=broad-except logger.error(err, exc_info=True) tb = sys.exc_info()[2] while tb is not None: f = tb.tb_frame c = f.f_code