def smokeTest(java, baseURL, gitRevision, version, tmpDir, isSigned, local_keys, testArgs, downloadOnly=False):
  startTime = datetime.datetime.now()

  # Tests annotated @Nightly are more resource-intensive but often cover
  # important code paths. They're disabled by default to preserve a good
  # developer experience, but we enable them for smoke tests where we want good
  # coverage. Still we disable @BadApple tests
  testArgs = '-Dtests.nightly=true -Dtests.badapples=false %s' % testArgs

  if FORCE_CLEAN:
    if os.path.exists(tmpDir):
      raise RuntimeError('temp dir %s exists; please remove first' % tmpDir)

  if not os.path.exists(tmpDir):
    os.makedirs(tmpDir)

  solrPath = None
  print()
  print('Load release URL "%s"...' % baseURL)
  newBaseURL = unshortenURL(baseURL)
  if newBaseURL != baseURL:
    print(' unshortened: %s' % newBaseURL)
    baseURL = newBaseURL

  for text, subURL in getDirEntries(baseURL):
    if text.lower().find('solr') != -1:
      solrPath = subURL

  if solrPath is None:
    raise RuntimeError('could not find solr subdir')

  print()
  print('Get KEYS...')
  if local_keys is not None:
    print(" Using local KEYS file %s" % local_keys)
    keysFile = local_keys
  else:
    keysFileURL = "https://archive.apache.org/dist/solr/KEYS"
    print(" Downloading online KEYS file %s" % keysFileURL)
    scriptutil.download('KEYS', keysFileURL, tmpDir, force_clean=FORCE_CLEAN)
    keysFile = '%s/KEYS' % (tmpDir)

  if is_port_in_use(8983):
    raise RuntimeError('Port 8983 is already in use. The smoketester needs it to test Solr')

  print()
  print('Test Solr...')
  checkSigs(solrPath, version, tmpDir, isSigned, keysFile)
  if not downloadOnly:
    unpackAndVerify(java, tmpDir, 'solr-%s.tgz' % version, gitRevision, version, testArgs)
    unpackAndVerify(java, tmpDir, 'solr-%s-src.tgz' % version, gitRevision, version, testArgs)
    print()
    print('Test Maven artifacts...')
    checkMaven(baseURL, tmpDir, gitRevision, version, isSigned, keysFile)
  else:
    print("Solr test done (--download-only specified)")

  print('\nSUCCESS! [%s]\n' % (datetime.datetime.now() - startTime))

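# Illustrative sketch only (not part of the original script): smokeTest() above
# relies on an is_port_in_use() helper defined elsewhere in this file. A minimal
# standard-library port check along these lines would be sufficient; the name
# _example_is_port_in_use is hypothetical and kept distinct from the real helper.
def _example_is_port_in_use(port):
  import socket
  # connect_ex returns 0 when something is already listening on the port
  with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
    return s.connect_ex(('localhost', port)) == 0
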
def crawl(downloadedFiles, urlString, targetDir, exclusions=set()):
  for text, subURL in getDirEntries(urlString):
    if text not in exclusions:
      path = os.path.join(targetDir, text)
      if text.endswith('/'):
        if not os.path.exists(path):
          os.makedirs(path)
        crawl(downloadedFiles, subURL, path, exclusions)
      else:
        if not os.path.exists(path) or FORCE_CLEAN:
          scriptutil.download(text, subURL, targetDir, quiet=True, force_clean=FORCE_CLEAN)
        downloadedFiles.append(path)
        sys.stdout.write('.')

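# Illustrative sketch only (not part of the original script): one way crawl()
# might be driven to mirror a remote directory listing into a local scratch
# directory. The URL, target path, and function name below are hypothetical
# placeholders, not values used by this script.
def _example_crawl_usage():
  targetDir = '/tmp/maven-mirror'  # hypothetical scratch location
  if not os.path.exists(targetDir):
    os.makedirs(targetDir)
  downloadedFiles = []
  crawl(downloadedFiles,
        'https://archive.apache.org/dist/solr/solr/0.0.0/maven/',  # hypothetical URL
        targetDir,
        exclusions={'KEYS'})
  print('\nmirrored %d file(s)' % len(downloadedFiles))
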
def getBinaryDistFiles(tmpDir, version, baseURL):
  distribution = 'lucene-%s.tgz' % version
  if not os.path.exists('%s/%s' % (tmpDir, distribution)):
    distURL = '%s/lucene/%s' % (baseURL, distribution)
    print(' download %s...' % distribution, end=' ')
    scriptutil.download(distribution, distURL, tmpDir, force_clean=FORCE_CLEAN)
  destDir = '%s/unpack-lucene-getBinaryDistFiles' % tmpDir
  if os.path.exists(destDir):
    shutil.rmtree(destDir)
  os.makedirs(destDir)
  os.chdir(destDir)
  print(' unpack %s...' % distribution)
  unpackLogFile = '%s/unpack-%s-getBinaryDistFiles.log' % (tmpDir, distribution)
  run('tar xzf %s/%s' % (tmpDir, distribution), unpackLogFile)
  distributionFiles = []
  for root, dirs, files in os.walk(destDir):
    distributionFiles.extend([os.path.join(root, file) for file in files])
  return distributionFiles

def checkSigs(urlString, version, tmpDir, isSigned, keysFile):
  print(' test basics...')
  ents = getDirEntries(urlString)
  artifact = None
  changesURL = None
  mavenURL = None
  artifactURL = None
  expectedSigs = []
  if isSigned:
    expectedSigs.append('asc')
  expectedSigs.extend(['sha512'])
  sigs = []
  artifacts = []

  for text, subURL in ents:
    if text == 'KEYS':
      raise RuntimeError('lucene: release dir should not contain a KEYS file - only toplevel /dist/lucene/KEYS is used')
    elif text == 'maven/':
      mavenURL = subURL
    elif text.startswith('changes'):
      if text not in ('changes/', 'changes-%s/' % version):
        raise RuntimeError('lucene: found %s vs expected changes-%s/' % (text, version))
      changesURL = subURL
    elif artifact is None:
      artifact = text
      artifactURL = subURL
      expected = 'lucene-%s' % version
      if not artifact.startswith(expected):
        raise RuntimeError('lucene: unknown artifact %s: expected prefix %s' % (text, expected))
      sigs = []
    elif text.startswith(artifact + '.'):
      sigs.append(text[len(artifact)+1:])
    else:
      if sigs != expectedSigs:
        raise RuntimeError('lucene: artifact %s has wrong sigs: expected %s but got %s' % (artifact, expectedSigs, sigs))
      artifacts.append((artifact, artifactURL))
      artifact = text
      artifactURL = subURL
      sigs = []

  if sigs != []:
    artifacts.append((artifact, artifactURL))
    if sigs != expectedSigs:
      raise RuntimeError('lucene: artifact %s has wrong sigs: expected %s but got %s' % (artifact, expectedSigs, sigs))

  expected = ['lucene-%s-src.tgz' % version, 'lucene-%s.tgz' % version]
  actual = [x[0] for x in artifacts]
  if expected != actual:
    raise RuntimeError('lucene: wrong artifacts: expected %s but got %s' % (expected, actual))

  # Set up clean gpg world; import keys file:
  gpgHomeDir = '%s/lucene.gpg' % tmpDir
  if os.path.exists(gpgHomeDir):
    shutil.rmtree(gpgHomeDir)
  os.makedirs(gpgHomeDir, 0o700)
  run('gpg --homedir %s --import %s' % (gpgHomeDir, keysFile), '%s/lucene.gpg.import.log' % tmpDir)

  if mavenURL is None:
    raise RuntimeError('lucene is missing maven')

  if changesURL is None:
    raise RuntimeError('lucene is missing changes-%s' % version)
  testChanges(version, changesURL)

  for artifact, urlString in artifacts:
    print(' download %s...' % artifact)
    scriptutil.download(artifact, urlString, tmpDir, force_clean=FORCE_CLEAN)
    verifyDigests(artifact, urlString, tmpDir)

    if isSigned:
      print(' verify sig')
      # Test sig (this is done with a clean brand-new GPG world)
      scriptutil.download(artifact + '.asc', urlString + '.asc', tmpDir, force_clean=FORCE_CLEAN)
      sigFile = '%s/%s.asc' % (tmpDir, artifact)
      artifactFile = '%s/%s' % (tmpDir, artifact)
      logFile = '%s/lucene.%s.gpg.verify.log' % (tmpDir, artifact)
      run('gpg --homedir %s --display-charset utf-8 --verify %s %s' % (gpgHomeDir, sigFile, artifactFile), logFile)
      # Forward any GPG warnings, except the expected one (since it's a clean world)
      with open(logFile) as f:
        print("File: %s" % logFile)
        for line in f.readlines():
          if line.lower().find('warning') != -1 \
             and line.find('WARNING: This key is not certified with a trusted signature') == -1:
            print(' GPG: %s' % line.strip())

      # Test trust (this is done with the real users config)
      run('gpg --import %s' % (keysFile), '%s/lucene.gpg.trust.import.log' % tmpDir)
      print(' verify trust')
      logFile = '%s/lucene.%s.gpg.trust.log' % (tmpDir, artifact)
      run('gpg --display-charset utf-8 --verify %s %s' % (sigFile, artifactFile), logFile)
      # Forward any GPG warnings:
      with open(logFile) as f:
        for line in f.readlines():
          if line.lower().find('warning') != -1:
            print(' GPG: %s' % line.strip())