Example #1
	def patchSources(self, fetchedDir):
		with DeterministicTimestamps(self.timestamp):
			if self.patches:
				patchingMsg = styles.operationName("Patching") + " ..."
				print(patchingMsg)
				with chosenProgressReporter(len(self.patches), str(patchingMsg)) as pb:
					for p in self.patches:
						applyPatch(p, fetchedDir)
						pb.report(p)
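All of the snippets on this page follow the same pattern: chosenProgressReporter(total, title) is entered as a context manager and yields a reporter whose report() is called once per processed item. The class below is a minimal, purely hypothetical stand-in written only to make that assumed interface concrete; it is not the library's implementation of chosenProgressReporter.

# Hypothetical minimal reporter illustrating the interface the examples assume;
# the real chosenProgressReporter is provided by the library itself.
class PlainReporterSketch:
	def __init__(self, total: int, title: str) -> None:
		self.total = total
		self.title = title
		self.done = 0

	def __enter__(self) -> "PlainReporterSketch":
		print(self.title, "...")
		return self

	def __exit__(self, *exc) -> None:
		print(self.title, "done:", self.done, "/", self.total)

	def report(self, item) -> None:
		# one call per processed item advances the counter
		self.done += 1
		print("[", self.done, "/", self.total, "]", item)


with PlainReporterSketch(2, "Patching") as pb:
	for p in ("a.patch", "b.patch"):  # hypothetical items, for illustration only
		pb.report(p)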
Example #2
    def __call__(self,
                 pkg: PackageInstalledFiles,
                 root: typing.Optional[Path] = None):
        if root is None:
            root = pkg.root / "subpackages"
        childPkg = getPackageInstalledFilesFromRefAndParent(self.ref, root)

        with chosenProgressReporter(len(self.actions), "Doing actions for " +
                                    str(childPkg.ref)) as pb:
            for a in self.actions:
                #pb.print(str(a))
                a(pkg, childPkg)
                pb.report(str(a))
        assert childPkg.filesTracker.filesAndSymlinks
        return childPkg
Example #3
def runTestsForGenerator(tests, runner, transpilationResult):
	"""Runs tests for a transpiled grammar using a specific runner (usually associated to a backend)."""
	compiler = parsersFactoriesAndCompilersPool(runner.COMPILER)
	parserFactory = parsersFactoriesAndCompilersPool(runner.PARSER)

	compiled = compiler.compileStr(transpilationResult.text, "python")
	parser = parserFactory.fromInternal(compiled)

	with chosenProgressReporter(len(tests), "testing") as pb:
		for i, test in enumerate(tests):
			try:
				parser(test)
				pb.report((test if len(test) < 10 else ("test " + str(i))))
			except BaseException as ex:  # pylint: disable=broad-except
				print(repr(test), file=pb)
				print(ex, file=pb)
Example #4
def runTests(generatorsToToolsMapping, fileResMapping, toolsCount):
	"""Runs tests for transpiled grammars."""
	print()

	for f, transpiled in fileResMapping.items():
		baseDir = f.absolute().parent

		results = transpiled.backendResultMapping
		with chosenProgressReporter(toolsCount, "testing grammars") as pb:
			for generator, transpilationResult in results.items():
				for tool in generatorsToToolsMapping[generator]:
					pb.report(tool.__name__, incr=0, op="testing")
					tests = tuple(transpiled.grammar.tests.getTests(baseDir))
					if tool.RUNNER is None:
						warnings.warn("Runner for " + repr(tool) + " is not yet implemented due to some reasons, you may want to compile manually")
						continue
					runner = runnersPool(tool.RUNNER)
					runTestsForGenerator(tests, runner, transpilationResult)
					pb.report(tool.__name__, incr=1, op="tested")
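Examples #3 through #6 also rely on two extras: report() accepts incr= and op= keyword arguments (incr=0 updates the displayed operation without advancing the counter), and the reporter doubles as a text sink, either through pb.print(...) or through print(..., file=pb). The standalone sketch below is a hypothetical illustration of that extended interface, inferred only from these call sites; the real signatures may differ.

# Hypothetical sketch of the extended reporter interface, inferred from the
# call sites on this page; not the library's actual implementation.
import sys
import typing


class VerboseReporterSketch:
	def __init__(self, total: int, title: str) -> None:
		self.total = total
		self.title = title
		self.done = 0

	def __enter__(self) -> "VerboseReporterSketch":
		return self

	def __exit__(self, *exc) -> None:
		pass

	def report(self, item, incr: int = 1, op: typing.Optional[str] = None) -> None:
		# incr=0 refreshes the status line (e.g. op="testing") without advancing
		self.done += incr
		label = (op + " ") if op else ""
		print("[", self.done, "/", self.total, "]", label + str(item))

	def write(self, text: str) -> None:
		# file-like sink, so that print(..., file=pb) works (Example #3)
		sys.stderr.write(text)

	def print(self, *args) -> None:
		# convenience wrapper used in Example #5
		print(*args, file=self)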
Example #5
    def generateRepo(self) -> None:
        oldPath = Path.cwd()
        oldDescr = os.open(oldPath, os.O_RDONLY)
        rootDescr = None
        try:
            print("self.packages2add", self.packages2add)
            rootDescr = os.open(self.root, os.O_RDONLY)
            os.fchdir(rootDescr)

            packagesPaths = []
            for pkg in self.packages2add:
                if isinstance(pkg, Path):
                    pkgPath = pkg
                else:
                    pkgPath = pkg.builtPath
                packagesPaths.append(pkgPath)

            repreproCmds = getRepreproCmds(self.root.parent,
                                           *packagesPaths,
                                           _cwd=self.root)
            repreproCmds["export"]()
            repreproCmds["createSymlinks"]()

            with chosenProgressReporter(len(packagesPaths),
                                        "Publishing as a deb repo") as pb:
                for pkgPath in packagesPaths:
                    pb.print(
                        styles.operationName("adding") + " " +
                        styles.varContent(str(pkgPath)))
                    for r in self.releases:
                        for cn in r.codenames:
                            repreproCmds["removePackage"](cn, pkgPath.stem)
                            repreproCmds["includeDeb"](cn, pkgPath)
                    pb.report(pkgPath)

            self.packages2add = []
        finally:
            # restore the original working directory and close the descriptors
            # even if publishing fails
            if rootDescr is not None:
                os.close(rootDescr)
            os.fchdir(oldDescr)
            os.close(oldDescr)
Example #6
	def main(self, backends="all", *files: cli.ExistingFile):  # pylint:disable=keyword-arg-before-vararg,arguments-differ
		outDir = Path(self.outDir).absolute()
		b = ParserBundle(outDir)

		generatorsToToolsMapping, transpiledFiles, toolsCount = self.prepare(backends, *files)

		for transpiled in transpiledFiles.values():
			for generator, transpiledResult in transpiled.backendResultMapping.items():
				for tool in generatorsToToolsMapping[generator]:
					if not issubclass(tool.RUNNER, NotYetImplementedRunner):
						runner = runnersPool(tool.RUNNER)
						compiler = parsersFactoriesAndCompilersPool(runner.COMPILER)
						compiled = compiler.compileStr(transpiledResult.text, "python")

						runner.saveCompiled(compiled, b.grammars[transpiledResult.id], generator.META)
					else:
						warnings.warn("Runner for " + repr(tool) + " is not yet implemented due to some reasons, you may want to compile manually")

		#b.initGenerators()

		with chosenProgressReporter(len(files), "compiling for backends") as pb:
			for f in files:
				f = Path(f)
				baseDir = f.absolute().parent

				pb.report(str(f), incr=0, op="generating wrapper")
				g = parseUniGrammarFile(f)
				sourceAST, caplessSchema, iterlessSchema = WrapperGen.transpile(g)

				thisR = b.grammars[g.meta.id]
				thisR.capSchema = caplessSchema
				thisR.iterSchema = list(iterlessSchema)
				thisR.wrapperAST = sourceAST

				#pb.report(str(f), incr=0, op="benchmarking")
				tests = g.tests.getTests(baseDir)
				sampleToBench = tuple(tests)[-1]
				#thisR.benchmarkAndUpdate(sampleToBench)
				pb.report(str(f), incr=1)

		b.save()
Example #7
    def createDB(self):
        self.trieWasModified = True

        pkgCount, pkgs = self.populator()
        with chosenProgressReporter(pkgCount, "Populating database") as pb:
            for fi in pkgs:
                name = fi.ref.name
                arch = fi.ref.arch
                archId = self.getArch_(arch)
                if archId is None:
                    archId = self.insertArch_(arch)

                pkgId = self.getPackageByNameAndArch_(name, archId)
                if pkgId is None:
                    if isinstance(fi.ref, VersionedPackageRef):
                        pkgId = self.insertPackage_(name, archId,
                                                    str(fi.ref.version))
                    else:
                        pkgId = self.insertPackage_(name, archId)

                for f in fi.files:
                    self.dt[str(f)] = pkgId
                pb.report(fi.ref)
Example #8
	def extractArchitectureAndDependenciesFromELF(self) -> None:
		for pkg in self.packages:
			assert isinstance(pkg, Package), repr(pkg)
			deps = set()
			
			for groups, extr in dependenciesExtractors:
				if pkg.ref.group in groups:
					print(styles.operationName("extracting") + " " + styles.entity("architecture-dependent info") + " from " + styles.varContent(str(pkg.ref)) + " using " + styles.entity(extr.__class__.__name__) + " ...")
					filez = sorted(pkg.filesTracker.files)
					assert filez
					
					with chosenProgressReporter(len(filez), "extracting info") as pb:
						for fp in filez:
							f = pkg.nest(fp)
							if f.is_file():
								#print(styles.operationName("extracting") + " " + styles.entity("architecture-dependent info") + " from " + styles.entity("file") + ": " + styles.varContent(str(f)))
								archAndDeps = extr(f)
								if archAndDeps:
									pkg.deps = archAndDeps.deps
									pkg.depsResolver = archAndDeps.depsResolver
									resultArch = self.distro.archTransformer(archAndDeps)
									if resultArch:
										if pkg.ref.arch is None:
											pkg.ref.arch = resultArch
										elif pkg.ref.arch != resultArch:
											raise ValueError("Package " + str(pkg.ref) + " contains binaries for different architectures, at least: " + pkg.ref.arch + " and " + resultArch)
							pb.report(fp)
						#print(styles.operationName("extracted") + ": " + styles.varContent(str(pkg.deps)))
				else:
					print(styles.entity(groups) + " " + styles.operationName("skipped") + ": " + styles.varContent(str(pkg.ref)) )

			if pkg.ref.arch is None:
				if pkg.ref.group in archNeutralGroups:
					#pkg.ref.arch = self.distro.archTransformer("any")
					pkg.ref.arch = self.distro.archTransformer("all")  # F**K, `Error looking at 'package.deb': 'any' is not one of the valid architectures: 'amd64'`
				else:
					pkg.ref.arch = self.distro.archTransformer("all")
Example #9
	def buildDistroSpecificPackages(self) -> None:
		with chosenProgressReporter(len(self.distrosStreams), "Building packages (probably lazily)") as pb:
			for s in self.distrosStreams:
				s.buildPackages(self.runConfig.builtDir)
				pb.report(s.distro.name)
Example #10
	def augmentMetadata(self) -> None:
		with chosenProgressReporter(len(self.distrosStreams), "Augmenting metadata") as pb:
			for s in self.distrosStreams:
				s.augmentMetadata()
				pb.report(s.distro.name)