def dump(self):
    """Write two human-readable debug dumps of this source map.

    The first file (mapdumpPath) lists the map version, target/source paths
    and one raw mapping per line; the second (deltaMapdumpPath) lists the
    delta-encoded groups and their segments.
    """
    # Plain overview dump: header fields, then one tab-indented mapping per line.
    with utils.create(self.mapdumpPath) as dumpFile:
        dumpFile.write('mapVersion: {}\n\n'.format(mapVersion))
        dumpFile.write('targetPath: {}\n\n'.format(self.targetPath))
        dumpFile.write('sourcePaths: {}\n\n'.format(self.sourcePaths))
        dumpFile.write('mappings:\n')
        dumpFile.writelines('\t{}\n'.format(aMapping) for aMapping in self.mappings)

    # Delta dump: each group is announced once, then its segments follow.
    with utils.create(self.deltaMapdumpPath) as deltaFile:
        for aGroup in self.deltaMappings:
            deltaFile.write('(New group) ')
            deltaFile.writelines('Segment: {}\n'.format(aSegment) for aSegment in aGroup)
def dump(self):
    """Write two human-readable debug dumps of this source map.

    One file (mapdumpPath) holds the map version, target/source paths and
    the raw mappings; the other (deltaMapdumpPath) holds the delta-encoded
    groups and their segments.
    """
    with utils.create(self.mapdumpPath) as mapdumpFile:
        # Header fields, double-spaced for readability.
        mapdumpFile.write("mapVersion: {}\n\n".format(mapVersion))
        mapdumpFile.write("targetPath: {}\n\n".format(self.targetPath))
        mapdumpFile.write("sourcePaths: {}\n\n".format(self.sourcePaths))
        mapdumpFile.write("mappings:\n")
        # One raw mapping tuple per line, tab-indented.
        for mapping in self.mappings:
            mapdumpFile.write("\t{}\n".format(mapping))
    with utils.create(self.deltaMapdumpPath) as deltaMapdumpFile:
        # Each delta group is announced once, then its segments follow.
        for group in self.deltaMappings:
            deltaMapdumpFile.write("(New group) ")
            for segment in group:
                deltaMapdumpFile.write("Segment: {}\n".format(segment))
def cascadeAndSaveMiniMap(self):
    """Compose the shrink map with the pretty map into a mini map and save it.

    Each shrink mapping's source location indexes into self.prettyMappings
    to recover the original (pretty) source location.  When self.dump is
    truthy, every composed mapping is also logged to a cascade dump file.
    """
    def getCascadedMapping(shrinkMapping):  # N.B. self.prettyMappings has to be sorted in advance
        # Clamp the lookup index so a shrink mapping past the end of the
        # pretty map still resolves to the last pretty mapping.
        prettyMapping = self.prettyMappings[min(shrinkMapping[iSourceLine], len(self.prettyMappings) - 1)]
        result = (
            shrinkMapping[:iTargetColumn + 1]   # Target location from shrink mapping
            +
            prettyMapping[iSourceIndex:]        # Source location from self
        )
        if self.dump:
            self.cascadeMapdumpFile.write('{} {} {}\n'.format(result, shrinkMapping, prettyMapping))
        return result

    if self.dump:
        # NOTE(review): the dump file is opened before the comprehension and
        # closed after it; getCascadedMapping writes into it in between.
        self.cascadeMapdumpFile = utils.create(f'{self.targetDir}/{self.moduleName}.cascade_map_dump')

    self.miniMappings = [
        getCascadedMapping(shrinkMapping)
        for shrinkMapping in self.shrinkMappings
    ]
    self.miniMappings.sort()
    self.save(self.miniMappings, '')

    if self.dump:
        self.cascadeMapdumpFile.close()
def generateMap (self) :
    """Write an indexed ('sections' style) source map covering all modules.

    Each module with a raw map contributes a section whose offset is the
    line where that module's code starts in the joined target file.
    """
    # First module's code starts after a fixed 4-line target file header.
    startLineNr = 4
    rawSections = []
    for module in self.allModules:
        # Skip the caption lines written above each module's code.
        startLineNr += self.moduleCaptionSkip
        if module.rawMap:
            rawSections.append (collections.OrderedDict ([
                ('offset', collections.OrderedDict ([
                    ('line', startLineNr),
                    ('column', 0)
                ])),
                ('map', module.rawMap)
            ]))
            # Copy the map's source file next to the main module's maps if it
            # lives in a different map directory.
            # NOTE(review): reconstructed nesting — this copy appears to apply
            # only to modules that have a rawMap; confirm against history.
            if module.metadata.mapDir != self.moduleDict [self.mainModuleName] .metadata.mapDir:
                shutil.copy (module.metadata.mapSourcePath, self.moduleDict [self.mainModuleName] .metadata.mapDir)
        # Advance past this module's target code, whether or not it had a map.
        startLineNr += module.targetCode.count ('\n')
    with utils.create (self.mapPath) as aFile:
        aFile.write (json.dumps (collections.OrderedDict ([
            ('version', mapVersion),
            ('file', self.targetPath),
            ('sections', rawSections)
        ]), indent = '\t'))
def dumpDeltaMap(self, deltaMappings, infix):
    """Write a readable dump of *deltaMappings* to <module><infix>.delta_map_dump."""
    dumpPath = f'{self.targetDir}/{self.moduleName}{infix}.delta_map_dump'
    with utils.create(dumpPath) as dumpFile:
        # One '(New group) ' marker per group, then its segments, one per line.
        for aGroup in deltaMappings:
            dumpFile.write('(New group) ')
            dumpFile.writelines('Segment: {}\n'.format(aSegment) for aSegment in aGroup)
def compile (self):
    """Compile the main module and its imports, join everything into one
    JavaScript target file, and optionally minify it.

    Side effects: sets the *ModuleName attributes, fills self.moduleDict via
    recursive Module construction, and writes the .js (and .min.js) files.
    """
    # Define names early, since they are cross-used in module compilation
    prefix = 'org.{}'.format (__base__.__envir__.transpilerName)
    self.coreModuleName = '{}.{}'.format (prefix, '__core__')
    self.baseModuleName = '{}.{}'.format (prefix, '__base__')
    self.standardModuleName = '{}.{}'.format (prefix, '__standard__')
    self.builtinModuleName = '{}.{}'.format (prefix, '__builtin__')
    self.mainModuleName = self.sourceFileName [ : -3]  # Strip the '.py' extension

    # Module compilation: the four runtime modules first, in fixed order.
    Module (self, ModuleMetadata (self, self.coreModuleName))
    Module (self, ModuleMetadata (self, self.baseModuleName))
    Module (self, ModuleMetadata (self, self.standardModuleName))
    Module (self, ModuleMetadata (self, self.builtinModuleName))
    try:
        moduleMetadata = ModuleMetadata (self, self.mainModuleName)
        Module (self, moduleMetadata)  # Will trigger recursive compilation
    except Exception as exception:
        # Let utils attach context to the compilation error.
        utils.enhanceException (
            exception,
            message = str (exception)
        )

    # Join all non-inline modules (everything except the four runtime modules
    # and the main module), in sorted name order for determinism.
    normallyImportedTargetCode = ''.join ([
        self.moduleDict [moduleName] .targetCode
        for moduleName in sorted (self.moduleDict)
        if not moduleName in (self.coreModuleName, self.baseModuleName, self.standardModuleName, self.builtinModuleName, self.mainModuleName)
    ])

    # And sandwich them between the in-line modules: everything is wrapped in
    # one function named after the main module, which returns __all__ and is
    # immediately called, its result stored on window.
    targetCode = (
        self.header +
        'function {} () {{\n'.format (self.mainModuleName) +
        self.moduleDict [self.coreModuleName].targetCode +
        self.moduleDict [self.baseModuleName] .targetCode +
        self.moduleDict [self.standardModuleName] .targetCode +
        self.moduleDict [self.builtinModuleName].targetCode +
        normallyImportedTargetCode +
        self.moduleDict [self.mainModuleName].targetCode +
        ' return __all__;\n' +
        '}\n' +
        'window [\'{0}\'] = {0} ();\n'.format (self.mainModuleName)
    )
    targetFileName = '{}/{}.js'.format ('{}/{}'.format (self.sourceDir, __base__.__envir__.targetSubDir), self.mainModuleName)
    utils.log (False, 'Saving result in: {}\n', targetFileName)
    with utils.create (targetFileName) as aFile:
        aFile.write (targetCode)

    # Minified companion file, skipped when --nomin was passed.
    miniFileName = '{}/{}/{}.min.js'.format (self.sourceDir, __base__.__envir__.targetSubDir, self.mainModuleName)
    utils.log (False, 'Saving minified result in: {}\n', miniFileName)
    if not utils.commandArgs.nomin:
        minify.run (targetFileName, miniFileName)
def dumpMap(self, mappings, infix, sourceExtension):
    """Write a readable dump of *mappings* to <module><infix>.map_dump.

    The header names the map version, the target file and the source file
    (module name + infix + sourceExtension); the mappings follow one per line.
    """
    dumpPath = f'{self.targetDir}/{self.moduleName}{infix}.map_dump'
    with utils.create(dumpPath) as dumpFile:
        dumpFile.write(f'mapVersion: {mapVersion}\n\n')
        dumpFile.write(f'targetPath: {self.moduleName}.js\n\n')
        dumpFile.write(f'sourcePath: {self.moduleName}{infix}{sourceExtension}\n\n')
        dumpFile.write('mappings:\n')
        dumpFile.writelines(f'\t{aMapping}\n' for aMapping in mappings)
def cascade(self, shrinkMap, miniMap):  # Result in miniMap
    """Compose this map with *shrinkMap*, storing the result in *miniMap*."""
    self.mappings.sort()
    # Dump file must be open while getCascadedMapping runs, since it writes there.
    self.cascadeMapdumpFile = utils.create(miniMap.cascadeMapdumpPath)
    cascadedMappings = []
    for shrinkMapping in shrinkMap.mappings:
        cascadedMappings.append(self.getCascadedMapping(shrinkMapping))
    miniMap.mappings = cascadedMappings
    self.cascadeMapdumpFile.close()
    # The mini map refers to the same original sources as this map.
    miniMap.sourcePaths = self.sourcePaths
    miniMap.sourceCodes = self.sourceCodes
def save(self):
    """Delta-encode self.mappings, build the raw source map and write it.

    Groups are split on target line changes; within a group the first
    segment carries an absolute target column, subsequent segments carry
    deltas, and source index/line/column are always deltas (source map v3
    semantics).  Writes JSON to self.mapPath and, with --dmap, a debug dump.
    """
    # Fix: removed unused locals targetColumnShift / sourceLineShift /
    # sourceColumnShift, which were assigned but never read.
    self.rawMappings = []
    self.mappings.sort()
    self.deltaMappings = []
    oldMapping = [-1, 0, 0, 0, 0]  # Sentinel: guarantees the first mapping opens a group
    for mapping in self.mappings:
        newGroup = mapping[iTargetLine] != oldMapping[iTargetLine]

        if newGroup:
            self.deltaMappings.append([])  # Append new group

        self.deltaMappings[-1].append([])  # Append new segment, one for each mapping

        if newGroup:
            self.deltaMappings[-1][-1].append(mapping[iTargetColumn])  # Only target column reset for every group
        else:
            self.deltaMappings[-1][-1].append(mapping[iTargetColumn] - oldMapping[iTargetColumn])

        # Others are delta's, so cumulative
        for i in [iSourceIndex, iSourceLine, iSourceColumn]:
            self.deltaMappings[-1][-1].append(mapping[i] - oldMapping[i])

        oldMapping = mapping

    self.rawMap = collections.OrderedDict(
        [
            ("version", mapVersion),
            ("file", self.targetPath),
            ("sources", self.sourcePaths),
            ("sourcesContent", self.sourceCodes),
            (
                "mappings",
                ";".join(
                    [
                        ",".join([base64VlqConverter.encode(segment) for segment in group])
                        for group in self.deltaMappings
                    ]
                ),
            ),
        ]
    )
    with utils.create(self.mapPath) as mapFile:
        mapFile.write(json.dumps(self.rawMap, indent="\t"))
    if utils.commandArgs.dmap:
        self.dump()
def cascade(self, shrinkMap, miniMap):  # Result in miniMap
    """Compose this (pretty) map with *shrinkMap* into *miniMap*."""
    self.mappings.sort()
    # Open the dump file first: getCascadedMapping writes into it.
    self.cascadeMapdumpFile = utils.create(miniMap.cascadeMapdumpPath)
    miniMap.mappings = list(map(self.getCascadedMapping, shrinkMap.mappings))
    self.cascadeMapdumpFile.close()
    # Mini map shares this map's original sources.
    miniMap.sourcePaths = self.sourcePaths
    miniMap.sourceCodes = self.sourceCodes
def save(self):
    """Delta-encode self.mappings, build the raw source map and write it.

    Groups split on target line changes; the first segment of a group has an
    absolute target column, later segments and all source fields are deltas
    (source map v3 semantics).  Writes JSON to self.mapPath and, when the
    --dmap command-line flag is set, a human-readable dump.
    """
    # Fix: removed unused locals targetColumnShift / sourceLineShift /
    # sourceColumnShift, which were assigned but never read.
    self.rawMappings = []
    self.mappings.sort()
    self.deltaMappings = []
    oldMapping = [-1, 0, 0, 0, 0]  # Sentinel: first mapping always opens a group
    for mapping in self.mappings:
        newGroup = mapping[iTargetLine] != oldMapping[iTargetLine]
        if newGroup:
            self.deltaMappings.append([])  # Append new group
        self.deltaMappings[-1].append(
            [])  # Append new segment, one for each mapping
        if newGroup:
            self.deltaMappings[-1][-1].append(
                mapping[iTargetColumn]
            )  # Only target column reset for every group
        else:
            self.deltaMappings[-1][-1].append(
                mapping[iTargetColumn] - oldMapping[iTargetColumn]
            )  # Others are delta's, so cumulative
        for i in [iSourceIndex, iSourceLine, iSourceColumn]:
            self.deltaMappings[-1][-1].append(mapping[i] - oldMapping[i])
        oldMapping = mapping
    self.rawMap = collections.OrderedDict([
        ('version', mapVersion),
        ('file', self.targetPath),
        ('sources', self.sourcePaths),
        ('sourcesContent', self.sourceCodes),
        ('mappings', ';'.join([
            ','.join(
                [base64VlqConverter.encode(segment) for segment in group])
            for group in self.deltaMappings
        ]))
    ])
    with utils.create(self.mapPath) as mapFile:
        mapFile.write(json.dumps(self.rawMap, indent='\t'))
    if utils.commandArgs.dmap:
        self.dump()
def save(self, mappings, infix):
    """Delta-encode *mappings* and write a source map to <module><infix>.map.

    Groups split on target line changes; the first segment of a group has an
    absolute target column, later segments and all source fields are deltas
    (source map v3 semantics).  With self.dump set, readable dumps of both
    the raw and the delta mappings are written as well.
    """
    deltaMappings = []
    oldMapping = [-1, 0, 0, 0, 0]  # Sentinel: first mapping always opens a group
    for mapping in mappings:
        newGroup = mapping[iTargetLine] != oldMapping[iTargetLine]
        if newGroup:
            deltaMappings.append([])  # Append new group
        deltaMappings[-1].append(
            [])  # Append new segment, one for each mapping
        if newGroup:
            deltaMappings[-1][-1].append(
                mapping[iTargetColumn]
            )  # Only target column reset for every group
        else:
            deltaMappings[-1][-1].append(
                mapping[iTargetColumn] - oldMapping[iTargetColumn]
            )  # Others are delta's, so cumulative
        for i in [iSourceIndex, iSourceLine, iSourceColumn]:
            deltaMappings[-1][-1].append(mapping[i] - oldMapping[i])
        oldMapping = mapping
    rawMap = collections.OrderedDict([
        ('version', mapVersion),
        ('file', f'{self.moduleName}.js'),  # Target
        ('sources', [f'{self.moduleName}{infix}.py']),
        # ('sourcesContent', [None]),
        ('mappings', ';'.join([
            ','.join(
                [base64VlqConverter.encode(segment) for segment in group])
            for group in deltaMappings
        ]))
    ])
    with utils.create(
            f'{self.targetDir}/{self.moduleName}{infix}.map') as mapFile:
        mapFile.write(json.dumps(rawMap, indent='\t'))
    if self.dump:
        # Debug dumps mirror the map just written; sources are .py files here.
        self.dumpMap(mappings, infix, '.py')
        self.dumpDeltaMap(deltaMappings, infix)
def dump (self):
    """Write an indented text dump of this module's parse tree to <filePrename>.tree."""
    utils.log (False, 'Dumping syntax tree of module: {}\n', self.metadata.sourcePath)

    def walk (name, value, tabLevel):
        # Emit one line per node: indentation, field name and node type.
        self.treeFragments.append ('\n{0}{1}: {2} '.format (tabLevel * '\t', name, type (value).__name__))
        if isinstance (value, ast.AST):
            # Recurse into each named field of the AST node.
            for fieldName, fieldValue in ast.iter_fields (value):
                walk (fieldName, fieldValue, tabLevel + 1)
        elif isinstance (value, list):
            # Lists get one child line per element.
            for element in value:
                walk ('element', element, tabLevel + 1)
        else:
            # Leaf value: append it inline after the type name.
            self.treeFragments.append ('= {0}'.format (value))

    self.treeFragments = []
    walk ('file', self.parseTree, 0)
    # Drop the leading newline produced by the first fragment.
    self.textTree = ''.join (self.treeFragments) [1:]
    with utils.create ('{}/{}.tree'.format (self.metadata.targetDir, self.metadata.filePrename)) as treeFile:
        treeFile.write (self.textTree)
def generateMap (self, fake = False):
    """Write this module's source map and copy its source next to the map.

    With fake == True every target line maps 1:1 onto the source ('AACA'
    per line); otherwise the mappings encode the line deltas recorded in
    self.sourceLineNrs as base64 VLQ values.
    """
    self.rawMap = collections.OrderedDict ([
        ('version', mapVersion),
        ('file', self.metadata.targetPath),
        ('sources', [self.metadata.mapSourceFileName]),
        ('mappings', (
            # Fake map: one identity segment per target line.
            ';'.join (['AACA'] * self.targetCode.count ('\n'))
        ) if fake else (
            ';'.join ([
                'AA{}A'.format (getBase64Vlq (sourceLineNrDelta))  # Adapted to the quirks of Google Chrome and source maps in general
                for sourceLineNrDelta in [
                    # Start with offset from second line w.r.t. first line
                    self.sourceLineNrs [index + 1] - self.sourceLineNrs [index]
                    for index in range (len (self.sourceLineNrs) - 1)  # One entry less than the nr of lines
                ]
            ])
        ))
    ])
    with utils.create (self.metadata.mapPath) as aFile:
        aFile.write (json.dumps (self.rawMap, indent = '\t'))
    # Place a copy of the source where the browser expects the map's source.
    shutil.copyfile (self.metadata.sourcePath, self.metadata.mapSourcePath)
def generate (self):
    """Generate this module's target code and write it to its target path."""
    utils.log (False, 'Generating code for module: {}\n', self.metadata.targetPath)
    # The Generator visitor produces code fragments; join them into one string.
    fragments = Generator (self) .targetFragments
    self.targetCode = ''.join (fragments)
    with utils.create (self.metadata.targetPath) as targetFile:
        targetFile.write (self.targetCode)
def saveJavascript (self):
    """Write this module's precompiled JavaScript to its target path."""
    utils.log (False, 'Saving precompiled module: {}\n', self.metadata.targetPath)
    with utils.create (self.metadata.targetPath) as outFile:
        outFile.write (self.targetCode)