def testOk(self):
    """Tests that no problems are signaled when all registers are valid
    and there are no references to nonexistent registers."""
    proc = processor.Processor('test', '0')
    regBank = processor.RegisterBank('RB', 30, 32)
    proc.addRegBank(regBank)
    cpsrBitMask = {'N': (31, 31), 'Z': (30, 30), 'C': (29, 29), 'V': (28, 28),
                   'I': (7, 7), 'F': (6, 6), 'mode': (0, 4)}
    cpsr = processor.Register('CPSR', 32, cpsrBitMask)
    cpsr.setDefaultValue(0x000000D3)
    proc.addRegister(cpsr)
    regs = processor.AliasRegBank('REGS', 16, 'RB[0-15]')
    proc.addAliasRegBank(regs)
    abi = processor.ABI('REGS[0]', 'REGS[0-3]', 'RB[15]')
    abi.addVarRegsCorrespondence({'REGS[0-15]': (0, 15), 'CPSR': (25, 25)})
    proc.setABI(abi)
    dataProc_imm_shift = isa.MachineCode([('cond', 4), ('zero', 3), ('opcode', 4),
                                          ('s', 1), ('rn', 4), ('rd', 4),
                                          ('shift_amm', 5), ('shift_op', 2),
                                          ('zero', 1), ('rm', 4)])
    dataProc_imm_shift.setVarField('rn', ('REGS', 0))
    dataProc_imm_shift.setVarField('rd', ('RB', 0))
    dataProc_imm_shift.setVarField('rm', ('REGS', 0))
    isaVar = isa.ISA()
    proc.setISA(isaVar)
    opCode = cxx_writer.Code('')
    adc_shift_imm_Instr = isa.Instruction('ADC_si', True)
    adc_shift_imm_Instr.setMachineCode(dataProc_imm_shift,
                                       {'opcode': [0, 1, 0, 1]}, 'TODO')
    isaVar.addInstruction(adc_shift_imm_Instr)
    # Call the check functions: they raise exceptions if there is a problem.
    proc.checkAliases()
    proc.checkABI()
    proc.isa.checkRegisters(processor.extractRegInterval, proc.isRegExisting)
def identify():
    processor_obj = processor.Processor(files=files_to_check)
    # Saving individual matches is necessary for plotting them later.
    processor_obj.process_bfm(save_matches=True, save_kps=True)
    # processor_obj.process_flann(return_matches=True, return_kps=True)  # Saves the paired features
    processor_obj.print_matches(save=False)
def part1():
    proc = processor.Processor(program)
    # We use a defaultdict rather than a list because we don't know which way
    # the painting robot will move. If it moves left (or up) from the starting
    # position, the coordinates become negative; trying to use those as an
    # index into a list (or array) would fail. With a defaultdict, accessing a
    # coordinate that has not been set up previously won't crash the program:
    # it just creates the new item with a default value of 0.
    wall = defaultdict(int)
    painted = set()  # the set of (unique) panels that have been painted
    currentPos = [0, 0]  # we could start anywhere; 0,0 is arbitrary
    # x,y coordinate changes for Up, Right, Down, Left:
    directions = ([0, -1], [1, 0], [0, 1], [-1, 0])
    direction = 0  # we start facing up (the first entry in directions)
    paintWall(wall, currentPos, 0)  # paint the current panel black
    while True:  # keep going as long as the processor has not finished
        # Run the processor with the current panel's colour as input. The
        # output o holds two values: the colour to paint and the turn to make.
        o = proc.run([readWall(wall, currentPos)])
        paintWall(wall, currentPos, o[0])
        # Record this panel's coordinate so we can count the painted panels.
        painted.add(str(currentPos[0]) + ":" + str(currentPos[1]))
        if o[1] == 0:  # 0 means turn left, one step down the directions list,
            o[1] = -1  # but adding zero would do nothing, so we want -1
        direction = (direction + o[1]) % 4  # move up or down the directions list
        currentPos[0] += directions[direction][0]  # apply the coordinate change
        currentPos[1] += directions[direction][1]
        if proc.runState == 0:  # the processor's run state is zero when it halts
            break
    print(len(painted))
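# Hedged sketch of the paintWall/readWall helpers that part1() above and
# part2() below rely on; their real implementations are not shown in these
# snippets. Given that wall is a defaultdict(int), they plausibly look like:
def paintWall(wall, pos, colour):
    wall[(pos[0], pos[1])] = colour  # record the colour painted at this panel


def readWall(wall, pos):
    return wall[(pos[0], pos[1])]  # unvisited panels default to 0 (black)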
def GetInputs(args):
    parser = argparse.ArgumentParser()
    parser.add_argument("sources", nargs=argparse.ONE_OR_MORE)
    parser.add_argument("-d", "--depends", nargs=argparse.ZERO_OR_MORE, default=[])
    parser.add_argument("-e", "--externs", nargs=argparse.ZERO_OR_MORE, default=[])
    opts = parser.parse_args(args)

    # TODO(twellington): resolve dependencies for multiple sources.
    if len(opts.sources) == 1:
        depends, externs = resolve_recursive_dependencies(
            os.path.normpath(os.path.join(os.getcwd(), opts.sources[0])),
            opts.depends, opts.externs)
    else:
        depends = opts.depends
        externs = set(opts.externs)

    files = set()
    for file in set(opts.sources) | set(depends) | externs:
        files.add(file)
        files.update(processor.Processor(file).included_files)
    return files
def check(self, source_file, out_file=None, depends=None, externs=None,
          runner_args=None, closure_args=None):
    """Closure compile |source_file| while checking for errors.

    Args:
        source_file: A file to check.
        out_file: A file where the compiled output is written to.
        depends: Files that |source_file| requires to run (e.g. earlier <script>).
        externs: @extern files that inform the compiler about custom globals.
        runner_args: Arguments passed to runner.jar.
        closure_args: Arguments passed directly to the Closure compiler.

    Returns:
        (found_errors, stderr) A boolean indicating whether errors were found
        and the raw Closure compiler stderr (as a string).
    """
    self._log_debug("FILE: %s" % source_file)

    if source_file.endswith("_externs.js"):
        self._log_debug("Skipping externs: %s" % source_file)
        return

    self._file_arg = source_file

    cwd, tmp_dir = os.getcwd(), tempfile.gettempdir()
    rel_path = lambda f: os.path.join(os.path.relpath(cwd, tmp_dir), f)

    depends = depends or []
    includes = [rel_path(f) for f in depends + [source_file]]
    contents = ['<include src="%s">' % i for i in includes]
    meta_file = self._create_temp_file("\n".join(contents))
    self._log_debug("Meta file: %s" % meta_file)

    self._processor = processor.Processor(meta_file)
    self._expanded_file = self._create_temp_file(self._processor.contents)
    self._log_debug("Expanded file: %s" % self._expanded_file)

    errors, stderr = self._run_js_check([self._expanded_file],
                                        out_file=out_file, externs=externs,
                                        runner_args=runner_args,
                                        closure_args=closure_args)
    filtered_errors = self._filter_errors(errors)
    cleaned_errors = map(self._clean_up_error, filtered_errors)
    output = self._format_errors(cleaned_errors)

    if cleaned_errors:
        prefix = "\n" if output else ""
        self._log_error("Error in: %s%s%s" % (source_file, prefix, output))
    elif output:
        self._log_debug("Output: %s" % output)

    self._nuke_temp_files()
    return bool(cleaned_errors), stderr
def __init__(self):
    self.__title = "Chip-8 Emulator"
    self.__display = display.Display()
    self.__keyboard = keyboard.Keyboard()
    self.__processor = processor.Processor(self.__display, self.__keyboard)
    self.__isRunning = False
    self.__debugMode = True
    self.__display.setTitle(self.__title)
def setup(this):
    print("\n-----Setting up!-----\n")
    this.pcs = processor.Processor()
    this.pcs.entities.setdefault("!persona", [persona.lower(), ])
    this.p = pyaudio.PyAudio()
    subprocess.call('~/Esther/bluetooth/autopair', shell=True)
    print("-----Setup Finished!-----\n")
    print("------------------------")
def GenerateCode(self, targetRegister):
    if self.parameters[0] == 'random':
        p = processor.Processor()
        randomRange = p.wordsize
        return 'bitFlip(%s, randomRange(%s))' % (targetRegister, randomRange)
    else:
        return 'bitFlip(%s, %s)' % (targetRegister, self.parameters[0])
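# Example of the code strings this emits, assuming a hypothetical 32-bit
# wordsize and a target register named R3 (both illustrative, not from the
# source):
#   parameters == ['random'] -> 'bitFlip(R3, randomRange(32))'
#   parameters == ['5']      -> 'bitFlip(R3, 5)'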
def __init__(self, data_frames, options):
    self.options = options
    self.data_frames = data_frames
    self.data_processor = processor.Processor()
    # Map plotting modes to their plotting functions.
    self.plot_functions = {
        plot_options.valid_modes['highs']: self.__plot_highs__
    }
def get_processor(mode):
    mode = int(mode)
    if mode == 1:
        return processor.Processor()
    elif mode == 2:
        return processor.Highlight()
    elif mode == 3:
        return processor.Cookbook()
    elif mode == 4:
        return processor.Instructions()
    elif mode == 5:
        return processor.Stockphoto()
    elif mode == 6:
        return processor.TreeOfCodes()
    elif mode == 7:
        return processor.TVL()
    elif mode == 8:
        return processor.TGP()
def main():
    setup_logging()
    if CONSOLE_ARGS.apikey:
        refresh_all()
    else:
        logging.info('Beginning rom processing')
        p = processor.Processor()
        p.make_top_list()
    return
def testABIReg(self):
    """Tests that an exception is raised in case the ABI refers to a
    nonexistent register."""
    proc = processor.Processor('test', '0')
    regBank = processor.RegisterBank('RB', 30, 32)
    proc.addRegBank(regBank)
    cpsrBitMask = {'N': (31, 31), 'Z': (30, 30), 'C': (29, 29), 'V': (28, 28),
                   'I': (7, 7), 'F': (6, 6), 'mode': (0, 4)}
    cpsr = processor.Register('CPSR', 32, cpsrBitMask)
    cpsr.setDefaultValue(0x000000D3)
    proc.addRegister(cpsr)
    regs = processor.AliasRegBank('REGS', 16, 'RB[0-15]')
    proc.addAliasRegBank(regs)
    PC = processor.AliasRegister('PC', 'REGS[15]')
    proc.addAliasReg(PC)
    abi = processor.ABI('REGS[0]', 'REGS[0-3]', 'RB[15]')
    abi.addVarRegsCorrespondence({'REGS[0-15]': (0, 15),
                                  'UNEXISTING': (25, 25)})
    proc.setABI(abi)
    dataProc_imm_shift = isa.MachineCode([('cond', 4), ('zero', 3), ('opcode', 4),
                                          ('s', 1), ('rn', 4), ('rd', 4),
                                          ('shift_amm', 5), ('shift_op', 2),
                                          ('zero', 1), ('rm', 4)])
    dataProc_imm_shift.setVarField('rn', ('REGS', 0))
    dataProc_imm_shift.setVarField('rd', ('RB', 0))
    dataProc_imm_shift.setVarField('rm', ('REGS', 0))
    isaVar = isa.ISA()
    proc.setISA(isaVar)
    opCode = cxx_writer.writer_code.Code('')
    adc_shift_imm_Instr = isa.Instruction('ADC_si', True)
    adc_shift_imm_Instr.setMachineCode(dataProc_imm_shift,
                                       {'opcode': [0, 1, 0, 1]}, 'TODO')
    isaVar.addInstruction(adc_shift_imm_Instr)
    # Then I call the check functions: they raise exceptions in case there
    # is a problem.
    foundError = False
    proc.isa.checkRegisters(processor.extractRegInterval, proc.isRegExisting)
    proc.checkAliases()
    try:
        proc.checkABI()
    except:
        foundError = True
    self.assert_(foundError)
def GenerateCode(self, targetRegister):
    if self.parameters[0] == 'random':
        p = processor.Processor()
        randomRange = p.wordsize
        print(randomRange)
        return 'stuckAt(%s, randomRange(%s), %s)' % (
            targetRegister, randomRange, self.parameters[1])
    else:
        return 'stuckAt(%s, %s, %s)' % (targetRegister, self.parameters[0],
                                        self.parameters[1])
def js_files_and_deps_in_dir(js_dir):
    found_files = set()
    for root, dirs, files in os.walk(js_dir):
        abs_files = [os.path.abspath(os.path.join(root, f)) for f in files]
        relevant_files = filter(_RELEVANT_JS, abs_files)
        found_files.update(relevant_files)
        for f in relevant_files:
            found_files.update(processor.Processor(f).included_files)
    return found_files
def run(program_file):
    """Run the program in the given file.

    :param program_file: the path to the program file
    """
    proc = processor.Processor()
    program = get_program_from_file(program_file=program_file)
    for line in program:
        line = line.strip()
        proc.perform_instruction(line)
    proc.dump_state()
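# Hedged sketch of get_program_from_file, which run() depends on but which is
# not shown in this snippet; it presumably just returns the program's lines:
def get_program_from_file(program_file):
    with open(program_file) as f:
        return f.readlines()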
def find_rom_list_folder(self):
    """Gets the folder containing the list of games to compare against the
    file list."""
    try:
        p = processor.Processor()
        for c in p.config_data:
            if c['platformID'] == self.platform_id:
                return c['folder']
    except Exception as e:
        logging.exception(str(e))
        raise
    return None
def get_largest_franchise_ids(self):
    """Uses the list of games to find the highest-occurring franchise IDs."""
    franchise_ids_list = []
    p = processor.Processor()
    g = games.Games(self.platform_id)
    game_list = g.get_top_game_list()
    for game in game_list:
        if 'franchise' in game.keys():
            franchise_ids_list.append(game['franchise'])
    sorted_list = sorted(set(franchise_ids_list),
                         key=lambda x: -franchise_ids_list.count(x))
    logging.info('Identified {} franchises for {} platform'.format(
        len(sorted_list), self.platform_name))
    return sorted_list
def check(self, source_file, depends=None, externs=None):
    """Closure compile a file and check for errors.

    Args:
        source_file: A file to check.
        depends: Other files that would be included with a <script> earlier
            in the page.
        externs: @extern files that inform the compiler about custom globals.

    Returns:
        (has_errors, output) A boolean indicating if there were errors and
        the Closure compiler output (as a string).
    """
    depends = depends or []
    externs = externs or set()

    if not self._check_java_path():
        return 1, ""

    self._debug("FILE: %s" % source_file)

    if source_file.endswith("_externs.js"):
        self._debug("Skipping externs: %s" % source_file)
        return

    self._file_arg = source_file

    tmp_dir = tempfile.gettempdir()
    rel_path = lambda f: os.path.join(os.path.relpath(os.getcwd(), tmp_dir), f)

    includes = [rel_path(f) for f in depends + [source_file]]
    contents = ['<include src="%s">' % i for i in includes]
    meta_file = self._create_temp_file("\n".join(contents))
    self._debug("Meta file: %s" % meta_file)

    self._processor = processor.Processor(meta_file)
    self._expanded_file = self._create_temp_file(self._processor.contents)
    self._debug("Expanded file: %s" % self._expanded_file)

    errors, stderr = self.run_js_check([self._expanded_file], externs)

    # Filter out false-positive promise chain errors.
    # See https://github.com/google/closure-compiler/issues/715 for details.
    errors = self._error_filter.filter(errors)

    output = self._format_errors(map(self._fix_up_error, errors))
    if errors:
        prefix = "\n" if output else ""
        self._error("Error in: %s%s%s" % (source_file, prefix, output))
    elif output:
        self._debug("Output: %s" % output)

    return bool(errors), output
def monitor(self):
    self.generateMachineKeys()
    self.dbop = dbOperator.DBOperator(self.ldap, self.product, self.type,
                                      self.cubename, self.numberKeys,
                                      self.stringKeys, self.machineKeys)
    self.dbLock = threading.RLock()
    if self.logfile != '':
        self.statisticTool = processor.Processor(
            self.numberKeys + self.stringKeys, self.timeInterval,
            self.beforeDays)
        threading.Thread(target=self.logHandler).start()
    if len(self.processes) != 0:
        threading.Thread(target=self.processHandler).start()
def __init__(self, socket, client):
    '''Initializes a handler and logs the connection.

    :param socket: Socket on which communication takes place.
    :type socket: socket
    :param client: Client address containing IP and port.
    :type client: tuple
    '''
    log.info("Accepting connection from %s on %s" % (client, socket))
    server.Handler.__init__(self, socket, client)
    log.info("Accepted connection from %s on %s" % (client, self))
    self._processor = processor.Processor()
def __init__(self, main_config, config_parser):
    try:
        self.program_file_name = inp.convert_string_to_string(
            config_parser["Program"]["program_file_name"])
    except KeyError:
        raise Exception("Obsolete, use legacy programs")
    with open(self.program_file_name, "r") as f:
        program_text = f.read()
    self.program = processor.Processor(
        program_text, main_config.variables_config.variables)
    return
def automation():
    d = str(date.today())
    t = datetime.now()

    # Create a massive dataframe for processing.
    x = processor.Processor()
    df = x.mergeLabels()
    y = scraper.Scraper()

    # Separate dataframes: one for days before today and one for today.
    df_test = y.getNewHeadlines().drop_duplicates(keep='first')
    df1 = df[df['date'] == d].drop_duplicates(ignore_index=True)
    df = df[~df['date'].str.contains(d)]

    # Vectorize the phrases and create the classifier.
    counter = CountVectorizer(ngram_range=(2, 3))
    classifier = MultinomialNB()
    counter.fit(df['title'] + df['description'])
    training_counts = counter.transform(df['title'])
    labels = df['label']

    # The vectorized counts of the headlines in df_test.
    headline_counts = counter.transform(df_test['title'])
    headline_counts_ticker = counter.transform(df1['title'])

    # Train the model.
    classifier.fit(training_counts, labels)
    prediction = classifier.predict(headline_counts)
    prediction1 = classifier.predict(headline_counts_ticker)
    chance = 100 * sum(prediction) / len(prediction)
    chanceticker = 100 * sum(prediction1) / len(prediction1)
    totalChance = (chance + chanceticker) / 2

    print('Chances of market going up tomorrow: {0:.2f}%'.format(totalChance))
    print('New Headline Chances: {0:.2f}%'.format(chance))
    print('Ticker Headline Chances: {0:.2f}%'.format(chanceticker))
    print('Prediction New Headline Length: {}'.format(
        np.size(classifier.predict_proba(headline_counts), 0)))
    print('Prediction Ticker Headline Length: {}'.format(
        np.size(classifier.predict_proba(headline_counts_ticker), 0)))

    with open('predictions/predictionsForTomorrow.csv', 'a',
              newline='') as currentCSV:
        writer = csv.writer(currentCSV)
        writer.writerow([d, t, totalChance])
def part2():
    proc = processor.Processor(program)
    wall = defaultdict(int)
    currentPos = [0, 0]
    directions = ([0, -1], [1, 0], [0, 1], [-1, 0])
    direction = 0
    # The only real difference from part1 is that we paint the starting
    # panel white.
    paintWall(wall, currentPos, 1)
    while True:
        o = proc.run([readWall(wall, currentPos)])
        paintWall(wall, currentPos, o[0])
        if o[1] == 0:
            o[1] = -1
        direction = (direction + o[1]) % 4
        currentPos[0] += directions[direction][0]
        currentPos[1] += directions[direction][1]
        if proc.runState == 0:
            break
    printWall(wall)  # and we print the wall at the end
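# Hedged sketch of printWall, which part2() calls but which is not shown in
# these snippets; assuming wall is keyed by (x, y) tuples as in the paintWall
# sketch above, it would render the painted panels roughly like this:
def printWall(wall):
    xs = [p[0] for p in wall]
    ys = [p[1] for p in wall]
    for y in range(min(ys), max(ys) + 1):
        print("".join("#" if wall[(x, y)] else " "
                      for x in range(min(xs), max(xs) + 1)))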
def GetInputs(args):
    parser = argparse.ArgumentParser()
    parser.add_argument("sources", nargs=argparse.ONE_OR_MORE)
    parser.add_argument("-d", "--depends", nargs=argparse.ZERO_OR_MORE, default=[])
    parser.add_argument("-e", "--externs", nargs=argparse.ZERO_OR_MORE, default=[])
    opts = parser.parse_args(args)

    files = set()
    for file in opts.sources + opts.depends + opts.externs:
        files.add(file)
        files.update(processor.Processor(file).included_files)
    return files
def run():
    args = process_args()
    importer = data_importer.DataImporter()
    data_processor = processor.Processor()

    data_frames = []
    for coin in args.coins:
        data_frames.append(importer.get_data(coin))

    if args.info_type:
        results = data_processor.process_simple(data_frames)
        for i in range(0, len(args.coins)):
            print(args.coins[i], 'Info:\n' + str(results[i]))

    if args.plot_type:
        options = plot_options.PlotOptions(args.coins, args.plot_type,
                                           args.show, (args.no_save == False),
                                           args.date_from, args.date_to)
        plot = plotter.Plotter(data_frames, options)
        plot.plot_results()
def GetInputs(args):
    parser = argparse.ArgumentParser()
    parser.add_argument("source", nargs=1)
    parser.add_argument("-d", "--depends", nargs=argparse.ZERO_OR_MORE, default=[])
    parser.add_argument("-e", "--externs", nargs=argparse.ZERO_OR_MORE, default=[])
    opts = parser.parse_args(args)

    source = opts.source[0]
    depends, externs = resolve_recursive_dependencies(
        source, opts.depends, opts.externs)

    files = set()
    for file in {source} | set(depends) | externs:
        files.add(file)
        files.update(processor.Processor(file).included_files)
    return files
    parser.add_argument(
        "--dumb-debug",
        help="adds setwindow commands before each line to help with debugging",
        action="store_true")
    parser.add_argument(
        "--flags",
        help="a list of semicolon separated values to use as flag macros")
    parser.add_argument(
        "--cull",
        help="removes empty rows and comments after processing",
        action="store_true")
    parser.add_argument("--minify", help="removes unnecessary whitespace",
                        action="store_true")
    parser.add_argument("--anticrap", help="remove the \\ and . idiocy",
                        action="store_true")
    parser.add_argument("--nomacros", help="do not expand any macros",
                        action="store_true")
    parser.add_argument("-d", "--filedir", help="file directory")
    return parser


parser = getParser()
args = parser.parse_args()
proc = processor.Processor(args)
proc.process()
def main():
    line_cache = {}

    def js_files_and_deps_in_dir(js_dir):
        found_files = set()
        for root, dirs, files in os.walk(js_dir):
            abs_files = [os.path.abspath(os.path.join(root, f)) for f in files]
            relevant_files = filter(_RELEVANT_JS, abs_files)
            found_files.update(relevant_files)
            for f in relevant_files:
                found_files.update(processor.Processor(f).included_files)
        return found_files

    def num_lines(f):
        f = os.path.abspath(f)
        if f not in line_cache:
            line_cache[f] = len(open(f, 'r').read().splitlines())
        return line_cache[f]

    # All the files that are already compiled.
    compiled = set()
    closure_dir = os.path.join(_HERE, '..')
    root_gyp = os.path.join(closure_dir, 'compiled_resources.gyp')
    root_contents = open(root_gyp, 'r').read()
    gyp_files = literal_eval(root_contents)['targets'][0]['dependencies']

    for g in gyp_files:
        gyp_file = os.path.join(closure_dir, g.replace(':*', ''))
        targets = literal_eval(open(gyp_file, 'r').read())['targets']
        for target in targets:
            gyp_dir = os.path.dirname(gyp_file)
            target_file = os.path.join(gyp_dir, target['target_name'] + '.js')
            compiled.add(os.path.abspath(target_file))
            compiled.update(processor.Processor(target_file).included_files)
            if 'variables' in target and 'depends' in target['variables']:
                depends = target['variables']['depends']
                rel_depends = [os.path.join(gyp_dir, d) for d in depends]
                compiled.update([os.path.abspath(d) for d in rel_depends])

    compiled_lines = sum(map(num_lines, compiled))
    print 'compiled: %d files, %d lines' % (len(compiled), compiled_lines)

    # Find and calculate the line count of all .js files in the wanted or
    # needed resource directories.
    files = set()

    for n in _NEED_TO_COMPILE:
        files.update(js_files_and_deps_in_dir(n))

    need_lines = sum(map(num_lines, files))
    print 'need: %d files, %d lines' % (len(files), need_lines)

    need_done = float(compiled_lines) / need_lines * 100
    print '%.2f%% done with the code we need to compile' % need_done

    for w in _WANT_TO_COMPILE:
        files.update(js_files_and_deps_in_dir(w))

    want_lines = sum(map(num_lines, files))
    print 'want: %d files, %d lines' % (len(files), want_lines)

    want_done = float(compiled_lines) / want_lines * 100
    print '%.2f%% done with the code we want to compile' % want_done
handlers = [console_handler]
logging.basicConfig(level=logging.INFO,
                    format='[%(levelname)s] [%(module)10s] %(message)s',
                    handlers=handlers)

NUM_PEERS = 2
SIM_DURATION = 300

# Create the simulation environment.
env = simpy.Environment()

# Network.
net = network.Network(env, 2)

# Create peers.
nodes = []
teste = env.timeout(200)
for i in range(NUM_PEERS):
    proc = processor.Processor(env, i, 3)
    dri = driver.Driver(net, proc)
    new_peer = peer.Peer(dri, i)
    nodes.append(new_peer)
    env.process(dri.run())

env.run(until=SIM_DURATION)
def run(self, sources, out_file, closure_args=None, custom_sources=False,
        custom_includes=False):
    """Closure compile |sources| while checking for errors.

    Args:
        sources: Files to compile. sources[0] is typically the target file.
            sources[1:] are externs and dependencies in topological order.
            Order is not guaranteed if custom_sources is True.
        out_file: A file where the compiled output is written to.
        closure_args: Arguments passed directly to the Closure compiler.
        custom_sources: Whether |sources| was customized by the target (e.g.
            not in GYP dependency order).
        custom_includes: Whether <include>s are processed when
            |custom_sources| is True.

    Returns:
        (found_errors, stderr) A boolean indicating whether errors were found
        and the raw Closure compiler stderr (as a string).
    """
    is_extern = lambda f: 'externs' in f
    externs_and_deps = [self._POLYMER_EXTERNS]

    if custom_sources:
        if custom_includes:
            # TODO(dbeam): this is fairly hacky. Can we just remove
            # custom_sources soon when all the things kept on life support
            # using it die?
            self._target = sources.pop()
        externs_and_deps += sources
    else:
        self._target = sources[0]
        externs_and_deps += sources[1:]

    externs = filter(is_extern, externs_and_deps)
    deps = filter(lambda f: not is_extern(f), externs_and_deps)

    assert externs or deps or self._target

    self._log_debug("Externs: %s" % externs)
    self._log_debug("Dependencies: %s" % deps)
    self._log_debug("Target: %s" % self._target)

    js_args = deps + ([self._target] if self._target else [])

    process_includes = custom_includes or not custom_sources
    if process_includes:
        # TODO(dbeam): compiler.jar automatically detects "@externs" in a
        # --js arg and moves these files to a different AST tree. However,
        # because we use one big funky <include> meta-file, it thinks all the
        # code is one big externs. Just use --js when <include> dies.
        cwd, tmp_dir = os.getcwd(), tempfile.gettempdir()
        rel_path = lambda f: os.path.join(os.path.relpath(cwd, tmp_dir), f)
        contents = ['<include src="%s">' % rel_path(f) for f in js_args]
        meta_file = self._create_temp_file("\n".join(contents))
        self._log_debug("Meta file: %s" % meta_file)

        self._processor = processor.Processor(meta_file)
        self._expanded_file = self._create_temp_file(self._processor.contents)
        self._log_debug("Expanded file: %s" % self._expanded_file)

        js_args = [self._expanded_file]

    closure_args = closure_args or []
    closure_args += ["summary_detail_level=3", "continue_after_errors"]

    args = ["--externs=%s" % e for e in externs] + \
           ["--js=%s" % s for s in js_args] + \
           ["--%s" % arg for arg in closure_args]

    assert out_file
    out_dir = os.path.dirname(out_file)
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)

    checks_only = 'checks_only' in closure_args
    if not checks_only:
        args += ["--js_output_file=%s" % out_file]

    self._log_debug("Args: %s" % " ".join(args))

    return_code, stderr = self.run_jar(self._compiler_jar, args)

    errors = stderr.strip().split("\n\n")
    maybe_summary = errors.pop()

    summary = re.search(r"(?P<error_count>\d+).*error.*warning",
                        maybe_summary)
    if summary:
        self._log_debug("Summary: %s" % maybe_summary)
    else:
        # Not a summary. Running the jar failed. Bail.
        self._log_error(stderr)
        self._nuke_temp_files()
        sys.exit(1)

    if summary.group('error_count') != "0":
        if os.path.exists(out_file):
            os.remove(out_file)
    elif checks_only and return_code == 0:
        # Compile succeeded but --checks_only disables --js_output_file from
        # actually writing a file. Write a file ourselves so incremental
        # builds still work.
        with open(out_file, 'w') as f:
            f.write('')

    if process_includes:
        errors = map(self._clean_up_error, errors)

    output = self._format_errors(errors)
    if errors:
        prefix = "\n" if output else ""
        self._log_error("Error in: %s%s%s" % (self._target, prefix, output))
    elif output:
        self._log_debug("Output: %s" % output)

    self._nuke_temp_files()
    return bool(errors) or return_code > 0, stderr
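# Hypothetical driver for run() above; the Checker class name and every file
# path below are assumptions for illustration, not taken from the source.
# With custom_sources=False, sources[0] is the target and the rest are
# externs and dependencies in topological order.
checker = Checker()
found_errors, stderr = checker.run(
    ["ui/settings.js", "externs/settings_externs.js", "ui/util.js"],
    out_file="gen/settings.compiled.js")
if found_errors:
    raise SystemExit(stderr)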