def _parse_configuration_export(self, it):
    """Parse an `export` clause: a parenthesised list of `value: $variable` pairs.

    `it` is a Py2-style token iterator positioned just before the opening
    parenthesis. Returns a list of variables.Variable bound to this
    module's name. ui.parse_error (which does not return) handles any
    malformed input.
    """
    ret = []
    token = it.next()
    if token == lexer.Token.OPEN_PARENTHESIS:
        while True:
            token = it.next()
            if token in [lexer.Token.LITERAL, lexer.Token.VARIABLE]:
                # value first, then a colon, then the destination variable
                value = self._token_to_variable(token)
                token = it.next()
                if token == lexer.Token.COLON:
                    token = it.next()
                    if token == lexer.Token.VARIABLE:
                        variable = variables.Variable(self.name, token.content, value)
                        ret.append(variable)
                    else:
                        ui.parse_error(token, msg="expected variable")
                else:
                    ui.parse_error(token, msg="expected colon")
            elif token == lexer.Token.CLOSE_PARENTHESIS:
                break
            else:
                ui.parse_error(token)
    else:
        ui.parse_error(token)
    ui.debug("colon list: " + str(ret))
    return ret
def __parse_configuration(self, it): configuration = configurations.Configuration() # name token = it.next() if token == lexer.Token.LITERAL: configuration.name = token.content else: ui.parse_error(token) while True: token = it.next() if token == lexer.Token.LITERAL: if token.content == "compiler": configuration.compiler = self.__parse_list(it) elif token.content == "archiver": configuration.archiver = self.__parse_list(it) elif token.content == "application_suffix": configuration.application_suffix = self.__parse_list(it) elif token.content == "compiler_flags": configuration.compiler_flags = self.__parse_list(it) elif token.content == "linker_flags": configuration.linker_flags = self.__parse_list(it) elif token.content == "export": configuration.export = self._parse_configuration_export(it) else: ui.parse_error(token) elif token == lexer.Token.NEWLINE: break else: ui.parse_error(token) ui.debug("configuration parsed:" + str(configuration)) configurations.add_configuration(configuration)
def main(user_args):
    """Entry point: gather target emails (from the argument or a file),
    run the breach checks (remote APIs or local-only), then print and
    optionally save the results."""
    targets = []
    api_keys = get_config_from_file(user_args)
    ui.info_section("\n", ui.darkteal, "Targets")
    # BUG FIX: this previously read `args.target_emails` (an undefined
    # global); use the `user_args` parameter like the rest of the function.
    user_stdin_target = fetch_emails(user_args.target_emails)
    if user_stdin_target:
        targets.append(user_stdin_target)
    elif os.path.isfile(user_args.target_emails):
        ui.debug(ui.darkgray, "Reading from file", user_args.target_emails)
        targets.extend(get_emails_from_file(user_args.target_emails))
    else:
        ui.warning("No targets found")

    # Launch
    if not user_args.run_local:
        breached_targets = target_factory(targets, api_keys)
    elif user_args.run_local:
        breached_targets = [Target(t) for t in targets]
    if user_args.bc_path:
        breached_targets = breachcomp_check(breached_targets, user_args.bc_path)
    print_results(breached_targets)
    if user_args.output_file:
        save_results_csv(user_args.output_file, breached_targets)
def _parse_configuration_export(self, it):
    """Parse `( value: $variable ... )` pairs from an export clause.

    Consumes tokens from iterator `it` up to and including the closing
    parenthesis; returns the accumulated variables.Variable list.
    Malformed input goes to ui.parse_error, which does not return.
    """
    ret = []
    token = it.next()
    if token == lexer.Token.OPEN_PARENTHESIS:
        while True:
            token = it.next()
            if token in [lexer.Token.LITERAL, lexer.Token.VARIABLE]:
                value = self._token_to_variable(token)
                token = it.next()
                if token == lexer.Token.COLON:
                    token = it.next()
                    if token == lexer.Token.VARIABLE:
                        # bind the exported value under this module's name
                        variable = variables.Variable(
                            self.name, token.content, value)
                        ret.append(variable)
                    else:
                        ui.parse_error(token, msg="expected variable")
                else:
                    ui.parse_error(token, msg="expected colon")
            elif token == lexer.Token.CLOSE_PARENTHESIS:
                break
            else:
                ui.parse_error(token)
    else:
        ui.parse_error(token)
    ui.debug("colon list: " + str(ret))
    return ret
def _parse_command_line():
    """Define and parse pake's command-line options.

    Returns the argparse namespace (targets, all, configuration, jobs,
    verbose). Note: `jobs` stays a string here; callers convert it.
    """
    parser = argparse.ArgumentParser(description='Painless buildsystem.')
    parser.add_argument('target', metavar='target', nargs="*",
                        help='targets to be built')
    parser.add_argument('-a', '--all', action="store_true",
                        help='build all targets')
    parser.add_argument('-c', action='store', dest='configuration',
                        default="__default", nargs="?",
                        help='configuration to be used')
    parser.add_argument('-j', action='store', dest='jobs', default="1",
                        nargs="?", help='parallel jobs to be used')
    # BUG FIX: help-text typo "invokations" -> "invocations"
    parser.add_argument('-v', '--verbose', action="store_true",
                        help='show tool invocations')
    args = parser.parse_args()
    ui.debug(str(args))
    return args
def run_git(working_path, *cmd, raises=True):
    """Run git `cmd` in given `working_path`.

    If `raises` is True and git's return code is non-zero, raise
    GitCommandError. Otherwise, return a tuple (returncode, out) with
    the combined stdout/stderr text (trailing newline stripped).
    """
    git_cmd = ["git"] + list(cmd)
    options = dict()
    if not raises:
        # capture output only in non-raising mode; otherwise inherit streams
        options["stdout"] = subprocess.PIPE
        options["stderr"] = subprocess.STDOUT
    ui.debug(ui.lightgray, working_path, "$", ui.reset, *git_cmd)
    process = subprocess.Popen(git_cmd, cwd=working_path, **options)
    # BUG FIX: `out` was unbound on the raises=True success path, making
    # `return returncode, out` raise NameError; initialise it up front.
    out = ""
    if raises:
        process.wait()
    else:
        out, _ = process.communicate()
        out = out.decode("utf-8")
    returncode = process.returncode
    if raises:
        if returncode != 0:
            raise GitCommandError(working_path, cmd)
    else:
        if out.endswith('\n'):
            out = out.strip('\n')
        ui.debug(ui.lightgray, "[%i]" % returncode, ui.reset, out)
    return returncode, out
def storeD():
    '''store the light level in the output file, cropping if necessary'''
    global sensordata, watchdog
    watchdog.reset()  # Reset the watchdog
    light_filetmp = cfg.light_filename + ".tmp"  # Temporary file
    try:
        with open(cfg.light_filename, 'r') as fin:
            data = fin.read().splitlines(True)  # Read in existing data if present
    except FileNotFoundError:
        data = []
    if len(data) >= cfg.light_keepreadings:
        # keep only the newest readings
        data = data[-cfg.light_keepreadings:]
    # BUG FIX: `sensestore` was referenced unconditionally below but only
    # assigned when there were readings, raising NameError on an empty
    # sample window; track it explicitly instead.
    sensestore = None
    if len(sensordata) > 0:
        sensestore = cfg.light_normalise / (sum(sensordata) / len(sensordata))
        data.append(str(sensestore) + '\n')  # Append the new entry
    with open(light_filetmp, 'w') as fout:
        fout.writelines(data)  # Store entries in temporary file
    # Move the temporary file over the non-temporary one
    os.rename(light_filetmp, cfg.light_filename)
    if sensestore is not None:
        debug('Storing light sensor average {}'.format(sensestore), 2)
        if cfg.mqtt_engine:
            mqq.put(('lightSense', {'brightness': format(sensestore)}))
    sensordata = []
def make_request(self, url, cf=False, meth="GET", timeout=30, redirs=True, data=None, params=None):
    """Issue an HTTP request for this target.

    cf=True routes the request through cfscrape to bypass Cloudflare.
    Sleeps briefly after an HTTP 429 as a crude rate-limit backoff.
    Returns the response object, or None when the request could not be
    made (the failure itself is reported via ui.error).
    """
    # BUG FIX: `response` was unbound after a failed request, so the 429
    # check below raised NameError; initialise and guard it instead.
    response = None
    if cf is False:
        try:
            response = requests.request(url=url, headers=self.headers,
                                        method=meth, timeout=timeout,
                                        allow_redirects=redirs, data=data,
                                        params=params)
            ui.debug(ui.purple, "REQUEST", response.url, ui.bold,
                     response.status_code)
        except Exception as ex:
            ui.error("Request could not be made for", self.email, ex)
    else:  # cf is True
        try:
            import cfscrape
            scraper = cfscrape.create_scraper()
            response = scraper.get(url)
        except Exception as ex:
            ui.error("Cloudflare bypass request could not be made for",
                     self.email, ex)
    if response is not None and response.status_code == 429:
        ui.warning("Reached RATE LIMIT, sleeping", ui.purple, self.email)
        sleep(2.5)
    return response
def append(module_name, name, value):
    """Append `value` to module variable `name`, creating the variable
    first when it does not exist yet."""
    module_vars = modules[module_name]
    if name not in module_vars:
        module_vars[name] = Variable(module_name, name)
    target = module_vars[name]
    target.content.append(value)
    ui.debug("setting variable: {!s}".format(target))
def __parse(self):
    """Drive the directive parser over this module's token stream.

    Running out of tokens (StopIteration) is the normal end-of-file
    condition; an unrecognised directive is a parse error.
    """
    token_iter = iter(self.tokens)
    try:
        parsed = self.__parse_directive(token_iter)
        if not parsed:
            ui.parse_error(msg="unknown :(")
    except StopIteration:
        ui.debug("eof")
def is_newer_than(prerequisite, target):
    """Return True when `prerequisite`'s mtime is newer than `target`'s.

    When either file is missing, the target is treated as out of date
    (returns True) so it gets rebuilt.
    """
    if os.path.isfile(target) and os.path.isfile(prerequisite):
        ret = get_mtime(prerequisite) > get_mtime(target)
        ui.debug("is {} newer than {} = {!s}".format(prerequisite, target, ret))
        return ret
    else:
        # BUG FIX: the old log message claimed the *target* was missing even
        # when it was the prerequisite that did not exist; name both.
        ui.debug("{} or {} doesn't exist, treating like older".format(
            target, prerequisite))
        return True
def dbD():
    '''database access thread '''
    debug('Database thread initialising')
    dbopen()
    # Serve queued beat records forever; dbq.get() blocks between items.
    while True:
        dbstorebeat(*dbq.get())
def is_newer_than(prerequisite, target):
    """Return True when `prerequisite`'s mtime is newer than `target`'s.

    A missing target counts as out of date (returns True).
    NOTE(review): only the target's existence is checked here — a missing
    prerequisite makes get_mtime raise; confirm this is intended.
    """
    if os.path.isfile(target):
        ret = get_mtime(prerequisite) > get_mtime(target)
        ui.debug("is {} newer than {} = {!s}".format(prerequisite, target, ret))
        return ret
    else:
        ui.debug(target + " doesn't exist, treating like older")
        return True
def build(self, configuration):
    """Phony build: nothing is compiled for this target.

    Warns when the target declares neither artefacts nor prerequisites,
    because its run_before/run_after hooks then execute unconditionally.
    """
    artefacts = self.common_parameters.artefacts.eval()
    prerequisites = self.common_parameters.prerequisites.eval()
    if not (artefacts and prerequisites):
        ui.warning(("target {} has no artifacts or prerequisites defined, "
                    "this means, whatever is defined in run_before or run_after "
                    "will be always executed").format(self.common_parameters.name))
    ui.debug("phony build")
def build(self, configuration):
    """Phony build step; emits diagnostics only.

    A target lacking either artefacts or prerequisites is warned about,
    since its run_before/run_after commands always run in that case.
    """
    have_artefacts = bool(self.common_parameters.artefacts.eval())
    have_prerequisites = bool(self.common_parameters.prerequisites.eval())
    if not have_artefacts or not have_prerequisites:
        message = ("target {} has no artifacts or prerequisites defined, "
                   "this means, whatever is defined in run_before or run_after "
                   "will be always executed").format(self.common_parameters.name)
        ui.warning(message)
    ui.debug("phony build")
def get_hunterio_private(self, api_key):
    """Query HunterIO's domain-search API (paid key) and collect every
    address it reports into self.hunterio_mails. Failures only warn."""
    try:
        ui.debug(self.email, "Getting HunterIO private data on domain")
        url = "https://api.hunter.io/v2/domain-search?domain={target}&api_key={key}".format(
            target=self.hostname, key=api_key)
        payload = self.make_request(url, cf=True).json()
        for entry in payload["data"]["emails"]:
            self.hunterio_mails.append(entry["value"])
    except Exception as ex:
        ui.warning(ui.yellow, "HunterIO (private API) error:", self.email, ex, url)
def __scan_includes(self, in_filename, include_dirs, compiler_flags):
    """Ask the compiler (-M) which headers `in_filename` depends on and
    return them as a token list (line-continuation backslashes dropped)."""
    ui.debug("scanning includes for " + in_filename)
    try:
        flags = self.__prepare_compiler_flags(include_dirs, compiler_flags)
        command = " ".join([configurations.compiler(), flags, "-M", in_filename])
        out = shell.execute(command, capture_output=True).split()
    except Exception as e:
        raise Exception("error while building dependency graph for"
                        "{!s}, {!s}".format(in_filename, e))
    # out[0] is the object target, out[1] the source file itself
    return [tok for tok in out[2:] if tok != "\\"]
def _build_object(self, sem, toolchain, name, object_file, source, include_dirs, compiler_flags):
    """Worker for compiling one object file.

    Waits on semaphore `sem` to bound parallelism, then compiles `source`
    via the toolchain. The first failure is recorded on self.error /
    self.error_reason; later workers bail out early once error is set.
    """
    try:
        if self.error:
            return  # another worker already failed; skip remaining work
        with sem:
            toolchain.build_object(name, object_file, source, include_dirs,
                                   compiler_flags)
    except Exception as e:
        # BUG FIX: log-message typo "catched" -> "caught"
        ui.debug("caught during compilation {!s}".format(e))
        self.error_reason = str(e)
        self.error = True
def get_pastebin(self):
    """Search Google CSE for pastebin.com hits mentioning this email.

    Appends matching result URLs to self.pastebin_urls; failures are
    reported as warnings, never raised.
    """
    try:
        ui.debug(self.email, "Getting pastebin private data")
        # SECURITY NOTE(review): the Google API key and cse_tok are
        # hard-coded in this URL; consider moving them to configuration.
        url = "https://www.googleapis.com/customsearch/v1element?key=AIzaSyCVAXiUzRYsML1Pv6RwSG1gunmMikTzQqY&rsz=filtered_cse&num=10&hl=en&prettyPrint=false&source=gcsc&gss=.com&cx=013305635491195529773:0ufpuq-fpt0&q=%22{email}%22&cse_tok=AF14hljK02r_xNhCjU6rz69O3mWFxQgr0w:1534517796222&sort=&googlehost=www.google.com".format(
            email=self.email)
        req = self.make_request(url, cf=False)
        response = req.json()
        for e in response["results"]:
            self.pastebin_urls.append(e["formattedUrl"])
    except Exception as ex:
        ui.warning(ui.yellow, "Google api error for pastebin.com:",
                   self.email, ex, url)
def get_hunterio_public(self):
    """Query HunterIO's free email-count endpoint for the target domain
    and record the total in self.related_emails (when non-zero)."""
    try:
        ui.debug(self.email, "Getting HunterIO public data on domain")
        url = "https://api.hunter.io/v2/email-count?domain={}".format(
            self.hostname)
        req = self.make_request(url, cf=True)
        response = req.json()
        if response["data"]["total"] != 0:
            self.related_emails = response["data"]["total"]
    except Exception as ex:
        # BUG FIX: warning-message typo "pubic" -> "public"
        ui.warning(ui.yellow, "HunterIO (public API) error:", self.email, ex)
def execute(command, capture_output=False):
    """Run `command` through the shell and return captured stdout
    ('' when capture_output is False). Raises Exception on non-zero exit."""
    output = ''
    runner = subprocess.check_output if capture_output else subprocess.check_call
    try:
        result = runner(command, shell=True)
        if capture_output:
            output = result
    except subprocess.CalledProcessError as e:
        raise Exception("command exited with error({}): {}".format(
            str(e.returncode), command))
    ui.debug("command completed: " + command)
    return output
def export_special_variables(configuration):
    """Publish configuration-scoped variables into the __configuration
    pseudo-module and register $__build for every known module."""
    ui.debug("exporting special variables")
    with ui.ident:
        add_empty("__configuration", "$__null")
        add("__configuration", "$__name", configuration.name)
        for exported in configuration.export:
            add("__configuration", exported.name, exported)
        for module in modules:
            add(module, "$__build", fsutils.build_dir(configuration.name))
def fullcap(a, b, c):
    '''the capacitor has charged enough to pull the pin high'''
    global timenow, sensordata
    delta = c - timenow  # Stop the counter
    # BUG FIX: removed a leftover bare `print(delta)` debugging statement;
    # the debug() call below already reports the reading at level 3.
    # if delta < 0: delta += 4294967295  # Counter wrapped
    if delta < 0:
        # discard the wrapped/negative reading and start over
        draincap()
        return
    debug('Light sensor read took {} ticks'.format(delta), 3)
    sensordata.append(delta)
    time.sleep(cfg.light_read_interval)  # Wait for the next reading
    draincap()  # Drain the cap to prime the next reading
    return
def envD(pig):
    '''environmental data thread (pig = pigpiod handle)'''
    debug('Environmental data thread initialising')
    # The sensor read itself takes ~3 * env_delay, so the configured
    # frequency must leave a non-negative sleep.
    if cfg.env_frequency - 3 * cfg.env_delay < 0:
        uiq.put(('ERROR: env_frequency too low (must be at least {})'.format(
            3 * cfg.env_delay), 'ERR'))
        return
    while True:
        globs.temperature, globs.humidity = temphumid(pig)
        debug('Read temperature {} and humidity {}'.format(
            globs.temperature, globs.humidity), 2)
        time.sleep(cfg.env_frequency - 3 * cfg.env_delay)
def link_application(self, out_filename, in_filenames, link_with, library_dirs):
    """Link `in_filenames` into the application `out_filename`.

    Relinks when any input object or any library in `link_with` is newer
    than the output; otherwise reports "up to date". ui.fatal aborts the
    build on a linker failure.
    """
    if fsutils.is_any_newer_than(in_filenames, out_filename) or \
            self.__are_libs_newer_than_target(link_with, out_filename):
        ui.debug("linking application")
        ui.debug(" files: " + str(in_filenames))
        ui.debug(" with libs: " + str(link_with))
        ui.debug(" lib dirs: " + str(library_dirs))
        # -L search-path flags for every library directory
        parameters = " ".join("-L " + lib_dir for lib_dir in library_dirs)
        ui.bigstep("linking", out_filename)
        try:
            shell.execute(" ".join([
                configurations.compiler(),
                configurations.linker_flags(),
                "-o", out_filename,
                " ".join(in_filenames),
                self.__prepare_linker_flags(link_with),
                parameters
            ]))
        except Exception as e:
            ui.fatal("cannot link {}, reason: {!s}".format(out_filename, e))
    else:
        ui.bigstep("up to date", out_filename)
def __try_run(self, cmds):
    """Run `cmds` in the module's root directory when any artefact is out
    of date (or when no artefacts/prerequisites are declared at all).
    The previous working directory is restored afterwards.
    """
    root_dir = os.getcwd()
    os.chdir(self.common_parameters.root_path)

    evaluated_artefacts = self.common_parameters.artefacts.eval()
    evaluated_prerequisites = self.common_parameters.prerequisites.eval()

    should_run = True
    if evaluated_prerequisites and evaluated_artefacts:
        should_run = False
        ui.debug("checking prerequisites ({!s}) for making {!s}".format(
            evaluated_prerequisites, evaluated_artefacts))
        for artefact in evaluated_artefacts:
            ui.debug(" " + artefact)
            if fsutils.is_any_newer_than(evaluated_prerequisites, artefact):
                # BUG FIX: the two string fragments concatenated without a
                # space ("{!s}needs"); add the missing space.
                ui.debug("going on because {!s} needs to be rebuilt".format(artefact))
                should_run = True
                break

    if should_run:
        variables.pollute_environment(self.common_parameters.module_name)
        evaluated_cmds = cmds.eval()
        for cmd in evaluated_cmds:
            ui.debug("running {!s}".format(cmd))
            shell.execute(cmd)

    os.chdir(root_dir)
def __fetch_includes(self, target_name, in_filename, include_dirs, compiler_flags):
    """Return the header dependencies of `in_filename`, using an on-disk
    cache file that is reused while it is newer than the source."""
    ui.debug("getting includes for " + in_filename)
    with ui.ident:
        cache_file = self.cache_directory(target_name) + in_filename + ".includes"
        includes = None
        if os.path.exists(cache_file) and fsutils.is_newer_than(cache_file, in_filename):
            # BUG FIX: open the cache through a context manager so the file
            # handle is closed (was leaked via marshal.load(open(...))).
            with open(cache_file, "rb") as f:
                includes = marshal.load(f)
        else:
            fsutils.mkdir_recursive(os.path.dirname(cache_file))
            includes = self.__scan_includes(in_filename, include_dirs, compiler_flags)
            with open(cache_file, "wb") as f:
                marshal.dump(includes, f)
    return includes
def wait_until_something_changes():
    """Block until any file under the current directory becomes newer than
    a freshly written marker file, polling once per second."""
    mkdir_recursive(BUILD_ROOT)
    marker = os.path.join(BUILD_ROOT, "ci.touch")
    with open(marker, "w") as f:
        f.write("don't bother about this file :)")
    while True:
        time.sleep(1)
        for dirpath, _, filenames in os.walk(os.getcwd()):
            for filename in filenames:
                candidate = os.path.join(dirpath, filename)
                if is_newer_than(candidate, marker):
                    ui.debug("found change: {}".format(filename))
                    return
def __scan_includes(self, in_filename, include_dirs, compiler_flags):
    """Run the compiler with -M to list the headers `in_filename` depends
    on, dropping line-continuation backslashes and system headers."""
    ui.debug("scanning includes for " + in_filename)
    try:
        flags = self.__prepare_compiler_flags(include_dirs, compiler_flags)
        out = shell.execute(" ".join([configurations.compiler(), flags,
                                      "-M", in_filename]),
                            capture_output=True).split()
    except Exception as e:
        raise Exception("error while building dependency graph for"
                        "{!s}, {!s}".format(in_filename, e))

    def is_system_include(filename):
        # headers under the system prefixes are not tracked as prerequisites
        return filename.startswith("/usr/include") or filename.startswith("/usr/lib")

    # out[0] is the object target, out[1] the source file itself
    return [token for token in out[2:]
            if not token == "\\" and not is_system_include(token)]
def mqttD():
    '''MQTT broker thread'''
    debug('MQTT thread initialising')
    global mqclient, watchdog, db2x
    mqclient = mqtt.Client()
    mqclient.connect(cfg.mqtt_broker, cfg.mqtt_port, cfg.mqtt_keepalive)
    mqclient.loop_start()
    if cfg.mqtt_telemetry:
        # Set a watchdog for publishing telemetry data periodically
        watchdog = Watchdog(cfg.mqtt_telemetry_interval, mqttTelemetry)
    while True:
        topic_suffix, body = mqq.get()
        mqclient.publish('{}/{}'.format(cfg.mqtt_topicbase, topic_suffix),
                         payload=json.dumps(body), qos=0, retain=False)
        debug('MQTT publish: {}/{}: {}'.format(cfg.mqtt_topicbase,
                                               topic_suffix, body), 3)
def __init__(self, filename):
    """Parse module file `filename`: tokenize it, run the parser, and
    register the module's built-in $__path / $__null variables."""
    assert isinstance(filename, str)
    ui.debug("parsing " + filename)
    with ui.ident:
        self.filename = filename
        self.name = self.__get_module_name(filename)
        self.tokens = lexer.parse(filename)
        self.__parse()
        module_dir = os.path.dirname(filename)
        variables.add(self.name, "$__path", module_dir)
        variables.add_empty(self.name, "$__null")
def execute(command, capture_output=False):
    """Run `command` via the shell, tagging each invocation with a global
    sequence number for the debug log.

    Returns captured stdout ('' when not capturing); raises Exception on
    a non-zero exit status.
    """
    global _counter
    _counter += 1
    ui.debug("running {!s}: {!s}".format(_counter, command))
    try:
        if capture_output:
            return subprocess.check_output(command, shell=True)
        subprocess.check_call(command, shell=True)
        return ''
    except subprocess.CalledProcessError as e:
        raise Exception("command exited with error({}): {}".format(
            str(e.returncode), command))
def get_shodan(self, api_key):
    """Query Shodan for the target host's open ports and reverse DNS.

    Resolves self.hostname to self.ip first; falls back to a built-in
    API key when none is supplied. Results go to self.rev_ports /
    self.rev_dns; every failure only produces a warning.
    """
    try:
        self.ip = socket.gethostbyname(self.hostname)
    except Exception as ex:
        ui.debug("Could not fetch host IP address", self.hostname, ex)
    if len(api_key) == 0:
        # SECURITY NOTE(review): hard-coded shared API key below; consider
        # requiring users to supply their own.
        ui.debug(self.email, "Setting default Shodan API KEY")
        api_key = "UNmOjxeFS2mPA3kmzm1sZwC0XjaTTksy"  # From Infoga tool
    try:
        # NOTE(review): if the DNS lookup above failed, self.ip may be
        # unset here; the resulting AttributeError is swallowed below.
        url = "https://api.shodan.io/shodan/host/{target}?key={key}".format(
            target=self.ip, key=api_key)
        response = self.make_request(url)
        data = json.loads(response.text)
        self.rev_ports.extend(data["ports"])
        self.rev_dns.extend(data["hostnames"])
    except Exception as ex:
        ui.warning(ui.yellow, "Shodan error for:", self.email, ex)
def __fetch_includes(self, target_name, in_filename, include_dirs, compiler_flags):
    """Return the header dependencies of `in_filename`.

    A marshal cache file under the target's cache directory is reused
    while it is newer than the source; otherwise the compiler is asked
    and the cache refreshed.
    """
    ui.debug("getting includes for " + in_filename)
    with ui.ident:
        cache_file = self.cache_directory(
            target_name) + in_filename + ".includes"
        includes = None
        if os.path.exists(cache_file) and fsutils.is_newer_than(
                cache_file, in_filename):
            # BUG FIX: use context managers so the cache file handles are
            # closed (marshal.load(open(...)) leaked them).
            with open(cache_file, "rb") as f:
                includes = marshal.load(f)
        else:
            fsutils.mkdir_recursive(os.path.dirname(cache_file))
            includes = self.__scan_includes(in_filename, include_dirs,
                                            compiler_flags)
            with open(cache_file, "wb") as f:
                marshal.dump(includes, f)
    return includes
def mqttTelemetry():
    '''publish telemetry data to MQTT broker every mqtt_telemetry_interval seconds'''
    global mqclient, watchdog
    # Reset the watchdog
    watchdog.reset()
    # Sort the telemetry data and clear the global
    stelemetry = sorted(globs.telemetry)
    globs.telemetry = []
    try:
        avgskew = sum(stelemetry)/len(stelemetry)
    except ZeroDivisionError:
        # This will happen when the clock isn't running
        return
    # Seconds/day of drift implied by the average skew over one period
    drift = -avgskew*86400/cfg.p_period
    # Percentage of readings in each quality bucket
    # (bad-/warn-/good/warn+/bad+), normalised so they sum to exactly 100
    percents = round_to_100([
        100*len([element for element in stelemetry if element < cfg.p_offset-cfg.p_tolerance2])/len(stelemetry),
        100*len([element for element in stelemetry if cfg.p_offset-cfg.p_tolerance2 <= element < cfg.p_offset-cfg.p_tolerance1])/len(stelemetry),
        100*len([element for element in stelemetry if abs(cfg.p_offset-element) <= cfg.p_tolerance1])/len(stelemetry),
        100*len([element for element in stelemetry if cfg.p_offset+cfg.p_tolerance1 < element <= cfg.p_offset+cfg.p_tolerance2])/len(stelemetry),
        100*len([element for element in stelemetry if element > cfg.p_offset+cfg.p_tolerance2])/len(stelemetry)
    ])
    payload = {
        'interval': cfg.mqtt_telemetry_interval,
        'avgskew': avgskew,
        'maxskew': int(stelemetry[-1]),  # stelemetry is sorted ascending
        'minskew': int(stelemetry[0]),
        'bad-': percents[0],
        'warn-': percents[1],
        'good': percents[2],
        'warn+': percents[3],
        'bad+': percents[4],
        'drift': round(drift, 1),
        '1mdrift': globs.driftavgs[0],
        '1hdrift': globs.driftavgs[1],
        '1ddrift': globs.driftavgs[2],
        '95max': int(stelemetry[int(len(stelemetry)*0.95)]),  # 95th percentile
        '95min': int(stelemetry[int(len(stelemetry)*0.05)]),  # 5th percentile
        'temperature': globs.temperature,
        'humidity': globs.humidity,
    }
    debug('MQTT publish: {}/telemetry: {}'.format(cfg.mqtt_topicbase, payload), 3)
    payload = json.dumps(payload)
    mqclient.publish('{}/telemetry'.format(cfg.mqtt_topicbase),
                     payload=payload, qos=0, retain=False)
def build_objects(self, toolchain):
    """Compile all of this target's sources to object files in parallel.

    One thread is spawned per source; a semaphore bounds concurrent
    compilations to the -j job count. Returns the object file list, or
    aborts via ui.fatal when any compilation failed.
    """
    object_files = []
    evaluated_sources = self.cxx_parameters.sources.eval()
    evaluated_include_dirs = self.cxx_parameters.include_dirs.eval()
    evaluated_compiler_flags = self.cxx_parameters.compiler_flags.eval()
    ui.debug("building objects from {!s}".format(evaluated_sources))
    ui.push()
    threads = []
    # BUG FIX: command_line.args.jobs is a string from argparse; convert
    # it once so the semaphore (and any comparisons) use an int.
    jobs = int(command_line.args.jobs)
    limit_semaphore = threading.Semaphore(jobs)
    ui.debug("limiting jobs to {!s}".format(jobs))
    for source in evaluated_sources:
        object_file = toolchain.object_filename(
            self.common_parameters.name, source)
        object_files.append(object_file)
        thread = threading.Thread(
            target=self._build_object,
            args=(limit_semaphore, toolchain, self.common_parameters.name,
                  object_file, source, evaluated_include_dirs,
                  evaluated_compiler_flags))
        threads.append(thread)
        thread.daemon = True
        thread.start()
    # NOTE: the original asserted len(threads) <= jobs, but one thread is
    # spawned per source (the semaphore only bounds concurrency), so that
    # assertion was wrong and has been dropped.
    for t in threads:
        t.join()
    if self.error:
        ui.fatal("failed building {!s}: {!s}".format(
            self.common_parameters.name, self.error_reason))
    ui.pop()
    return object_files
def dbopen():
    '''open the database

    Opens (creating if needed) the SQLite file named by cfg.db_file and
    ensures the `beats` table exists (schema text from globs.sqltable).
    Errors are reported through uiq rather than raised. Returns the
    connection, or None when sqlite3.connect itself failed.
    '''
    global dbx
    dbx = None
    debug('Opening database', 2)
    try:
        dbx = sqlite3.connect(cfg.db_file)
        # dbx.isolation_level = 'EXCLUSIVE'
        # FIXME - this seems not to lock against other processes
        # dbx.execute('BEGIN EXCLUSIVE')
        sql = "CREATE TABLE IF NOT EXISTS beats ({});".format(globs.sqltable)
        cur = dbx.cursor()
        try:
            cur.execute(sql)
            dbx.commit()
        except Error as dberr:
            # NOTE(review): `Error` is presumably sqlite3.Error — confirm
            # how it is imported at module level.
            uiq.put(('Database table create error: {}'.format(dberr), 'ERR'))
    except Error as dberr:
        uiq.put(('Database open error: {}'.format(dberr), 'ERR'))
    return dbx
def eval(self):
    """Resolve this variable reference and evaluate it.

    The name may be plain ("$name", looked up in self.module) or
    qualified ("$module.name"). Raises a parse error for an unknown
    module and a fatal error for an unknown variable.
    """
    ui.debug("evaluating {!s}".format(self))

    parts = self.name.split(".")
    if len(parts) == 1:
        # unqualified: module stays as-is (the original redundantly
        # re-assigned self.module to itself here)
        self.name = parts[0]
    elif len(parts) == 2:
        self.module = parts[0][1:]  # lose the $
        self.name = "$" + parts[1]

    # `modules` is only read, so the original `global modules` was unneeded
    if self.module not in modules:
        ui.parse_error(msg="no such module: " + self.module)
    if self.name not in modules[self.module]:
        ui.fatal("{!s} does not exist".format(self))

    return modules[self.module][self.name].eval()
def __try_run(self, cmds):
    """Evaluate `cmds` and run them in the module's root directory when
    the prerequisites indicate a rebuild is needed (see
    __are_explicit_prerequisities_newer). Restores the previous working
    directory afterwards; does nothing when `cmds` evaluates empty."""
    evaluated_cmds = cmds.eval()

    if evaluated_cmds:
        root_dir = os.getcwd()
        os.chdir(self.common_parameters.root_path)

        evaluated_artefacts = self.common_parameters.artefacts.eval()
        evaluated_prerequisites = self.common_parameters.prerequisites.eval()

        should_run = self.__are_explicit_prerequisities_newer(
            evaluated_artefacts, evaluated_prerequisites)

        if should_run:
            # expose module variables to the commands via the environment
            variables.pollute_environment(self.common_parameters.module_name)
            for cmd in evaluated_cmds:
                ui.debug("running {!s}".format(cmd))
                shell.execute(cmd)

        os.chdir(root_dir)
def __init__(self, filename):
    """Load and parse the pake module in `filename`, then register its
    implicit $__path and $__null variables."""
    assert isinstance(filename, str)
    ui.debug("parsing " + filename)
    with ui.ident:
        self.filename = filename
        self.name = self.__get_module_name(filename)
        self.tokens = lexer.parse(filename)
        self.__parse()
        variables.add(self.name, "$__path", os.path.dirname(filename))
        variables.add_empty(self.name, "$__null")
def eval(self):
    """Expand `${name}` variable references inside this token's content.

    A small state machine scans the string; each `${...}` is resolved via
    ReferenceToVariable in this token's module and its evaluation joined
    with spaces. Returns a single-element list with the expanded string.
    `${` must follow `$` or a parse error is raised.
    """
    ui.debug("evaluating {!s}: ".format(self))

    s = self.content
    ret = []

    STATE_READING = 1
    STATE_WAITING_FOR_PARENTHESIS = 2
    STATE_READING_NAME = 3

    variable_name = "$"
    state = STATE_READING

    for c in s:
        if state == STATE_READING:
            if c == "$":
                state = STATE_WAITING_FOR_PARENTHESIS
            else:
                ret.append(c)
        elif state == STATE_WAITING_FOR_PARENTHESIS:
            if c == "{":
                state = STATE_READING_NAME
            else:
                ui.parse_error(msg="expecting { after $")
        elif state == STATE_READING_NAME:
            if c == "}":
                ui.debug("variable: " + variable_name)
                variable = ReferenceToVariable(self.module, variable_name)
                ret.append(" ".join(variable.eval()))
                variable_name = "$"
                state = STATE_READING
            else:
                variable_name += c
        # BUG FIX: a second, unreachable `elif state == STATE_READING_NAME`
        # branch duplicated the name-accumulation logic; it was dead code
        # and has been removed.

    return ["".join(ret)]
def build_objects(self, toolchain):
    """Compile every source of this target to an object file, in parallel.

    Spawns one daemon thread per source and bounds concurrency with a
    semaphore sized by the -j job count. Aborts via ui.fatal when any
    worker recorded a failure; otherwise returns the object file list.
    """
    object_files = []
    evaluated_sources = self.cxx_parameters.sources.eval()
    evaluated_include_dirs = self.cxx_parameters.include_dirs.eval()
    evaluated_compiler_flags = self.cxx_parameters.compiler_flags.eval()
    ui.debug("building objects from {!s}".format(evaluated_sources))
    ui.push()
    threads = []
    # BUG FIX: argparse delivers jobs as a string; convert once up front.
    jobs = int(command_line.args.jobs)
    limit_semaphore = threading.Semaphore(jobs)
    ui.debug("limiting jobs to {!s}".format(jobs))
    for source in evaluated_sources:
        object_file = toolchain.object_filename(self.common_parameters.name,
                                                source)
        object_files.append(object_file)
        thread = threading.Thread(target=self._build_object,
                                  args=(limit_semaphore, toolchain,
                                        self.common_parameters.name,
                                        object_file, source,
                                        evaluated_include_dirs,
                                        evaluated_compiler_flags))
        threads.append(thread)
        thread.daemon = True
        thread.start()
    # NOTE: the old `assert len(threads) <= jobs` was incorrect — a thread
    # is created per source while the semaphore only limits concurrency —
    # so it has been removed.
    for t in threads:
        t.join()
    if self.error:
        ui.fatal("failed building {!s}: {!s}"
                 .format(self.common_parameters.name, self.error_reason))
    ui.pop()
    return object_files
def build_object(self, target_name, out_filename, in_filename, include_dirs, compiler_flags):
    """Compile `in_filename` into `out_filename` when any prerequisite
    (its headers, the source itself, or any pake module file) is newer
    than the existing object."""
    abs_source = os.path.join(os.getcwd(), in_filename)
    ui.debug("building object " + out_filename)
    with ui.ident:
        prerequisites = self.__fetch_includes(target_name, abs_source,
                                              include_dirs, compiler_flags)
        prerequisites.append(in_filename)
        # a change to any pake module invalidates every object
        ui.debug("appending prerequisites from pake modules: {!s}"
                 .format(fsutils.pake_files))
        prerequisites.extend(fsutils.pake_files)
        ui.debug("prerequisites: {!r}".format(prerequisites))
        if fsutils.is_any_newer_than(prerequisites, out_filename):
            # stray trailing semicolon removed from the original
            fsutils.mkdir_recursive(os.path.dirname(out_filename))
            cmd = " ".join([
                configurations.compiler(),
                self.__prepare_compiler_flags(include_dirs, compiler_flags),
                "-c", "-o", out_filename, abs_source])
            if command_line.args.verbose:
                ui.step(configurations.compiler(), cmd)
            else:
                ui.step(configurations.compiler(), in_filename)
            shell.execute(cmd)
def link_application(self, out_filename, in_filenames, link_with, library_dirs):
    """Link object files `in_filenames` into application `out_filename`.

    The link runs only when an input object or a library from `link_with`
    is newer than the output; a linker failure is fatal (ui.fatal).
    """
    if fsutils.is_any_newer_than(in_filenames, out_filename) or \
            self.__are_libs_newer_than_target(link_with, out_filename):
        ui.debug("linking application")
        ui.debug(" files: " + str(in_filenames))
        ui.debug(" with libs: " + str(link_with))
        ui.debug(" lib dirs: " + str(library_dirs))
        # library search paths
        parameters = " ".join("-L " + lib_dir for lib_dir in library_dirs)
        ui.bigstep("linking", out_filename)
        try:
            shell.execute(" ".join([configurations.compiler(),
                                    configurations.linker_flags(),
                                    "-o", out_filename,
                                    " ".join(in_filenames),
                                    self.__prepare_linker_flags(link_with),
                                    parameters]))
        except Exception as e:
            ui.fatal("cannot link {}, reason: {!s}".format(out_filename, e))
    else:
        ui.bigstep("up to date", out_filename)
def __are_explicit_prerequisities_newer(self, artefacts, prerequisites):
    """Return True when any artefact needs rebuilding.

    With both lists non-empty: True iff some artefact is older than a
    prerequisite. With either list empty there is nothing to compare, so
    True (run unconditionally).
    """
    ui.debug("checking prerequisites ({!s}) for making {!s}"
             .format(prerequisites, artefacts))
    if prerequisites and artefacts:
        for artefact in artefacts:
            ui.debug(" " + artefact)
            if fsutils.is_any_newer_than(prerequisites, artefact):
                # BUG FIX: the split literal concatenated without a space
                # ("{!s}needs"); the missing space is restored.
                ui.debug("going on because {!s} needs to be rebuilt"
                         .format(artefact))
                return True
        return False
    else:
        return True
def pollute_environment(current_module):
    """Export every module variable into os.environ as MODULE_NAME, and
    additionally as the bare NAME for variables of `current_module`."""
    ui.debug("polluting environment")
    with ui.ident:
        for module in modules:
            for (name, variable) in modules[module].iteritems():
                evaluated = variable.eval()
                joined = " ".join(evaluated)
                long_name = module + "_" + name[1:]
                os.environ[long_name] = joined
                ui.debug(" " + long_name + ": " + str(evaluated))
                if module == current_module:
                    short_name = name[1:]
                    os.environ[short_name] = joined
                    ui.debug(" " + short_name + ": " + str(evaluated))
def _build_and_track_single_target(name):
    """ tracking means putting it to special container, when this function
    is called with the same target, it will be skipped

    Resolves the selected configuration, recursively builds declared
    dependencies via build(), then runs the target's before/build/after
    hooks and copies its resources. ui.fatal aborts on an unknown target
    or one not visible in the configuration.
    """
    configuration = configurations.get_selected_configuration()
    fsutils.make_build_dir(configuration.name)

    ui.debug("building {} with configuration {!s}".format(name, configuration))
    with ui.ident:
        if name in _built_targets:
            ui.debug("{} already build, skipping".format(name))
            return
        else:
            _built_targets.append(name)

        if name not in targets:
            ui.fatal("target {} not found".format(name))

        target = targets[name]
        if not target.is_visible(configuration):
            ui.fatal("target {} is not visible in {!s}"
                     .format(name, configuration))

        # build dependencies first (recursive via build())
        for dependency in target.common_parameters.depends_on.eval():
            ui.debug("{} depends on {}".format(name, dependency))
            build(dependency)

        toolchain = compiler.Gnu()
        target.before()
        target.build(toolchain)
        target.after()
        target.copy_resources(toolchain)
def add(module_name, name, value):
    """Create (or overwrite) variable `name` with `value` in `module_name`."""
    new_var = Variable(module_name, name, value)
    modules[module_name][name] = new_var
    ui.debug("adding variable: {!s}".format(new_var))
def add_empty(module_name, name):
    """Register an empty (valueless) variable `name` under `module_name`."""
    empty_var = Variable(name=name)
    modules[module_name][name] = empty_var
    ui.debug("adding variable: {!s}".format(empty_var))
def __init__(self, filename):
    """Tokenize `filename`; the resulting token list lands in self.tokens."""
    self.filename = filename
    reader = FileReader(filename)
    self.tokens = []
    self.__tokenize(reader)
    ui.debug("tokens: " + str(self.tokens))
def add_target(target):
    """Register `target` in the global registry, keyed by its name."""
    ui.debug("adding target: {!s}".format(target))
    key = target.common_parameters.name
    targets[key] = target
def add_configuration(configuration):
    """Register `configuration` in the global registry, keyed by name."""
    ui.debug("adding configuration: " + str(configuration))
    key = configuration.name
    configurations[key] = configuration
def __prepare_include_dirs_parameters(self, include_dirs):
    """Build the compiler -I flag string for `include_dirs`."""
    flags = " ".join(["-I" + directory for directory in include_dirs])
    ui.debug("include parameters: " + flags)
    return flags