def _get_build_flags_build_communicate_cb(self, subprocess, result, task):
    """
    Completes the asynchronous request to get the build flags from the
    make helper subprocess.

    On success, sets ``task.build_flags`` to a shell-parsed argv list and
    completes the task with True; any failure completes the task with a
    GLib.Error wrapping the exception's repr.
    """
    try:
        _, stdout, stderr = subprocess.communicate_utf8_finish(result)
        # The helper prints "KEY = VALUE" lines; collect them into a dict.
        info = {}
        for line in stdout.split('\n'):
            if '=' in line:
                k, v = line.split('=', 1)
                info[k.strip()] = v.strip()
        # Select the flag variable matching the task's source language.
        if task.type == _TYPE_C:
            flags = info.get('CFLAGS', '') + " " + info.get('INCLUDES', '')
        elif task.type == _TYPE_CPLUSPLUS:
            flags = info.get('CXXFLAGS', '') + " " + info.get(
                'INCLUDES', '')
        else:
            # Unknown task type; handled by the except block below.
            raise RuntimeError
        # Shell-split the combined flag string into an argv list.
        _, build_flags = GLib.shell_parse_argv(flags)
        task.build_flags = build_flags
        task.return_boolean(True)
    except Exception as ex:
        print(repr(ex))
        task.return_error(GLib.Error(message=repr(ex)))
def _get_build_flags_build_communicate_cb(self, subprocess, result, task):
    """
    Completes the asynchronous request to get the build flags from the
    make helper subprocess.

    Parses "KEY = VALUE" lines from the helper's stdout, picks the flag
    variable for the task's language, and finishes the task with a
    shell-parsed argv list (or a GLib.Error on failure).
    """
    try:
        _, stdout, stderr = subprocess.communicate_utf8_finish(result)

        # Collect "KEY = VALUE" lines emitted by the helper.
        info = {}
        for entry in stdout.split('\n'):
            if '=' in entry:
                key, _sep, value = entry.partition('=')
                info[key.strip()] = value.strip()

        # Choose the make variable that matches the task language.
        if task.type == _TYPE_C:
            flag_var = 'CFLAGS'
        elif task.type == _TYPE_CPLUSPLUS:
            flag_var = 'CXXFLAGS'
        else:
            raise RuntimeError

        combined = info.get(flag_var, '') + " " + info.get('INCLUDES', '')

        # Shell-split into an argv-style list before handing back.
        _, build_flags = GLib.shell_parse_argv(combined)
        task.build_flags = build_flags
        task.return_boolean(True)
    except Exception as ex:
        print(repr(ex))
        task.return_error(GLib.Error(message=repr(ex)))
def _vmUpOperation(self):
    """Bring the virtual machine up.

    Registers VM resources with the org.fpemud.VirtService D-Bus service,
    allocates network and port resources, spawns the qemu process and
    connects the QMP control channel.  On any failure, partially created
    state is torn down via _vmDownOperation and the exception re-raised.
    """
    assert self.vmPid is None
    dbusObj = dbus.SystemBus().get_object('org.fpemud.VirtService', '/org/fpemud/VirtService')
    try:
        self.maxDriveId = 0
        self.vmTmpDir = tempfile.mkdtemp(prefix="lightbox.vm.")
        # Allocate a resource set, then look up its per-user object path.
        self.vsVmResSetId = dbusObj.NewVmResSet(
            dbus_interface='org.fpemud.VirtService')
        resSetObj = dbus.SystemBus().get_object(
            'org.fpemud.VirtService',
            '/org/fpemud/VirtService/%d/VmResSets/%d' % (os.getuid(), self.vsVmResSetId))
        if self.vmCfg.networkAdapterInterface == "virtio":
            # resSetObj.AddTapIntf(self.vmEnv.getVirtioNetworkType())
            resSetObj.AddTapIntf(
                "nat", dbus_interface='org.fpemud.VirtService.VmResSet')
        self.vsVmId = dbusObj.AttachVm(
            self.vmDir, self.vsVmResSetId,
            dbus_interface='org.fpemud.VirtService')
        # Cache the network identity assigned by the service.
        self.vsTapIfName = resSetObj.GetTapIntf(
            dbus_interface='org.fpemud.VirtService.VmResSet')
        self.vsMacAddr = resSetObj.GetVmMacAddr(
            dbus_interface='org.fpemud.VirtService.VmResSet')
        self.vsIpAddr = resSetObj.GetVmIpAddr(
            dbus_interface='org.fpemud.VirtService.VmResSet')
        # Pick free TCP ports for the SPICE display and the QMP monitor.
        self.spicePort = FvpUtil.getFreeSocketPort(
            "tcp", self.param.spicePortStart, self.param.spicePortEnd)
        self.qmpPort = FvpUtil.getFreeSocketPort("tcp")
        qemuCmd = self._generateQemuCommand()
        # qemu is spawned with the VM directory as cwd; restore cwd after.
        mycwd = os.getcwd()
        os.chdir(self.vmDir)
        try:
            targc, targv = GLib.shell_parse_argv(qemuCmd)
            ret = GLib.spawn_async(targv, flags=GLib.SpawnFlags.DO_NOT_REAP_CHILD)
            self.vmPid = ret[0]
            self.vmPidWatch = GLib.child_watch_add(self.vmPid, self.onVmExit)
            time.sleep(
                1)  # fixme: should change fvp_vm_view, repeat connect
        finally:
            os.chdir(mycwd)
        self.qmpObj = qmp.QmpClient()
        self.qmpObj.connect_tcp("127.0.0.1", self.qmpPort)
        self.state = FvpVmObject.STATE_POWER_ON
        self.notify("state")
    except:
        # Best-effort teardown of whatever was created, then re-raise.
        self._vmDownOperation(True, dbusObj)
        raise
def build_flags_thread():
    """Worker thread: resolve compile flags for ``task.ifile``.

    Reads the meson-generated compile_commands.json; for .vala files not
    found there, falls back to ninja's 'compdb' tool.  Completes ``task``
    with either ``build_flags`` set or a GLib.Error.
    """
    try:
        with open(commands_file) as f:
            # FIX: json.loads() lost its `encoding` keyword in Python 3.9
            # (passing it raises TypeError); the file object already
            # yields decoded text, so it was never needed.
            commands = json.loads(f.read())
    except (json.JSONDecodeError, FileNotFoundError, UnicodeDecodeError) as e:
        task.return_error(
            GLib.Error('Failed to decode meson json: {}'.format(e)))
        return

    infile = task.ifile.get_path()
    # Look for an exact match of the file in the compilation database.
    for c in commands:
        filepath = path.normpath(path.join(c['directory'], c['file']))
        if filepath == infile:
            try:
                task.build_flags = extract_flags(c['command'], builddir)
            except GLib.Error as e:
                task.return_error(e)
                return
            break

    if infile.endswith('.vala'):
        # We didn't find anything in the compile_commands.json, so now try to use
        # the compdb from ninja and see if it has anything useful for us.
        ninja = None
        for name in _NINJA_NAMES:
            if runtime.contains_program_in_path(name):
                ninja = name
                break
        if ninja:
            ret = execInRuntime(runtime, ninja, '-t', 'compdb', 'vala_COMPILER',
                                directory=builddir)
            try:
                # Same fix as above: no `encoding` keyword.
                commands = json.loads(ret)
            except Exception as e:
                task.return_error(
                    GLib.Error('Failed to decode ninja json: {}'.format(e)))
                return

            for c in commands:
                try:
                    _, argv = GLib.shell_parse_argv(c['command'])
                    # TODO: It would be nice to filter these arguments a bit,
                    # but the vala plugin should handle that fine.
                    task.build_flags = argv
                    task.return_boolean(True)
                    return
                except:
                    # Best-effort: skip entries whose command fails to parse.
                    pass

    print('Meson: Warning: No flags found')
    task.return_boolean(True)
def validate_exec_line(self, string):
    """Return True when *string* parses as a shell command line whose
    executable is either findable on PATH or an executable non-directory
    path; False otherwise (including on parse errors)."""
    try:
        success, parsed = GLib.shell_parse_argv(string)
        program = parsed[0]
        # On PATH?
        if GLib.find_program_in_path(program):
            return True
        # Explicit path to an executable (but not a directory)?
        if (not os.path.isdir(program)) and os.access(program, os.X_OK):
            return True
    except:
        pass
    return False
def do_load(self, pipeline):
    """Attach PHPize bootstrap/configure/build/install stages to *pipeline*.

    Does nothing unless the project's build system is PHPizeBuildSystem.
    """
    context = pipeline.get_context()
    build_system = context.get_build_system()

    # Only attach stages when this really is a PHPize project.
    if type(build_system) != PHPizeBuildSystem:
        return

    config = pipeline.get_configuration()
    runtime = config.get_runtime()

    srcdir = pipeline.get_srcdir()
    builddir = pipeline.get_builddir()

    # Bootstrap by calling phpize in the source directory
    bootstrap_launcher = pipeline.create_launcher()
    bootstrap_launcher.push_argv('phpize')
    bootstrap_launcher.set_cwd(srcdir)
    bootstrap_stage = Ide.BuildStageLauncher.new(context, bootstrap_launcher)
    bootstrap_stage.set_name(_("Bootstrapping project"))
    # Skip bootstrap when ./configure already exists.
    bootstrap_stage.set_completed(os.path.exists(os.path.join(srcdir, 'configure')))
    self.track(pipeline.connect(Ide.BuildPhase.AUTOGEN, 0, bootstrap_stage))

    # Configure the project using autoconf. We run from builddir.
    config_launcher = pipeline.create_launcher()
    config_launcher.set_flags(Gio.SubprocessFlags.STDIN_PIPE |
                              Gio.SubprocessFlags.STDOUT_PIPE |
                              Gio.SubprocessFlags.STDERR_PIPE)
    config_launcher.push_argv(os.path.join(srcdir, 'configure'))
    config_launcher.push_argv("--prefix={}".format(config.get_prefix()))
    config_opts = config.get_config_opts()
    if config_opts:
        # User-provided extra options are shell-split into argv form.
        _, config_opts = GLib.shell_parse_argv(config_opts)
        config_launcher.push_args(config_opts)
    config_stage = Ide.BuildStageLauncher.new(context, config_launcher)
    config_stage.set_name(_("Configuring project"))
    self.track(pipeline.connect(Ide.BuildPhase.CONFIGURE, 0, config_stage))

    # Build the project using make.
    build_launcher = pipeline.create_launcher()
    build_launcher.push_argv('make')
    if config.props.parallelism > 0:
        build_launcher.push_argv('-j{}'.format(config.props.parallelism))
    clean_launcher = pipeline.create_launcher()
    clean_launcher.push_argv('make')
    clean_launcher.push_argv('clean')
    build_stage = Ide.BuildStageLauncher.new(context, build_launcher)
    build_stage.set_name(_("Building project"))
    build_stage.set_clean_launcher(clean_launcher)
    build_stage.connect('query', self._query)
    self.track(pipeline.connect(Ide.BuildPhase.BUILD, 0, build_stage))

    # Use "make install" to install the project.
    install_launcher = pipeline.create_launcher()
    install_launcher.push_argv('make')
    install_launcher.push_argv('install')
    install_stage = Ide.BuildStageLauncher.new(context, install_launcher)
    install_stage.set_name(_("Installing project"))
    self.track(pipeline.connect(Ide.BuildPhase.INSTALL, 0, install_stage))
def do_load(self, pipeline):
    """Attach PHPize bootstrap/configure/build/install stages to *pipeline*.

    Newer Ide API variant (PipelineStageLauncher / PipelinePhase / attach).
    Does nothing unless the project's build system is PHPizeBuildSystem.
    """
    context = pipeline.get_context()
    build_system = Ide.BuildSystem.from_context(context)

    # Only attach stages when this really is a PHPize project.
    if type(build_system) != PHPizeBuildSystem:
        return

    config = pipeline.get_config()
    runtime = config.get_runtime()

    srcdir = pipeline.get_srcdir()
    builddir = pipeline.get_builddir()

    # Bootstrap by calling phpize in the source directory
    bootstrap_launcher = pipeline.create_launcher()
    bootstrap_launcher.push_argv('phpize')
    bootstrap_launcher.set_cwd(srcdir)
    bootstrap_stage = Ide.PipelineStageLauncher.new(context, bootstrap_launcher)
    bootstrap_stage.set_name(_("Bootstrapping project"))
    # Skip bootstrap when ./configure already exists.
    bootstrap_stage.set_completed(os.path.exists(os.path.join(srcdir, 'configure')))
    self.track(pipeline.attach(Ide.PipelinePhase.AUTOGEN, 0, bootstrap_stage))

    # Configure the project using autoconf. We run from builddir.
    config_launcher = pipeline.create_launcher()
    config_launcher.set_flags(Gio.SubprocessFlags.STDIN_PIPE |
                              Gio.SubprocessFlags.STDOUT_PIPE |
                              Gio.SubprocessFlags.STDERR_PIPE)
    config_launcher.push_argv(os.path.join(srcdir, 'configure'))
    config_launcher.push_argv("--prefix={}".format(config.get_prefix()))
    config_opts = config.get_config_opts()
    if config_opts:
        # User-provided extra options are shell-split into argv form.
        _, config_opts = GLib.shell_parse_argv(config_opts)
        config_launcher.push_args(config_opts)
    config_stage = Ide.PipelineStageLauncher.new(context, config_launcher)
    config_stage.set_name(_("Configuring project"))
    self.track(pipeline.attach(Ide.PipelinePhase.CONFIGURE, 0, config_stage))

    # Build the project using make.
    build_launcher = pipeline.create_launcher()
    build_launcher.push_argv('make')
    if config.props.parallelism > 0:
        build_launcher.push_argv('-j{}'.format(config.props.parallelism))
    clean_launcher = pipeline.create_launcher()
    clean_launcher.push_argv('make')
    clean_launcher.push_argv('clean')
    build_stage = Ide.PipelineStageLauncher.new(context, build_launcher)
    build_stage.set_name(_("Building project"))
    build_stage.set_clean_launcher(clean_launcher)
    build_stage.connect('query', self._query)
    self.track(pipeline.attach(Ide.PipelinePhase.BUILD, 0, build_stage))

    # Use "make install" to install the project.
    install_launcher = pipeline.create_launcher()
    install_launcher.push_argv('make')
    install_launcher.push_argv('install')
    install_stage = Ide.PipelineStageLauncher.new(context, install_launcher)
    install_stage.set_name(_("Installing project"))
    self.track(pipeline.attach(Ide.PipelinePhase.INSTALL, 0, install_stage))
def build_flags_thread():
    """Worker thread: resolve compile flags for ``task.ifile``.

    Looks the file up in the meson-generated compile_commands.json
    (headers are matched against a sibling compilable file); for .vala
    files not found there, falls back to ninja's 'compdb' tool.
    Completes ``task`` with ``build_flags`` set or a GLib.Error.
    """
    try:
        with open(commands_file) as f:
            # FIX: json.loads() lost its `encoding` keyword in Python 3.9
            # (passing it raises TypeError); the file object already
            # yields decoded text, so it was never needed.
            commands = json.loads(f.read())
    except (json.JSONDecodeError, FileNotFoundError, UnicodeDecodeError) as e:
        task.return_error(GLib.Error('Failed to decode meson json: {}'.format(e)))
        return

    infile = task.ifile.get_path()

    # If this is a header file we want the flags for a C/C++/Objc file.
    # (Extensions Match GtkSourceViews list)
    is_header = infile.endswith(('.h', '.hpp', '.hh', '.h++', '.hp'))
    if is_header:
        # So just try to find a compilable file with the same prefix as
        # that is *probably* correct.
        infile = infile.rpartition('.')[0] + '.'

    for c in commands:
        filepath = path.normpath(path.join(c['directory'], c['file']))
        if (is_header is False and filepath == infile) or \
           (is_header is True and filepath.startswith(infile)):
            try:
                task.build_flags = extract_flags(c['command'], builddir)
            except GLib.Error as e:
                task.return_error(e)
                return
            break

    if infile.endswith('.vala'):
        # We didn't find anything in the compile_commands.json, so now try to use
        # the compdb from ninja and see if it has anything useful for us.
        ninja = None
        for name in _NINJA_NAMES:
            if runtime.contains_program_in_path(name):
                ninja = name
                break
        if ninja:
            ret = execInRuntime(runtime, ninja, '-t', 'compdb', 'vala_COMPILER',
                                directory=builddir)
            try:
                # Same fix as above: no `encoding` keyword.
                commands = json.loads(ret)
            except Exception as e:
                task.return_error(GLib.Error('Failed to decode ninja json: {}'.format(e)))
                return

            for c in commands:
                try:
                    _, argv = GLib.shell_parse_argv(c['command'])
                    # TODO: It would be nice to filter these arguments a bit,
                    # but the vala plugin should handle that fine.
                    task.build_flags = argv
                    task.return_boolean(True)
                    return
                except:
                    # Best-effort: skip entries whose command fails to parse.
                    pass

    Ide.debug('No flags found for file', infile)
    task.return_boolean(True)
def exec_line_is_valid(self, exec_text):
    """Return True when *exec_text* is a shell-parsable command line
    whose program (first token) can be found on PATH."""
    try:
        _, argv = GLib.shell_parse_argv(exec_text)
        # Make sure program (first part of the command) is in the path
        return GLib.find_program_in_path(argv[0]) is not None
    except GLib.GError:
        return False
def do_load(self, pipeline):
    """Attach waf configure/build/install stages to *pipeline*.

    Does nothing unless the project's build system is WafBuildSystem.
    Also caches the sniffed python interpreter and the local-waf flag on
    the build system for the targets provider.
    """
    context = self.get_context()
    build_system = Ide.BuildSystem.from_context(context)
    srcdir = pipeline.get_srcdir()
    config = pipeline.get_config()
    config_opts = config.get_config_opts()

    # Ignore pipeline unless this is a waf project
    if type(build_system) != WafBuildSystem:
        return

    waf = os.path.join(srcdir, 'waf')
    # Detect which python interpreter the waf script wants.
    self.python = sniff_python_version(waf)

    # If waf is in project directory use that
    self.waf_local = os.path.isfile(waf)

    # Avoid sniffing again later in targets provider
    build_system.python = self.python
    build_system.waf_local = self.waf_local

    # Launcher for project configuration
    config_launcher = self._create_launcher(pipeline)
    config_launcher.set_cwd(srcdir)
    config_launcher.push_argv('configure')
    config_launcher.push_argv('--prefix=%s' % config.get_prefix())
    if config_opts:
        try:
            # Shell-split user options into an argv list; best-effort.
            ret, argv = GLib.shell_parse_argv(config_opts)
            config_launcher.push_args(argv)
        except Exception as ex:
            print(repr(ex))
    self.track(pipeline.attach_launcher(Ide.PipelinePhase.CONFIGURE, 0, config_launcher))

    # Now create our launcher to build the project
    build_launcher = self._create_launcher(pipeline)
    build_launcher.set_cwd(srcdir)
    build_launcher.push_argv('build')

    clean_launcher = self._create_launcher(pipeline)
    clean_launcher.set_cwd(srcdir)
    clean_launcher.push_argv('clean')

    build_stage = Ide.PipelineStageLauncher.new(context, build_launcher)
    build_stage.set_name(_("Building project…"))
    build_stage.set_clean_launcher(clean_launcher)
    build_stage.connect('query', self._query)
    self.track(pipeline.attach(Ide.PipelinePhase.BUILD, 0, build_stage))

    install_launcher = self._create_launcher(pipeline)
    install_launcher.set_cwd(srcdir)
    install_launcher.push_argv('install')

    install_stage = Ide.PipelineStageLauncher.new(context, install_launcher)
    install_stage.set_name(_("Installing project…"))
    install_stage.connect('query', self._query)
    self.track(pipeline.attach(Ide.PipelinePhase.INSTALL, 0, install_stage))
def extract_flags(command: str, builddir: str):
    """Shell-parse *command* and keep only the compiler flags of interest.

    ``-I`` paths are rewritten to absolute paths (they are relative to
    *builddir*).  Raises GLib.GError when the command cannot be parsed.
    """
    _, argv = GLib.shell_parse_argv(command)  # Raises on failure
    kept = []
    for arg in argv:
        if arg.startswith('-I'):
            # All include paths are relative to the build directory.
            absolute = path.normpath(path.join(builddir, arg[2:]))
            kept.append('-I' + absolute)
        elif arg.startswith(('-isystem', '-W', '-D', '-std')):
            kept.append(arg)
    return kept
def extract_flags(command: str):
    """Shell-parse *command* and keep only the compiler flags of interest.

    ``-I`` paths are rewritten to absolute paths (they are relative to
    the build directory).  Raises GLib.GError when parsing fails.
    """
    _, argv = GLib.shell_parse_argv(command)  # Raises on failure
    build_dir = self._get_build_dir().get_path()
    kept = []
    for arg in argv:
        if arg.startswith('-I'):
            # All include paths are relative to the build directory.
            absolute = path.normpath(path.join(build_dir, arg[2:]))
            kept.append('-I' + absolute)
        elif arg.startswith(('-isystem', '-W', '-D')):
            kept.append(arg)
    return kept
def find_program(program):
    """Resolve *program* (a shell command line) to an executable path.

    Returns the explicit path when it exists, otherwise a PATH lookup
    result, otherwise None.  Empty/whitespace input yields None.
    """
    program = program.strip()
    if not program:
        return None
    argv = list(GLib.shell_parse_argv(program)[1])
    executable = argv[0]
    # An explicitly given existing path wins over a PATH lookup.
    if os.path.exists(executable):
        return executable
    # find_program_in_path already returns None when not found.
    return GLib.find_program_in_path(executable)
def check_exec(self, program):
    """Return True when *program* (a shell command line) names either an
    existing path or a PATH-findable executable; False otherwise."""
    program = program.strip()
    if not program:
        return False
    argv = list(GLib.shell_parse_argv(program)[1])
    candidate = argv[0]
    # An explicitly given existing path counts immediately.
    if os.path.exists(candidate):
        return True
    return GLib.find_program_in_path(candidate) is not None
def run_dialog(self):
    """Run the startup-entry dialog until valid input or cancellation.

    Returns a dict with keys "name", "command", "comment", "delay" and
    "done"; "done" is True only when the user confirmed a valid command.
    The dialog is destroyed before returning.
    """
    retval = {"done": False}
    error_msg = None
    while self.run() == Gtk.ResponseType.OK:
        name = self.name_entry.get_text()
        command = self.command_entry.get_text()
        comment = self.comment_entry.get_text()
        delay = str(int(self.spin.get_value()))
        error_msg = None
        # Validate the command: non-empty and shell-parsable.
        if command == "" or command.isspace():
            error_msg = _("The startup command cannot be empty")
        else:
            try:
                success, argv = GLib.shell_parse_argv(command)
            except GLib.GError as e:
                if e:
                    error_msg = e.message
                else:
                    error_msg = _("The startup command is not valid")
        if error_msg is not None:
            # Show the validation error, then let the user edit again.
            msg_box = Gtk.MessageDialog(self, 0, Gtk.MessageType.ERROR,
                                        Gtk.ButtonsType.CANCEL,
                                        "%s" % error_msg)
            error_msg = None
            msg_box.run()
            msg_box.destroy()
            continue
        if name == "" or name.isspace():
            # Default the display name to the parsed executable.
            retval["name"] = argv[0]
        else:
            retval["name"] = name
        retval["command"] = command
        retval["comment"] = comment
        retval["delay"] = delay
        retval["done"] = True
        break
    self.destroy()
    return retval
def on_ping_clicked(self, button):
    """Start a ping subprocess for the entered address and watch its stdout.

    Builds the ping command from the dialog widgets, spawns the process
    with piped stdout, toggles the Ping/Stop buttons, and registers an
    IO watch that feeds ``self.read_output``.
    """
    requests = self.builder.get_object("requests")
    radio = self.builder.get_object("requests_num")
    address = self.builder.get_object("address")
    # (removed unused local alias of self.treeview)

    # Create a new tree model with five columns for ping output.
    store = Gtk.ListStore.new((str, str, str, str, str))
    self.treeview.set_model(store)

    # Retrieve the current ping parameters entered by the user.
    num = requests.get_value_as_int()
    location = address.get_text()

    # Return if an address was not entered into the GtkEntry widget.
    if len(location) == 0:
        return
    # Otherwise, build the command based upon the user's preferences.
    elif radio.get_active():
        if num == 0:
            return
        command = "ping " + location + " -c " + str(num)
    else:
        command = "ping " + location

    # Parse the command and launch the process, monitoring standard output.
    # FIX: renamed local from `bool`, which shadowed the builtin.
    parsed_ok, argvp = GLib.shell_parse_argv(command)
    if parsed_ok:
        (ret, self.cpid, fin, fout, ferr) = GLib.spawn_async_with_pipes(
            None, argvp, None, GLib.SpawnFlags.SEARCH_PATH, None, None)
        if not ret:
            print("The 'ping' instruction has failed!")
        else:
            # Disable the Ping button and enable the Stop button.
            stop = self.builder.get_object("stop")
            stop.set_sensitive(True)
            ping = self.builder.get_object("ping")
            ping.set_sensitive(False)

            # Create a new IO channel and monitor it for data to read.
            channel = GLib.IOChannel.unix_new(fout)
            channel.add_watch(
                GLib.IOCondition.IN | GLib.IOCondition.ERR | GLib.IOCondition.HUP,
                self.read_output, None)
def _vmUpOperation(self):
    """Bring the virtual machine up.

    Registers VM resources with the org.fpemud.VirtService D-Bus service,
    allocates network and port resources, spawns the qemu process and
    connects the QMP control channel.  On any failure, partially created
    state is torn down via _vmDownOperation and the exception re-raised.
    """
    assert self.vmPid is None
    dbusObj = dbus.SystemBus().get_object('org.fpemud.VirtService', '/org/fpemud/VirtService')
    try:
        self.maxDriveId = 0
        self.vmTmpDir = tempfile.mkdtemp(prefix="lightbox.vm.")
        # Allocate a resource set, then look up its per-user object path.
        self.vsVmResSetId = dbusObj.NewVmResSet(dbus_interface='org.fpemud.VirtService')
        resSetObj = dbus.SystemBus().get_object('org.fpemud.VirtService', '/org/fpemud/VirtService/%d/VmResSets/%d' % (os.getuid(), self.vsVmResSetId))
        if self.vmCfg.networkAdapterInterface == "virtio":
            # resSetObj.AddTapIntf(self.vmEnv.getVirtioNetworkType())
            resSetObj.AddTapIntf("nat", dbus_interface='org.fpemud.VirtService.VmResSet')
        self.vsVmId = dbusObj.AttachVm(self.vmDir, self.vsVmResSetId, dbus_interface='org.fpemud.VirtService')
        # Cache the network identity assigned by the service.
        self.vsTapIfName = resSetObj.GetTapIntf(dbus_interface='org.fpemud.VirtService.VmResSet')
        self.vsMacAddr = resSetObj.GetVmMacAddr(dbus_interface='org.fpemud.VirtService.VmResSet')
        self.vsIpAddr = resSetObj.GetVmIpAddr(dbus_interface='org.fpemud.VirtService.VmResSet')
        # Pick free TCP ports for the SPICE display and the QMP monitor.
        self.spicePort = FvpUtil.getFreeSocketPort("tcp", self.param.spicePortStart, self.param.spicePortEnd)
        self.qmpPort = FvpUtil.getFreeSocketPort("tcp")
        qemuCmd = self._generateQemuCommand()
        # qemu is spawned with the VM directory as cwd; restore cwd after.
        mycwd = os.getcwd()
        os.chdir(self.vmDir)
        try:
            targc, targv = GLib.shell_parse_argv(qemuCmd)
            ret = GLib.spawn_async(targv, flags=GLib.SpawnFlags.DO_NOT_REAP_CHILD)
            self.vmPid = ret[0]
            self.vmPidWatch = GLib.child_watch_add(self.vmPid, self.onVmExit)
            time.sleep(1)  # fixme: should change fvp_vm_view, repeat connect
        finally:
            os.chdir(mycwd)
        self.qmpObj = qmp.QmpClient()
        self.qmpObj.connect_tcp("127.0.0.1", self.qmpPort)
        self.state = FvpVmObject.STATE_POWER_ON
        self.notify("state")
    except:
        # Best-effort teardown of whatever was created, then re-raise.
        self._vmDownOperation(True, dbusObj)
        raise
def do_load(self, pipeline):
    """Attach cargo fetch/build/clean stages to *pipeline*.

    Does nothing unless the project's build system is CargoBuildSystem.
    """
    context = self.get_context()
    build_system = Ide.BuildSystem.from_context(context)

    # Ignore pipeline unless this is a cargo project
    if type(build_system) != CargoBuildSystem:
        return

    project_file = build_system.props.project_file
    # project_file may be the project directory; resolve to Cargo.toml.
    if project_file.get_basename() != 'Cargo.toml':
        project_file = project_file.get_child('Cargo.toml')

    cargo_toml = project_file.get_path()
    config = pipeline.get_config()
    builddir = pipeline.get_builddir()
    runtime = config.get_runtime()
    config_opts = config.get_config_opts()

    # We might need to use cargo from ~/.cargo/bin
    cargo = locate_cargo_from_config(config)

    # Fetch dependencies so that we no longer need network access
    fetch_launcher = pipeline.create_launcher()
    fetch_launcher.setenv('CARGO_TARGET_DIR', builddir, True)
    fetch_launcher.push_argv(cargo)
    fetch_launcher.push_argv('fetch')
    fetch_launcher.push_argv('--manifest-path')
    fetch_launcher.push_argv(cargo_toml)
    self.track(pipeline.attach_launcher(Ide.PipelinePhase.DOWNLOADS, 0, fetch_launcher))

    # Now create our launcher to build the project
    build_launcher = pipeline.create_launcher()
    build_launcher.setenv('CARGO_TARGET_DIR', builddir, True)
    build_launcher.push_argv(cargo)
    build_launcher.push_argv('rustc')
    build_launcher.push_argv('--manifest-path')
    build_launcher.push_argv(cargo_toml)
    build_launcher.push_argv('--message-format')
    build_launcher.push_argv('human')

    # Cross-compile when the pipeline targets a foreign host.
    if not pipeline.is_native():
        build_launcher.push_argv('--target')
        build_launcher.push_argv(pipeline.get_host_triplet().get_full_name())

    if config.props.parallelism > 0:
        build_launcher.push_argv('-j{}'.format(config.props.parallelism))

    if not config.props.debug:
        build_launcher.push_argv('--release')

    # Configure Options get passed to "cargo rustc" because there is no
    # equivalent "configure stage" for cargo.
    if config_opts:
        try:
            # Shell-split user options into an argv list; best-effort.
            ret, argv = GLib.shell_parse_argv(config_opts)
            build_launcher.push_args(argv)
        except Exception as ex:
            print(repr(ex))

    clean_launcher = pipeline.create_launcher()
    clean_launcher.setenv('CARGO_TARGET_DIR', builddir, True)
    clean_launcher.push_argv(cargo)
    clean_launcher.push_argv('clean')
    clean_launcher.push_argv('--manifest-path')
    clean_launcher.push_argv(cargo_toml)

    build_stage = Ide.PipelineStageLauncher.new(context, build_launcher)
    build_stage.set_name(_("Building project"))
    build_stage.set_clean_launcher(clean_launcher)
    build_stage.connect('query', self._query)
    self.track(pipeline.attach(Ide.PipelinePhase.BUILD, 0, build_stage))
def do_load(self, pipeline):
    """Attach waf configure/build/install stages to *pipeline*.

    Does nothing unless the project's build system is WafBuildSystem.
    Also caches the sniffed python interpreter and the local-waf flag on
    the build system for the targets provider.
    """
    context = self.get_context()
    build_system = Ide.BuildSystem.from_context(context)
    srcdir = pipeline.get_srcdir()
    config = pipeline.get_config()
    config_opts = config.get_config_opts()

    # Ignore pipeline unless this is a waf project
    if type(build_system) != WafBuildSystem:
        return

    waf = os.path.join(srcdir, 'waf')
    # Detect which python interpreter the waf script wants.
    self.python = sniff_python_version(waf)

    # If waf is in project directory use that
    self.waf_local = os.path.isfile(waf)

    # Avoid sniffing again later in targets provider
    build_system.python = self.python
    build_system.waf_local = self.waf_local

    # Launcher for project configuration
    config_launcher = self._create_launcher(pipeline)
    config_launcher.set_cwd(srcdir)
    config_launcher.push_argv('configure')
    config_launcher.push_argv('--prefix=%s' % config.get_prefix())
    if config_opts:
        try:
            # Shell-split user options into an argv list; best-effort.
            ret, argv = GLib.shell_parse_argv(config_opts)
            config_launcher.push_args(argv)
        except Exception as ex:
            print(repr(ex))
    self.track(
        pipeline.attach_launcher(Ide.PipelinePhase.CONFIGURE, 0, config_launcher))

    # Now create our launcher to build the project
    build_launcher = self._create_launcher(pipeline)
    build_launcher.set_cwd(srcdir)
    build_launcher.push_argv('build')

    clean_launcher = self._create_launcher(pipeline)
    clean_launcher.set_cwd(srcdir)
    clean_launcher.push_argv('clean')

    build_stage = Ide.PipelineStageLauncher.new(context, build_launcher)
    build_stage.set_name(_("Building project…"))
    build_stage.set_clean_launcher(clean_launcher)
    build_stage.connect('query', self._query)
    self.track(pipeline.attach(Ide.PipelinePhase.BUILD, 0, build_stage))

    install_launcher = self._create_launcher(pipeline)
    install_launcher.set_cwd(srcdir)
    install_launcher.push_argv('install')

    install_stage = Ide.PipelineStageLauncher.new(context, install_launcher)
    install_stage.set_name(_("Installing project…"))
    install_stage.connect('query', self._query)
    self.track(pipeline.attach(Ide.PipelinePhase.INSTALL, 0, install_stage))
def do_load(self, pipeline):
    """Attach cargo fetch/build/clean stages to *pipeline*.

    Does nothing unless the project's build system is CargoBuildSystem.
    """
    context = self.get_context()
    build_system = Ide.BuildSystem.from_context(context)

    # Ignore pipeline unless this is a cargo project
    if type(build_system) != CargoBuildSystem:
        return

    project_file = build_system.props.project_file
    # project_file may be the project directory; resolve to Cargo.toml.
    if project_file.get_basename() != 'Cargo.toml':
        project_file = project_file.get_child('Cargo.toml')

    cargo_toml = project_file.get_path()
    config = pipeline.get_config()
    builddir = pipeline.get_builddir()
    runtime = config.get_runtime()
    config_opts = config.get_config_opts()

    # We might need to use cargo from ~/.cargo/bin
    cargo = locate_cargo_from_config(config)

    # Fetch dependencies so that we no longer need network access
    fetch_launcher = pipeline.create_launcher()
    fetch_launcher.setenv('CARGO_TARGET_DIR', builddir, True)
    fetch_launcher.push_argv(cargo)
    fetch_launcher.push_argv('fetch')
    fetch_launcher.push_argv('--manifest-path')
    fetch_launcher.push_argv(cargo_toml)
    self.track(
        pipeline.attach_launcher(Ide.PipelinePhase.DOWNLOADS, 0, fetch_launcher))

    # Now create our launcher to build the project
    build_launcher = pipeline.create_launcher()
    build_launcher.setenv('CARGO_TARGET_DIR', builddir, True)
    build_launcher.push_argv(cargo)
    build_launcher.push_argv('rustc')
    build_launcher.push_argv('--manifest-path')
    build_launcher.push_argv(cargo_toml)
    build_launcher.push_argv('--message-format')
    build_launcher.push_argv('human')

    # Cross-compile when the pipeline targets a foreign host.
    if not pipeline.is_native():
        build_launcher.push_argv('--target')
        build_launcher.push_argv(
            pipeline.get_host_triplet().get_full_name())

    if config.props.parallelism > 0:
        build_launcher.push_argv('-j{}'.format(config.props.parallelism))

    if not config.props.debug:
        build_launcher.push_argv('--release')

    # Configure Options get passed to "cargo rustc" because there is no
    # equivalent "configure stage" for cargo.
    if config_opts:
        try:
            # Shell-split user options into an argv list; best-effort.
            ret, argv = GLib.shell_parse_argv(config_opts)
            build_launcher.push_args(argv)
        except Exception as ex:
            print(repr(ex))

    clean_launcher = pipeline.create_launcher()
    clean_launcher.setenv('CARGO_TARGET_DIR', builddir, True)
    clean_launcher.push_argv(cargo)
    clean_launcher.push_argv('clean')
    clean_launcher.push_argv('--manifest-path')
    clean_launcher.push_argv(cargo_toml)

    build_stage = Ide.PipelineStageLauncher.new(context, build_launcher)
    build_stage.set_name(_("Building project"))
    build_stage.set_clean_launcher(clean_launcher)
    build_stage.connect('query', self._query)
    self.track(pipeline.attach(Ide.PipelinePhase.BUILD, 0, build_stage))
def do_load(self, pipeline):
    """Attach cmake configure and ninja build/install stages to *pipeline*.

    Does nothing unless the project's build system is CMakeBuildSystem,
    or when cmake/ninja cannot be found in the runtime.
    """
    context = pipeline.get_context()
    build_system = context.get_build_system()

    # Only register stages if we are a cmake project
    if type(build_system) != CMakeBuildSystem:
        return

    config = pipeline.get_configuration()
    runtime = config.get_runtime()

    srcdir = context.get_vcs().get_working_directory().get_path()
    builddir = build_system.get_builddir(config)

    # Discover cmake in the runtime/SDK
    if not runtime.contains_program_in_path(_CMAKE):
        print("Failed to locate “cmake”. Building is disabled.")
        return

    # Discover ninja in the runtime/SDK
    ninja = None
    for name in _NINJA_NAMES:
        if runtime.contains_program_in_path(name):
            ninja = name
            break
    if ninja is None:
        print("Failed to locate ninja. CMake building is disabled.")
        return

    # Register the configuration launcher which will perform our
    # "cmake -DCMAKE_INSTALL_PREFIX=..." configuration command.
    config_launcher = pipeline.create_launcher()
    config_launcher.push_argv(_CMAKE)
    # We need the parent directory of CMakeLists.txt, not the CMakeLists.txt
    # itself (or cmake will do in-tree configuration)
    config_launcher.push_argv(
        build_system.project_file.get_parent().get_path())
    config_launcher.push_argv('-G')
    config_launcher.push_argv('Ninja')
    config_launcher.push_argv('-DCMAKE_INSTALL_PREFIX={}'.format(
        config.props.prefix))
    config_opts = config.get_config_opts()
    if config_opts:
        # Shell-split user options into an argv list.
        _, config_opts = GLib.shell_parse_argv(config_opts)
        config_launcher.push_args(config_opts)
    config_stage = Ide.BuildStageLauncher.new(context, config_launcher)
    # Skip configuration when ninja files were already generated.
    config_stage.set_completed(
        path.exists(path.join(builddir, 'build.ninja')))
    self.track(pipeline.connect(Ide.BuildPhase.CONFIGURE, 0, config_stage))

    # Register the build launcher which will perform the incremental
    # build of the project when the Ide.BuildPhase.BUILD phase is
    # requested of the pipeline.
    build_launcher = pipeline.create_launcher()
    build_launcher.push_argv(ninja)
    if config.props.parallelism > 0:
        build_launcher.push_argv('-j{}'.format(config.props.parallelism))

    clean_launcher = pipeline.create_launcher()
    clean_launcher.push_argv(ninja)
    clean_launcher.push_argv('clean')
    if config.props.parallelism > 0:
        clean_launcher.push_argv('-j{}'.format(config.props.parallelism))

    build_stage = Ide.BuildStageLauncher.new(context, build_launcher)
    build_stage.set_clean_launcher(clean_launcher)
    build_stage.set_check_stdout(True)
    build_stage.connect('query', self._query)
    self.track(pipeline.connect(Ide.BuildPhase.BUILD, 0, build_stage))

    # Register the install launcher which will perform our
    # "ninja install" when the Ide.BuildPhase.INSTALL phase
    # is requested of the pipeline.
    install_launcher = pipeline.create_launcher()
    install_launcher.push_argv(ninja)
    install_launcher.push_argv('install')
    install_stage = Ide.BuildStageLauncher.new(context, install_launcher)
    self.track(pipeline.connect(Ide.BuildPhase.INSTALL, 0, install_stage))
def do_load(self, pipeline):
    """Attach meson configure and ninja build/install stages to *pipeline*.

    Does nothing unless the project's build system is MesonBuildSystem,
    or when ninja cannot be found in the runtime.
    """
    context = pipeline.get_context()
    build_system = context.get_build_system()

    # Only register stages if we are a meson project
    if type(build_system) != MesonBuildSystem:
        return

    config = pipeline.get_configuration()
    runtime = config.get_runtime()

    srcdir = context.get_vcs().get_working_directory().get_path()
    builddir = build_system.get_builddir(config)

    # Discover ninja in the runtime/SDK
    ninja = None
    for name in _NINJA_NAMES:
        if runtime.contains_program_in_path(name):
            ninja = name
            break
    if ninja is None:
        print("Failed to locate ninja. Meson Building is disabled.")
        return

    # Register the configuration launcher which will perform our
    # "meson --prefix=..." configuration command.
    config_launcher = pipeline.create_launcher()
    config_launcher.push_argv('meson')
    config_launcher.push_argv(srcdir)
    # We will be launched from the builddir, so . is fine (as the directory
    # may be mapped somewhere else in the build runtime).
    config_launcher.push_argv('.')
    config_launcher.push_argv('--prefix={}'.format(config.props.prefix))
    config_opts = config.get_config_opts()
    if config_opts:
        # Shell-split user options into an argv list.
        _, config_opts = GLib.shell_parse_argv(config_opts)
        config_launcher.push_args(config_opts)
    config_stage = Ide.BuildStageLauncher.new(context, config_launcher)
    # Skip configuration when ninja files were already generated.
    config_stage.set_completed(path.exists(path.join(builddir, 'build.ninja')))
    self.track(pipeline.connect(Ide.BuildPhase.CONFIGURE, 0, config_stage))

    # Register the build launcher which will perform the incremental
    # build of the project when the Ide.BuildPhase.BUILD phase is
    # requested of the pipeline.
    build_launcher = pipeline.create_launcher()
    build_launcher.push_argv(ninja)
    if config.props.parallelism > 0:
        build_launcher.push_argv('-j{}'.format(config.props.parallelism))

    clean_launcher = pipeline.create_launcher()
    clean_launcher.push_argv(ninja)
    clean_launcher.push_argv('clean')
    if config.props.parallelism > 0:
        clean_launcher.push_argv('-j{}'.format(config.props.parallelism))

    build_stage = Ide.BuildStageLauncher.new(context, build_launcher)
    build_stage.set_clean_launcher(clean_launcher)
    build_stage.set_check_stdout(True)
    build_stage.connect('query', self._query)
    self.track(pipeline.connect(Ide.BuildPhase.BUILD, 0, build_stage))

    # Register the install launcher which will perform our
    # "ninja install" when the Ide.BuildPhase.INSTALL phase
    # is requested of the pipeline.
    install_launcher = pipeline.create_launcher()
    install_launcher.push_argv(ninja)
    install_launcher.push_argv('install')
    install_stage = Ide.BuildStageLauncher.new(context, install_launcher)
    install_stage.connect('query', self._query)
    self.track(pipeline.connect(Ide.BuildPhase.INSTALL, 0, install_stage))
def extract_flags(command: str):
    """Shell-parse *command* and return only the -I/-isystem/-W/-D flags.

    Raises GLib.GError when the command cannot be parsed.
    """
    _, argv = GLib.shell_parse_argv(command)  # Raises on failure
    wanted_prefixes = ('-I', '-isystem', '-W', '-D')
    return [flag for flag in argv if flag.startswith(wanted_prefixes)]