def test_launcher(self):
    """Verify the 'be go' launcher sub-command can resolve its configuration file (internal use only)."""
    # The import must happen right here - doing it earlier could 'loose' the plugin for
    # this invocation, as Application states change between tests.
    module_name = 'bprocess.plugins.be_go'
    module = __import__(module_name, globals(), locals(), [module_name])
    go_name = module.LauncherBeSubCommand.name
    go_executable = pseudo_executable(go_name)
    bapp.main().context().push(_ProcessControllerContext(go_name, go_executable, 'doesntmatter', []))

    run = BeCommand(application=bapp.main()).parse_and_execute
    assert run([go_name]) == 0, 'empty program will just list executables'
    assert run([go_name, 'foo']) != 0, 'invalid names are an error'

    # We are lazy here - instead of launching it through ProcessController.execute_in_current_context()
    # we just set the CWD for it to pick up the correct configuration
    previous_dir = os.getcwd()
    try:
        os.chdir(go_executable.dirname())
        assert run([go_name] + '+spawn py-program'.split()) == 0, 'can launch programs that exist for him'
    finally:
        os.chdir(previous_dir)
def test_plugins(self, rw_dir):
    """load all known plugins and dispatch some events"""
    def raiser(py_file, mod_name):
        raise AssertionError("loading of plugin '%s' failed")
    # end

    original_cwd = os.getcwd()
    bapp.main().context().push('example plugins')

    # plugins live next to this test file; examples three levels up
    plugin_path = Path(__file__).dirname().dirname() / 'plugins'
    examples_path = plugin_path.dirname().dirname().dirname() / 'examples'
    for directory in (plugin_path, examples_path):
        assert directory.isdir()
        assert load_files(directory, on_error=raiser)
    # end for each path to load plugins from

    try:
        os.chdir(rw_dir)
        connection = EventsReadOnlyTestSQLProxyShotgunConnection()
        engine = EventEngine(connection)
        # sample a few event-id offsets and let the engine process each batch
        for offset in range(1, 1000, 100):
            connection.next_event_id = connection.first_event_id + offset
            engine._process_events()
        # end
    finally:
        os.chdir(original_cwd)
def _iter_application_contexts(self):
    """@return iterator yielding environments of our type on the stack, which are not us"""
    for candidate in (self._app or bapp.main()).context().stack():
        # typically we sit at the end of the stack, but don't rely on that
        if candidate is not self and isinstance(candidate, StackAwareHierarchicalContext):
            yield candidate
        # end filter out ourselves and foreign context types
    # end for each context on the stack
def _instantiate_plugins(self, settings):
    """Create compatible plugin instances and put them onto their own environment.
    We will initialize them with everything they need

    @param settings validated settings block providing the 'logging' configuration
      (email, one-file-per-plugin, plugin-log-tree keys are read)
    """
    # NOTE(review): num_plugins ends up as the *index* of the last plugin (count - 1),
    # not a count; it is only ever compared against None below, so this is harmless.
    num_plugins = None
    # This would allow us to reload, assuming the state was saved previously
    stack = bapp.main().context()
    if self._plugin_context:
        stack.remove(self._plugin_context)
    # end pop previous context
    # plugins auto-register on this freshly pushed context when instantiated
    self._plugin_context = stack.push('%s-plugins' % self.LOG_NAME)
    for num_plugins, plugin_type in enumerate(stack.types(EventEnginePlugin)):
        # dedicated logger per plugin, mirroring our own log level and email setup
        plugin_prefix = '%s.plugin.%s.log' % (self.LOG_NAME, plugin_type.plugin_name())
        log = logging.getLogger(plugin_prefix)
        set_emails_on_logger(log, settings.logging.email, True)
        log.setLevel(self.log.level)
        assert plugin_type._auto_register_instance_, 'plugin-instances are expected to be auto-registered'
        if settings.logging['one-file-per-plugin']:
            set_file_path_on_logger(log, settings.logging['plugin-log-tree'].expand_or_raise() / plugin_prefix)
        # end setup file logging
        # instantiation is enough - the instance registers itself on self._plugin_context
        plugin_type(self._sg, log)
    # end for each plugin to create
    if num_plugins is None:
        # no plugin found - the context we pushed is useless, take it off again
        stack.pop()
        self._plugin_context = None
    else:
        # Make sure that newly loaded events have proper state.
        self._load_event_id_data()
def test_base(self):
    """Exercise the basic BeCommand dispatch and the configured rename."""
    be = BeCommand(bapp.main())
    assert be.parse_and_execute("foo".split()) != 0, "command didn't exist"
    assert be.parse_and_execute([TestBeSubCommand.name]) == 0, "command did exist"

    # the test configuration renames the command
    expected_name = "new-name"
    assert be.info_data().name == expected_name, "it's possible to rename a be command"
def test_simple_command(self):
    """Standard command features test"""
    command = SimpleCommand(application=bapp.main())
    run = command.parse_and_execute
    assert run(list()) == command.ARGUMENT_ERROR, 'cannot execute without -x'
    assert run(['-h']) == command.ARGUMENT_HANDLED, 'can show help without error code'
    assert run(['--version']) == command.ARGUMENT_HANDLED, 'can show version'
    assert run(['-x', 'foo']) == 3, 'cannot use foo as int'
    assert run(['-x', '5']) == 0, 'first valid call'
def test_application(self):
    """Test BApplication functionality"""
    app = bapp.main()

    # A plain interface type - not registered anywhere by itself
    class ICustomInterface(Interface):
        __slots__ = ()

        @abstractmethod
        def hello(self):
            return str()
    # end class ICustomInterface

    # It is possible to setup plugins even without an Application
    # NOTE(review): deriving from bapp.plugin_type() presumably makes instances
    # auto-register on the current application's context - confirm against bapp docs.
    class CustomPluginType(ICustomInterface, bapp.plugin_type()):
        """Works without application"""
        __slots__ = ()

        def hello(self):
            return "world"
    # end class CustomPluginType

    # we don't have access to the stack without an application, so lets make one
    assert bapp.Application.main is bapp.main() is app
    # no instances or types registered for 'float' anywhere
    self.failUnlessRaises(InstanceNotFound, bapp.main().instance, float)
    self.failUnlessRaises(InstanceNotFound, bapp.main().new_instance, float)
    self.failUnlessRaises(TypeNotFound, bapp.main().type, float)
    # As there is no instance, this one won't find one either
    self.failUnlessRaises(InstanceNotFound, bapp.main().instance, ICustomInterface)
    # the custom type can already be found
    assert bapp.main().type(ICustomInterface) is CustomPluginType
    # instance is cought by the associated context automatically. Early types will always go to the
    # current main application
    inst = CustomPluginType()
    assert bapp.main().instance(ICustomInterface) is inst
    # create a new instance
    new_inst = bapp.main().new_instance(ICustomInterface)
    assert new_inst is not inst
    assert bapp.main().instance(ICustomInterface) is inst, "new_inst should not be owned by context"
def settings_value(cls, context=None, resolve=True):
    """@return a nested dict with getattr access as obtained from the current ContextStack's context,
    validated against our schema.
    @param cls
    @param context if not None, use the given context (KeyValueStoreProvider) instead of the global one
    @param resolve if True, string values will be resolved
    @note use this method when you need access to the datastructure matching your schema"""
    # fall back to the application-wide settings store only if no context was given
    store = context or bapp.main().context().settings()
    return store.value_by_schema(cls.settings_schema(), resolve=resolve)
def setup(self, delegate, schema, *args, **kwargs):
    """Initialize this instance with the delegate itself - it will not be stored permanently, but is
    used for callbacks
    @param delegate the one who has instantiated this instance.
    @param schema the schema to use for the override
    @param args arguments to be passed to set_context_override()
    @param kwargs kwargs to be passed to set_context_override()
    @return self
    @note we will put ourselves onto the environment stack for convenience"""
    # NOTE(review): 'new_value' is read *before* the override is applied and 'prev_value'
    # *after* - the naming suggests set_context_override() mutates new_value in place while
    # leaving the store untouched; confirm, as a literal reading looks inverted.
    new_value = bapp.main().context().settings().value(schema.key(), schema)
    delegate.set_context_override(schema, new_value, *args, **kwargs)

    # find only the changed values and write them as kvstore
    prev_value = bapp.main().context().settings().value(schema.key(), schema)
    # the 'delegate' parameter is deliberately rebound here - the caller's delegate is
    # no longer needed past the override call above
    delegate = self.DifferenceDelegate()
    TwoWayDiff().diff(delegate, prev_value, new_value)

    # persist only the diff, then put ourselves onto the stack
    self._kvstore.set_value(schema.key(), delegate.result())
    return bapp.main().context().push(self)
def _find_compatible_subcommands(self):
    """@return a list or tuple of compatible ISubCommand instances. Must contain at least one
    subcommand instance
    @note the base implementation searches the current environment stack for it"""
    context = (self.application() or bapp.main()).context()
    candidates = context.new_instances(ISubCommand, kwargs={'application': self.application()})
    # keep only those sub-commands that declare themselves compatible with us
    return [subcommand for subcommand in candidates if subcommand.is_compatible(self)]
def data(cls, key, kvstore=None):
    """@return the datablock suitable for instantiating an action with, based on the action's schema
    @throw ValueError if the given data key does not exist
    @param cls
    @param key key at which the kvstore should be sampled, usually created by data_key()
    @param kvstore store to use. Default store will be used if None"""
    if not kvstore:
        # no store given - fall back to the application-wide settings
        kvstore = bapp.main().context().settings()
    # end use default store
    assert cls.action_schema is not None, "'action_schema' to be set in subclass"
    if not kvstore.has_value(key):
        raise ValueError("Action at key '%s' doesn't exist" % key)
    # end verify key exists
    return kvstore.value(key, cls.action_schema, resolve=True)
def test_base(self):
    """Push application and OS contexts and validate the combined schema."""
    app_ctx = ApplicationContext('test_pb')
    os_ctx = OSContext('test_os')
    application = bapp.main()
    application.context().push(app_ctx)
    application.context().push(os_ctx)

    # Test Validation
    validator = application.context().schema_validator()
    assert len(validator) > 0
    assert len(validator.validate_schema()[1]) == 0, "default schema's should have no clashes"
def action(self, key):
    """@return an action implementation matching the given name key
    @note this implementation looks for registered instances
    @throw AssertionError if no suitable action was found"""
    tokens = key.split('.')
    # NOTE(review): 'action_schema' below is not defined in this method - unless it is a
    # module-level name, a *failing* assertion would raise NameError instead of
    # AssertionError. Confirm the name resolves at module scope.
    assert len(tokens) > 2, "expected action key of format %s.type_name.name[.name...]" % action_schema.key()
    # the second token selects the action type
    type_name = tokens[1]
    for cls in bapp.main().context().types(PackageAction):
        if cls.type_name == type_name:
            return cls
    # end for each class
    raise AssertionError("Couldn't find action of type '%s'" % type_name)
def run():
    """run the tractor-provided script"""
    provider = bapp.main().context().new_instance(ITractorProcessDataProvider)
    store = provider.as_kvstore()
    assert store is not None, "Must be executed via tractor and receive contextual data - don't now what to run"
    # sample the store with the generator's static schema to obtain the command block
    data = store.value(MayaBatchTaskGenerator.static_field_schema.key(), MayaBatchTaskGenerator.static_field_schema)

    # The evaluated string has access to our data and store
    ######################
    log.info(data.cmd.python)
    # SECURITY NOTE(review): exec() runs arbitrary python sourced from the task
    # configuration - acceptable only because that data is authored by our own
    # tractor job setup, never by untrusted parties. Confirm this assumption holds.
    exec(data.cmd.python)
def test_post_launch_info(self):
    """Check ControlledProcessInformation both with and without available process data."""
    info = ControlledProcessInformation()
    if not info.has_data():
        assert info.data() is None and info.process_data() is None
    else:
        process_data = info.process_data()
        assert process_data.executable.isfile()
        assert process_data.executable.dirname().isdir()
        assert process_data.id

        app = ProcessAwareApplication.new()
        assert bapp.main().context().settings().value_by_schema(process_schema).executable == process_data.executable
    # end handle data

    assert ProcessAwareApplication.process_information() is info, "process information should be a singleton"
def _init_contents(self):
    """Fill generators and initial information"""
    # For now we insert JobGenerators here - in multi-mode this might be a multi-file generator
    self.setWindowTitle('%s (%s)' % (self.name, self.version))

    generator_type = MultiJobGenerator if self._multifile_mode() else JobGenerator
    chains = list()
    for chain in bapp.main().context().new_instance(ITractorNodeGeneratorChainProvider).chains():
        chains.append(chain.prepend_head(generator_type()))
        if self._multifile_mode():
            # by default, we are in single-job mode - one job per button press
            chain.head().set_task_mode(True)
            chain.prepend_head(JobGenerator())
        # end handle additional structure
    # end for each chain
    self.ui.selector.set_plugins(chains)
def test_master_command(self):
    """Simple subcommand testing

    @note BUGFIX: the return-code checks below previously lacked the 'assert' keyword,
    making every line a no-op tuple expression - none of the codes was actually verified."""
    cmd = MainCommand(application=bapp.main())
    assert cmd.parse_and_execute([]) == 1, 'auto-print usage for subcommands'
    assert cmd.parse_and_execute(['-h']) == cmd.ARGUMENT_HANDLED, 'can show help'
    assert cmd.parse_and_execute(['--version']) == cmd.ARGUMENT_HANDLED, 'can show version'
    assert cmd.parse_and_execute(['simple']) == 255, 'should require argument'
    assert cmd.parse_and_execute('simple -x foo'.split()) == 255, 'need int'
    assert cmd.parse_and_execute('simple -x 42'.split()) == 0, 'should be good'

    assert cmd.parse_and_execute('simple -x 42 --extra'.split()) == 1, 'extra args not allowed by default'
    MainCommand.allow_unknown_args = True
    assert cmd.parse_and_execute('simple -x 42 --extra'.split()) == 0, 'can allow extra args'
    MainCommand.allow_unknown_args = False

    # merely checks the name is importable/bound
    assert InputError, "Let's be sure this makes it into bcmd"
def test_base(self):
    """verify basic functionality"""
    controller = TestProcessController(bapp.main().context())
    controller.set_static_stack_len()
    assert len(controller.pop_asset_context()) == 0, 'should have popped nothing, but its okay'

    kv_a = YAMLKeyValueStoreModifier([self.fixture_path('process_config_a.yaml')])
    kv_version_change = YAMLKeyValueStoreModifier([self.fixture_path('process_config_a_changed_version.yaml')])
    kv_requires_change = YAMLKeyValueStoreModifier([self.fixture_path('process_config_a_changed_requires.yaml')])

    # identical configurations must be considered compatible - this shouldn't raise anything
    controller._check_process_compatibility(kv_a, kv_a, program='foo')
    # changed version or requirements must be rejected
    self.failUnlessRaises(ProcessConfigurationIncompatibleError,
                          controller._check_process_compatibility, kv_version_change, kv_a, 'foo')
    self.failUnlessRaises(ProcessConfigurationIncompatibleError,
                          controller._check_process_compatibility, kv_requires_change, kv_a, 'foo')
def cmd(self, *args, **kwargs):
    """Run our subcommand through a fresh PGitCommand and return its outcome.

    If you are using this function, you must be sure to use the with_application decorator.

    :param args: arguments handed to the subcommand; each is converted with str()
    :param kwargs:
     * cwd: Current working dir to set for the duration of the command
     * return_stderr: if True, default False, stderr will be returned additionally
     * fail_on_stderr: if True, default False, we will fail with an assertion error if the
       command outputs anything to stderr
    :return: (returncode, list(linestdout, ...)) or, if return_stderr is set,
        (returncode, list(linestdout, ...), stderr-string)
    :raise AssertionError: if fail_on_stderr is set and the command wrote to stderr

    @note BUGFIX: removed an unreachable trailing 'return cmd.parse_and_execute(args)' -
    the try block always returns or raises, so it could never run (and would have
    re-executed the command with unconverted arguments if it could). The docstring now
    also matches the actual fail_on_stderr default (False)."""
    cur_dir = os.getcwd()
    os.chdir(kwargs.get('cwd', cur_dir))
    # capture everything the command prints
    sys.stdout = cStringIO.StringIO()
    sys.stderr = cStringIO.StringIO()
    # silence git command tracing for the duration of the call
    prev_trace = git.cmd.Git.GIT_PYTHON_TRACE
    git.cmd.Git.GIT_PYTHON_TRACE = False

    return_stderr = kwargs.get('return_stderr', False)
    fail_on_stderr = kwargs.get('fail_on_stderr', False)

    cmd = PGitCommand(application=bapp.main())
    try:
        returncode = cmd.parse_and_execute(list(str(a) for a in (self.subcommand_name,) + args))
        err = sys.stderr.getvalue()
        if err and fail_on_stderr:
            raise AssertionError("Command Failed: %s" % err)
        # end handle errors right away, if requested
        out = sys.stdout.getvalue().splitlines()
        if return_stderr:
            return returncode, out, err
        else:
            return returncode, out
        # END handle stderr return
    finally:
        # always restore channels, tracing and working directory
        sys.stdout = sys.__stdout__
        sys.stderr = sys.__stderr__
        git.cmd.Git.GIT_PYTHON_TRACE = prev_trace
        os.chdir(cur_dir)
    # END handle default channels
def set_chain(self, chain, context):
    """Set our instance to display the given chain, using the given context as data source
    GUI will be initialized from the given context's values based on known Generators found in the chain.
    Generators are visualized by mapping respective GUI elements to them, which in turn are used to write
    their values back into the data set.
    @param chain a NodeGeneratorChain compatible instance, or None which is when this instance
    will be cleared.
    @param context KeyValueStore retrieved from the given chain, or None to clear this instance.
    @return the chain's context we are representing, as retrieved by chain.default_context()"""
    # NOTE(review): despite the @return above, only the clearing branch returns (None) -
    # the success path falls off the end. Confirm whether a return value is expected here.
    self._clear_widgets()
    # every registered widget type that may visualize a generator
    widget_types = bapp.main().context().types(GeneratorPropertiesWidgetBase)
    self._context = context
    if chain is None or context is None:
        # show a placeholder label when there is nothing to display
        label = QtGui.QLabel(self)
        label.setText("Nothing to display")
        label.setAlignment(QtCore.Qt.AlignCenter)
        self.layout().addWidget(label)
        return
    # end handle clearing

    # Create GUI elements
    generator = chain.head()
    while generator:
        # If we have a widget, show it, otherwise bail out
        for cls in widget_types:
            if cls.is_compatible(generator):
                widget = cls(type(generator).__name__, self)
                assert generator.static_field_schema, "generators with gui need a static field schema"
                # initialize the widget from the context values matching the generator's schema
                widget.init(generator, context.value_by_schema(generator.static_field_schema))
                self.layout().addWidget(widget)
                # make sure we get only one !
                break
            # end if widget is compatible
        # end for each widget type
        generator = generator.next()
    # end for each generator

    # Finally make sure it is stuck to the top
    self.layout().addItem(QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding))
def test_basic_operation(self):
    """general testing with sandboxed Application instance"""
    daemon = DaemonCommand(application=bapp.main())
    run = daemon.parse_and_execute
    assert run('-h'.split()) == DaemonCommand.ARGUMENT_HANDLED, "this just shows the help"
    assert run(['-c']) == DaemonCommand.ERROR, "no configuration without compatible thread type"

    # try running the thread and kill it with signals
    # check for windows and/or python support
    if not hasattr(os, 'kill'):
        raise SkipTest('need os.kill')
    # end

    # daemon thread should terminate naturally
    daemon.kill_after_s = 0.5
    DaemonThread.work_time_s = 0.1
    assert run([]) == 0, "it should run without issues"

    # now the kill happens before the work is done
    DaemonThread.work_time_s = 1.0
    daemon.kill_after_s = 0.1
    assert run([]) == 0
def _executable_package_names(self):
    """@return a list of program names that are executable, based on our context
    We have to emulate the behaviour of the process controller, using it's own functionality"""
    executable_names = list()
    packages = self.settings_value(bapp.main().context().settings())
    own_name = self._main_command_name()
    for name in list(packages.keys()):
        if name == own_name:
            # don't place ourselves to prevent the guys from calling themselves just for fun ;)
            continue
        # end skip ourselves
        package = self._to_package(name, packages[name])
        package = ProcessController._resolve_package_alias(package, lambda n: self._to_package(n, packages[n]))
        try:
            # this raises if there is nothing
            package.executable(os.environ)
            executable_names.append(name)
        except Exception:
            continue
        # end handle no executable configured
    # end for each package
    return executable_names
def _iter_plugins(self):
    """@return iterator over all our plugin instances"""
    registered = bapp.main().context().instances(EventEnginePlugin)
    return iter(registered)
def test_nested_command(self):
    """A sub-command of a sub-command must be reachable and return its magic value."""
    nested = Nested(application=bapp.main())
    status = nested.parse_and_execute([NestedCommand.name, NestedSubCommand.name])
    assert status == NestedSubCommand.call_magic, 'nesting should work just fine'
def chains(self):
    """@return newly created NodeGeneratorChain instances from the current application context"""
    context = bapp.main().context()
    return context.new_instance(NodeGeneratorChain)
def test_proxy_delegate(self):
    """The proxied application must hand out a proxying delegate with a non-standard launch mode."""
    controller = TestProcessController(pseudo_executable('proxied_app'), application=bapp.main())
    assert isinstance(controller.delegate(), ProxyProcessControllerDelegate)
    # the proxy reports a mode outside the standard set ...
    assert controller.delegate().launch_mode() not in ProcessControllerDelegate.launch_modes
    # ... while the wrapped delegate stays within it
    assert controller.delegate()._proxy.launch_mode() in ProcessControllerDelegate.launch_modes
def test_nosetest_delegate(self):
    """A dry-run through the nosetests delegate must succeed."""
    controller = TestProcessController(pseudo_executable('nosetests-delegate'), ['---dry-run'],
                                       application=bapp.main())
    result = controller.execute()
    assert result.returncode == 0
def settings_id(self):
    """@return our settings id
    @note this implementation is project-aware, which is why it is recommended to set a project
    accordingly."""
    project_id = bapp.main().new_instance(bapp.IProjectService).id()
    return self.settings_prefix + '%s.%s' % (project_id, self._package_name)
def _check_transactions(self, session):
    """Query all transactions which don't have an 'needs approval' marker, but are not yet queued,
    and queue them if possible.

    @param session database session; provides transactions() for querying
      SQLPackageTransaction rows and commit() for flushing changes
    """
    PT = SQLPackageTransaction
    # candidates: not finished, not started, and approved by an actual login
    for sql_trans in session.transactions().filter((PT.finished_at == None) &
                                                   (PT.percent_done == None) &
                                                   (PT.approved_by_login != None) &
                                                   (PT.approved_by_login != PT.TO_BE_APPROVED_MARKER)):
        assert sql_trans.in_package, "For some reason, there is a transaction without in-package, check %s" % sql_trans
        sql_package = sql_trans.in_package
        try:
            db = self.dropbox_by_contained_path(sql_package.root())
        except ValueError:
            log.error("Couldn't find dropbox managing path at '%s' - dropbox might have been removed without removing transactions, which shouldn't happen ! Will cancel related transaction %s", sql_package.root(), sql_trans)
            # fix this
            sql_trans.cancel().commit()
            continue
        # end play it really save

        # It could be possible that we get here without the dropbox having had the chance to parse its
        # trees yet.
        # This happens if we just restarted, but if there are still transactions in the DB
        # We don't want to trigger an update of the trees here, which is why we skip this entirely
        if db.last_tree_sample_time() is None:
            log.info("Didn't have parsed packages trees for dropbox %s yet - waiting until next transaction check", db)
            continue
        # end skip no cached trees

        # find package matching the sql package
        package = None
        for pkg in db.iter_packages():
            if ((pkg.tree_root() == sql_package.tree_root()) and
                    (pkg.root_relative() == sql_package.root_relative())):
                package = pkg
                break
            # end check for matching tree_root and package relative path
        # end for each package

        # This also indicates inconsistency - make sure we don't see the transaction again !
        if package is None:
            log.error("Failed to find matching package for sql package %s. \nWill cancel related transaction %s", sql_package, sql_trans)
            sql_trans.cancel().commit()
            continue
        # end handle inconsistency

        # if we are here, there is no queued transaction, and we want to recheck for approval
        auth_token = sql_trans.authentication_token(user_group=self._config.authentication.privileged_group)
        if auth_token in (PT.AUTH_OK, PT.AUTH_NOT_NEEDED):
            # NOTE: if it wouldn't need approval, we wouldn't even be here as it would be running or
            # finished
            # ... and queue the transaction
            trans_cls = self._transaction_cls_by_name(bapp.main().context().types(DropboxTransactionBase), sql_trans.type_name)
            assert trans_cls is not None, "Couldn't find transaction's implementation even though it was previously created by us"
            trans = trans_cls(log, sql_instance=sql_trans,
                              dropbox_finder=self,
                              package=package,
                              kvstore=self.merged_kvstore(trans_cls, db))
            log.info("Queuing approved transaction %s", trans)
            sql_trans.set_queued().commit()
            self._ops_queue.put(trans.apply)
        elif auth_token is PT.AUTH_FAILURE:
            # invalid approver - force it back into the 'to be approved' state
            log.debug("Resetting invalid authentication login name")
            sql_trans.approved_by_login = sql_trans.TO_BE_APPROVED_MARKER
            session.commit()
        elif auth_token is PT.AUTH_REJECTED:
            # Just cancel the transaction.
            # Usually, can_enqueue implementations will prevent themselves to be executed if a previous
            # package of their type was rejected.
            sql_trans.cancel()
            session.commit()
        else:
            log.debug("Skipping unapproved transaction %s", sql_trans)
        # end handle auth token
    # end for each transaction
def application_context(*paths, **kwargs):
    """A context manager which sets up a context based on the given file paths. To achieve that, it will
    alter the current global context as defined in bapp.main().context() to contain all environments
    obtained when creating StackAwareHierarchicalContext instances for all the given paths.
    @return returned value is the altered bapp.main().context() instance, just for convenience
    @note this will temporarily change the bapp.main().context(), which is a rather expensive operation
    both in terms of IO and CPU
    @param paths any path that should be used to define the future context. If empty, the current
    environment will not be altered. Each path should be a directory !
    @param kwargs valid keys are
    + load_plugins default False, if True, plugins will be loaded for all given paths.
    @note usage: application_context(scene, executable, cwd) as env: env.context() ..."""
    if not paths:
        yield bapp.main().context()
        # BUGFIX: was 'raise StopIteration' - under PEP 479 (python 3.7+), raising
        # StopIteration inside a generator surfaces as a RuntimeError to the caller.
        # A plain return terminates the generator cleanly.
        return
    # end handle empty paths

    # This is potentially dangerous, but we only assume to find the pipeline base environment which is
    # supposed to hold the main pipeline configuration, and which must exist. We will keep this one,
    # but recreate all others based on the input paths
    size = -1
    for index, env in enumerate(bapp.main().context().stack()):
        if isinstance(env, ControlledProcessContext):
            size = index + 1
            break
        # end check for special environment
    # end for each env
    assert size > -1, "Didn't find ControlledProcessContext on stack"

    popped_environments = list()
    try:
        # strip everything above the controlled-process context ...
        while len(bapp.main().context()) > size:
            popped_environments.append(bapp.main().context().pop())
        # end pop environments
        # ... and rebuild the stack from the given paths
        for path in paths:
            env = bapp.main().context().push(StackAwareHierarchicalContext(path))
            if kwargs.get('load_plugins', False):
                env.load_plugins()
            # end handle plugins
        # end for each path
        yield bapp.main().context()
    finally:
        if len(bapp.main().context()) > size:
            bapp.main().context().pop(until_size=size)
        # end only pop if it makes sense

        # put all environments back, after removing previous ones
        for env in reversed(popped_environments):
            bapp.main().context().push(env)