def schema():
    """Return the validation schema for a TFTP deployment action."""
    plain_url = deploy.url()
    tftp = {
        Required("to"): "tftp",
        Required("kernel", msg="needs a kernel to deploy"): deploy.url(
            {Optional("type"): Any("image", "uimage", "zimage")}
        ),
        Optional("dtb"): plain_url,
        Optional("modules"): plain_url,
        Optional("preseed"): plain_url,
        Optional("ramdisk"): deploy.url(
            {
                Optional("install_modules"): bool,
                Optional("install_overlay"): bool,
                Optional("header"): "u-boot",
            }
        ),
        # A job may provide its own NFS rootfs OR point at a persistent
        # NFS export, never both ("nfs" exclusion group).
        Exclusive("nfsrootfs", "nfs"): deploy.url(
            {
                Optional("install_modules"): bool,
                Optional("install_overlay"): bool,
                Optional("prefix"): str,
            }
        ),
        Exclusive("persistent_nfs", "nfs"): {
            Required("address"): str,
            Optional("install_overlay"): bool,
        },
    }
    merged = dict(deploy.schema())
    merged.update(tftp)
    return merged
def _timeout_schema():
    """Schema for a timeout value: exactly one time-unit key may be given."""
    units = ("days", "hours", "minutes", "seconds")
    return Schema({Exclusive(unit, 'timeout_unit'): int for unit in units})
def schema():
    """Schema for deploying image(s) to removable storage (sata/sd/usb).

    Either a single "image" or a dict of "images" (which must include the
    primary "image" entry) may be given, never both ("image" exclusion
    group).
    """
    base = {
        Required("to"): Any("sata", "sd", "usb"),
        Exclusive("images", "image"): {
            Required("image"): deploy.url(),
            Optional(str): deploy.url(),
        },
        Exclusive("image", "image"): deploy.url(),
        Required("device"): str,
        Optional("download"): {
            Required("tool"): str,
            Required("options"): str,
            Required("prompt"): str,
        },
        Optional("writer"): {
            Required("tool"): str,
            Required("options"): str,
            Required("prompt"): str,
        },
        Optional("tool"): {
            Required("prompts"): [str],
        },
        Optional("uniquify"): bool,
        # BUG FIX: deploy.schema() was previously also spread *inside* this
        # dict, which let shared deploy keys override this action's own
        # entries (e.g. any key deploy.schema() shares with base) before the
        # merge below re-applied base on top of deploy again. The return
        # statement is the single place the two schemas are combined.
    }
    return {**deploy.schema(), **base}
def _timeout_schema():
    """Timeout schema: one exclusive unit key plus an optional "skip" flag."""
    timeout = {
        Exclusive(name, "timeout_unit"): int
        for name in ("days", "hours", "minutes", "seconds")
    }
    timeout[Optional("skip")] = bool
    return Schema(timeout)
def _timeout_schema():
    """Timeout schema: at most one time unit, with an optional 'skip' flag."""
    spec = {}
    for unit in ('days', 'hours', 'minutes', 'seconds'):
        spec[Exclusive(unit, 'timeout_unit')] = int
    spec[Optional('skip')] = bool
    return Schema(spec)
class ShellProvisioner(Provisioner):
    """ Allows to perform provisioning shell operations on the host/guest sides. """

    name = 'shell'

    # 'inline' (a raw command string) and 'script' (a path to an existing
    # file) are mutually exclusive ('shelltype' group); 'side' selects where
    # the command runs and defaults to 'guest' (see the _side property).
    schema = {
        Exclusive('inline', 'shelltype'): str,
        Exclusive('script', 'shelltype'): IsFile(),
        'side': Any('guest', 'host'),
    }

    def provision_single(self, guest):
        """ Executes the shell commands in the guest container or in the host. """
        if 'script' in self.options and self._is_for_guest:
            # First case: we have to run the script inside the container. So the first step is
            # to copy the content of the script to a temporary file in the container, ensure
            # that the script is executable and then run the script.
            guest_scriptpath = os.path.join(
                '/tmp/', os.path.basename(self.options['script']))
            with open(self.homedir_expanded_path(
                    self.options['script'])) as fd:
                guest.lxd_container.files.put(guest_scriptpath, fd.read())
            guest.run(['chmod', '+x', guest_scriptpath])
            guest.run([
                guest_scriptpath,
            ], quiet=False)
        elif 'script' in self.options and self._is_for_host:
            # Second case: the script is executed on the host side.
            self.host.run([
                self.homedir_expanded_path(self.options['script']),
            ])
        elif 'inline' in self.options:
            # Final case: we run a command directly inside the container or outside.
            host_or_guest = self.host if self._side == 'host' else guest
            host_or_guest.run(['sh', '-c', self.options['inline']], quiet=False)

    def setup(self):
        # nothing to set up, avoid spurious messages with this override.
        pass

    ##################################
    # PRIVATE METHODS AND PROPERTIES #
    ##################################

    @property
    def _is_for_guest(self):
        """ Returns True if this provisioner should run on the guest side. """
        return self._side == 'guest'

    @property
    def _is_for_host(self):
        """ Returns True if this provisioner should run on the host side. """
        return self._side == 'host'

    @property
    def _side(self):
        # 'guest' is the default when the user did not specify a side.
        return self.options.get('side', 'guest')
def notify():
    """Return the validation schema for a job's "notify" section."""
    callback = {
        Required("url"): str,
        Optional("method"): Any("GET", "POST"),
        Optional("token"): str,
        Optional("header"): str,
        Optional("dataset"): Any("minimal", "logs", "results", "all"),
        Optional("content-type"): Any("json", "urlencoded"),
    }
    # Criteria either match a job status transition or a result comparison.
    criteria = Any(
        {
            Required("status"): Any(
                "finished", "running", "complete", "canceled", "incomplete"
            ),
            Optional("dependency_query"): str,
        },
        {
            Required("status"): Any("complete", "incomplete"),
            Optional("type"): Any("regression", "progression"),
        },
    )
    recipient = {
        Required("to"): {
            Required("method"): Any("email", "irc"),
            Optional("user"): str,
            Optional("email"): str,
            Optional("handle"): str,
            Optional("server"): str,
        }
    }
    compare = {
        Optional("blacklist"): [str],
        Optional("query"): Any(
            {Required("username"): str, Required("name"): str},
            {Required("entity"): str, Optional("conditions"): {str: str}},
        ),
    }
    return {
        Required("criteria"): criteria,
        Optional("verbosity"): Any("verbose", "quiet", "status-only"),
        Optional("recipients"): [recipient],
        # A single callback and a list of callbacks are mutually exclusive.
        Exclusive("callback", "callback"): callback,
        Exclusive("callbacks", "callback"): [callback],
        Optional("compare"): compare,
    }
class Senpy:
    """Validation schema for Senpy events (extra keys are allowed).

    At most one of the "SenpyGroup" params may appear in a single event.
    """

    validate = Schema(
        {
            Required("channel"): str,
            Required("event"): str,
            Required("user"): str,
            Required("params"): {
                Exclusive("SenpyEmotion", "SenpyGroup"): str,
                Exclusive("SenpySentiment", "SenpyGroup"): str,
            },
        },
        extra=ALLOW_EXTRA,
    )
def _job_notify_schema():
    """Schema for the job 'notify' section; unknown keys pass through."""
    # 'callback' (single, legacy) and 'callbacks' (list) may not coexist.
    legacy = Exclusive('callback', 'legacy_callback')
    current = Exclusive('callbacks', 'legacy_callback')
    return Schema(
        {
            Required('criteria'): _notify_criteria_schema(),
            'recipients': _recipient_schema(),
            legacy: _legacy_callback_schema(),
            current: _callback_schema(),
            'verbosity': Any('verbose', 'quiet', 'status-only'),
            'compare': _notify_compare_schema(),
        },
        extra=True,
    )
def _job_notify_schema():
    """Build the Schema validating a job's "notify" block (extra keys allowed)."""
    spec = {
        Required("criteria"): _notify_criteria_schema(),
        "recipients": _recipient_schema(),
        "verbosity": Any("verbose", "quiet", "status-only"),
        "compare": _notify_compare_schema(),
    }
    # The legacy single 'callback' and the newer 'callbacks' list are
    # mutually exclusive.
    spec[Exclusive("callback", "legacy_callback")] = _legacy_callback_schema()
    spec[Exclusive("callbacks", "legacy_callback")] = _callback_schema()
    return Schema(spec, extra=True)
class SmartPhone:
    """Smart-phone event schema (extra keys are allowed).

    Exactly one of the "SmartPhoneGroup" params may be supplied per event.
    """

    validate = Schema(
        {
            Required("channel"): str,
            Required("event"): str,
            Required("user"): str,
            Required("params"): {
                Exclusive("EventCalendar", "SmartPhoneGroup"): str,
                Exclusive("ToastNotification", "SmartPhoneGroup"): str,
                Exclusive("NavbarNotification", "SmartPhoneGroup"): str,
            },
        },
        extra=ALLOW_EXTRA,
    )
class PresenceSensor:
    """Presence-sensor event schema (extra keys are allowed).

    NOTE(review): the sensor ID is Inclusive while the readings are
    Exclusive under the same group name — voluptuous tracks inclusion and
    exclusion groups separately, so the shared name is cosmetic; confirm
    this mix is intentional.
    """

    validate = Schema(
        {
            Required("channel"): str,
            Required("event"): str,
            Required("user"): str,
            Required("params"): {
                Inclusive("PresenceSensorID", "PresenceSensorGroup"): str,
                Exclusive("PresenceDistance", "PresenceSensorGroup"): float,
                Exclusive("PresenceTime", "PresenceSensorGroup"): str,
            },
        },
        extra=ALLOW_EXTRA,
    )
class Presence:
    """Presence event schema (extra keys are allowed).

    "distance" and "time" are mutually exclusive within "grupo"; the
    sensor ID is an Inclusive marker in the same group name.
    """

    validate = Schema(
        {
            Required("channel"): str,
            Required("event"): str,
            Required("user"): str,
            Required("params"): {
                Inclusive("sensorID", "grupo"): str,
                Exclusive("distance", "grupo"): str,
                Exclusive("time", "grupo"): str,
            },
        },
        extra=ALLOW_EXTRA,
    )
class PlantSensor:
    """Plant-sensor event schema (extra keys are allowed).

    At most one float reading per event ("PlantSensorGroup" exclusion);
    the sensor ID is an Inclusive marker in the same group name.
    """

    validate = Schema(
        {
            Required("channel"): str,
            Required("event"): str,
            Required("user"): str,
            Required("params"): {
                Inclusive("PlantSensorID", "PlantSensorGroup"): str,
                Exclusive("PlantMoisture", "PlantSensorGroup"): float,
                Exclusive("PlantLight", "PlantSensorGroup"): float,
                Exclusive("PlantTemperature", "PlantSensorGroup"): float,
                Exclusive("PlantConductivity", "PlantSensorGroup"): float,
            },
        },
        extra=ALLOW_EXTRA,
    )
class SmartLight:
    """Smart-light event schema (extra keys are allowed).

    "LightBrightness" and "LightColor" are mutually exclusive; the light
    ID is always required, while the public IP and API token are optional.
    """

    validate = Schema(
        {
            Required("channel"): str,
            Required("event"): str,
            Required("user"): str,
            Required("params"): {
                Optional("LightPublicIP"): str,
                Optional("LightApiToken"): str,
                Exclusive("LightBrightness", "LightGroup"): int,
                Exclusive("LightColor", "LightGroup"): int,
                Required("LightID"): str,
            },
        },
        extra=ALLOW_EXTRA,
    )
def test_exclusive():
    """Exclusive markers: at most one key from the group may be present."""
    schema = Schema({
        Exclusive('x', 'stuff'): int,
        Exclusive('y', 'stuff'): int,
    })
    # Zero or one key from the group validates unchanged.
    assert_equal(schema({}), {})
    assert_equal(schema({'x': 1}), {'x': 1})
    # Two keys from the same group must be rejected.
    try:
        schema({'x': 1, 'y': 2})
    except MultipleInvalid as e:
        assert_equal(
            str(e),
            "two or more values in the same group of exclusion 'stuff' @ data[<stuff>]")
    else:
        assert False, "Did not raise Invalid for multiple values in Exclusive group"
def test_description():
    """Every Marker subtype should expose the description it was given."""
    for wrapped in (
        Marker(Schema(str), description='Hello'),
        Optional('key', description='Hello'),
        Exclusive('alpha', 'angles', description='Hello'),
        Required('key', description='Hello'),
    ):
        assert wrapped.description == 'Hello'
class HumiditySensor:
    """Humidity-sensor event schema (extra keys are allowed).

    The sensor ID is an Inclusive marker and the reading an Exclusive one,
    both under "HumiditySensorGroup".
    """

    validate = Schema(
        {
            Required("channel"): str,
            Required("event"): str,
            Required("user"): str,
            Required("params"): {
                Inclusive("HumiditySensorID", "HumiditySensorGroup"): str,
                Exclusive("HumidityLevel", "HumiditySensorGroup"): float,
            },
        },
        extra=ALLOW_EXTRA,
    )
Optional('dependencies'): task_description_schema['dependencies'], Optional('soft-dependencies'): task_description_schema['soft-dependencies'], Optional('requires'): task_description_schema['requires'], Optional('expires-after'): task_description_schema['expires-after'], Optional('routes'): task_description_schema['routes'], Optional('scopes'): task_description_schema['scopes'], Optional('tags'): task_description_schema['tags'], Optional('extra'): task_description_schema['extra'], Optional('treeherder'): task_description_schema['treeherder'], Optional('index'): task_description_schema['index'], Optional('run-on-projects'): task_description_schema['run-on-projects'], Optional('shipping-phase'): task_description_schema['shipping-phase'], Optional('shipping-product'): task_description_schema['shipping-product'], Optional('coalesce'): task_description_schema['coalesce'], Optional('always-target'): task_description_schema['always-target'], Exclusive('optimization', 'optimization'): task_description_schema['optimization'], Optional('needs-sccache'): task_description_schema['needs-sccache'], Optional('release-artifacts'): task_description_schema['release-artifacts'], Optional('priority'): task_description_schema['priority'], # The "when" section contains descriptions of the circumstances under which # this task should be included in the task graph. This will be converted # into an optimization, so it cannot be specified in a job description that # also gives 'optimization'. Exclusive('when', 'optimization'): { # This task only needs to be run if a file matching one of the given # patterns has changed in the push. The patterns use the mozpack # match function (python/mozbuild/mozpack/path.py). Optional('files-changed'): [text_type], },
def list_dict_option_schema(for_collection, plugin_type):
    """Build the schema validating a plugin's documented 'options' mapping.

    Returns a list of {<string type>: option-schema} dicts, one per entry
    in string_types, suitable for use with Any() over the options dict.
    """
    if plugin_type == 'module':
        # Modules use the argument-spec type vocabulary.
        option_types = Any(None, 'bits', 'bool', 'bytes', 'dict', 'float',
                           'int', 'json', 'jsonarg', 'list', 'path', 'raw',
                           'sid', 'str')
        element_types = option_types
    else:
        # Non-module plugins accept a wider, config-style type vocabulary.
        option_types = Any(None, 'boolean', 'bool', 'integer', 'int', 'float',
                           'list', 'dict', 'dictionary', 'none', 'path', 'tmp',
                           'temppath', 'tmppath', 'pathspec', 'pathlist',
                           'str', 'string', 'raw')
        element_types = Any(None, 'boolean', 'bool', 'integer', 'int',
                            'float', 'list', 'dict', 'dictionary', 'path',
                            'str', 'string', 'raw')

    # Fields every documented option may carry, regardless of source.
    basic_option_schema = {
        Required('description'): doc_string_or_strings,
        'required': bool,
        'choices': list,
        'aliases': Any(list_string_types),
        'version_added': version(for_collection),
        'version_added_collection': collection_name,
        'default': json_value,
        # Note: Types are strings, not literal bools, such as True or False
        'type': option_types,
        # in case of type='list' elements define type of individual item in list
        'elements': element_types,
    }
    if plugin_type != 'module':
        # Non-module plugin options additionally document their name.
        basic_option_schema['name'] = Any(*string_types)

    deprecated_schema = All(
        Schema(
            All(
                {
                    # This definition makes sure everything has the correct types/values
                    'why': doc_string,
                    'alternatives': doc_string,
                    # vod stands for 'version or date'; this is the name of the exclusive group
                    Exclusive('removed_at_date', 'vod'): date(),
                    Exclusive('version', 'vod'): version(for_collection),
                    'collection_name': collection_name,
                },
                {
                    # This definition makes sure that everything we require is there
                    Required('why'): Any(*string_types),
                    'alternatives': Any(*string_types),
                    Required(Any('removed_at_date', 'version')): Any(*string_types),
                    Required('collection_name'): Any(*string_types),
                },
            ),
            extra=PREVENT_EXTRA
        ),
        # Cross-check the removal version against the collection name.
        partial(check_removal_version, version_field='version',
                collection_name_field='collection_name',
                error_code='invalid-removal-version')
    )

    # One schema per configuration source an option can be read from; each
    # additionally runs the version_added check.
    env_schema = All(
        Schema({
            Required('name'): Any(*string_types),
            'deprecated': deprecated_schema,
            'version_added': version(for_collection),
            'version_added_collection': collection_name,
        }, extra=PREVENT_EXTRA),
        partial(version_added, error_code='option-invalid-version-added')
    )
    ini_schema = All(
        Schema({
            Required('key'): Any(*string_types),
            Required('section'): Any(*string_types),
            'deprecated': deprecated_schema,
            'version_added': version(for_collection),
            'version_added_collection': collection_name,
        }, extra=PREVENT_EXTRA),
        partial(version_added, error_code='option-invalid-version-added')
    )
    vars_schema = All(
        Schema({
            Required('name'): Any(*string_types),
            'deprecated': deprecated_schema,
            'version_added': version(for_collection),
            'version_added_collection': collection_name,
        }, extra=PREVENT_EXTRA),
        partial(version_added, error_code='option-invalid-version-added')
    )
    cli_schema = All(
        Schema({
            Required('name'): Any(*string_types),
            'option': Any(*string_types),
            'deprecated': deprecated_schema,
            'version_added': version(for_collection),
            'version_added_collection': collection_name,
        }, extra=PREVENT_EXTRA),
        partial(version_added, error_code='option-invalid-version-added')
    )
    keyword_schema = All(
        Schema({
            Required('name'): Any(*string_types),
            'deprecated': deprecated_schema,
            'version_added': version(for_collection),
            'version_added_collection': collection_name,
        }, extra=PREVENT_EXTRA),
        partial(version_added, error_code='option-invalid-version-added')
    )

    basic_option_schema.update({
        'env': [env_schema],
        'ini': [ini_schema],
        'vars': [vars_schema],
        'cli': [cli_schema],
        'keyword': [keyword_schema],
        'deprecated': deprecated_schema,
    })

    suboption_schema = dict(basic_option_schema)
    suboption_schema.update({
        # Recursive suboptions
        'suboptions': Any(None, *list({str_type: Self} for str_type in string_types)),
    })
    suboption_schema = Schema(suboption_schema, extra=PREVENT_EXTRA)

    # This generates list of dicts with keys from string_types and suboption_schema value
    # for example in Python 3: {str: suboption_schema}
    list_dict_suboption_schema = [{str_type: suboption_schema}
                                  for str_type in string_types]

    option_schema = dict(basic_option_schema)
    option_schema.update({
        'suboptions': Any(None, *list_dict_suboption_schema),
    })
    option_schema = Schema(option_schema, extra=PREVENT_EXTRA)

    # Separate pass that only checks version_added, recursing into
    # suboptions; ALLOW_EXTRA because option_schema already checks shape.
    option_version_added = Schema(
        All({
            'suboptions': Any(None, *[{str_type: Self} for str_type in string_types]),
        }, partial(version_added, error_code='option-invalid-version-added')),
        extra=ALLOW_EXTRA
    )

    # This generates list of dicts with keys from string_types and option_schema value
    # for example in Python 3: {str: option_schema}
    return [{str_type: All(option_schema, option_version_added)}
            for str_type in string_types]
task_description_schema['extra'], Optional('treeherder'): task_description_schema['treeherder'], Optional('index'): task_description_schema['index'], Optional('run-on-projects'): task_description_schema['run-on-projects'], Optional('shipping-phase'): task_description_schema['shipping-phase'], Optional('shipping-product'): task_description_schema['shipping-product'], Optional('coalesce'): task_description_schema['coalesce'], Optional('always-target'): task_description_schema['always-target'], Exclusive('optimization', 'optimization'): task_description_schema['optimization'], Optional('needs-sccache'): task_description_schema['needs-sccache'], Optional('release-artifacts'): task_description_schema['release-artifacts'], # The "when" section contains descriptions of the circumstances under which # this task should be included in the task graph. This will be converted # into an optimization, so it cannot be specified in a job description that # also gives 'optimization'. Exclusive('when', 'optimization'): Any({ # This task only needs to be run if a file matching one of the given # patterns has changed in the push. The patterns use the mozpack # match function (python/mozbuild/mozpack/path.py).
from tdt.validators import SchemaCheck from tdt.validators.job_file.filter_validator import FilterValidator from tdt.defaults.colors import colors # Debugging from prettyprinter import pprint as pp # Creating a label supports a few options... more complex than Basic, but nowhere near as elaborate as Filtered! _create_obj = { # User is REQUIRED to indicate from where we'll name the project(s) # User can create a project either by name *or* from a search (currently, only filter results) Required('from'): { # When using a simple name, we just need a string Exclusive('name', 'project.name.source'): Any(str, Length(min=1)), # When using filter(s) as the source of the project(s) name, validation is a bit more complex. # So for now, we just make sure that only name or filters is provided and if filters is provided # that the user has given us a list of at least one object. Later, we'll validate each object ## Exclusive('filters', 'project.name.source'): All(list, Length(min=1)), }, # Must be between 30 and 40 or one of a fwe strings # See: https://developer.todoist.com/sync/v8/#colors # See: https://github.com/VoIlAlex/todoist-colors Optional('color', default=None): Any( In( list(colors.values())+list(colors.keys())
Optional('build-signing-label'): basestring, # the build's attributes 'build-attributes': {basestring: object}, # the platform on which the tests will run 'test-platform': basestring, # the name of the test (the key in tests.yml) 'test-name': basestring, # the product name, defaults to firefox Optional('product'): basestring, # conditional files to determine when these tests should be run Exclusive(Optional('when'), 'optimization'): Any({ Optional('files-changed'): [basestring], }), # Optimization to perform on this task during the optimization phase. # Optimizations are defined in taskcluster/taskgraph/optimize.py. Exclusive(Optional('optimization'), 'optimization'): OptimizationSchema, # The SCHEDULES component for this task; this defaults to the suite # (not including the flavor) but can be overridden here. Exclusive(Optional('schedules-component'), 'optimization'): basestring, Optional('worker-type'): optionally_keyed_by( 'test-platform', Any(basestring, None), ),
}, # the platform on which the tests will run 'test-platform': basestring, # the name of the test (the key in tests.yml) 'test-name': basestring, # the product name, defaults to firefox Optional('product'): basestring, # conditional files to determine when these tests should be run Exclusive(Optional('when'), 'optimization'): Any({ Optional('files-changed'): [basestring], }), # Optimization to perform on this task during the optimization phase. # Optimizations are defined in taskcluster/taskgraph/optimize.py. Exclusive(Optional('optimization'), 'optimization'): OptimizationSchema, # The SCHEDULES component for this task; this defaults to the suite # (not including the flavor) but can be overridden here. Exclusive(Optional('schedules-component'), 'optimization'): basestring, Optional('worker-type'): optionally_keyed_by(
All(Coerce(int), Range(0, 65535), msg='background must be int between 0 - 65,535'), 'napodize': All(Coerce(int), Range(0, 400), msg='napodize must be int between 0 - 400'), 'nzblend': All(Coerce(int), Range(0, 100), msg='nzblend must be int between 0 - 100'), 'NA': All(Coerce(float), Range(0.2, 1.33), msg='NA must be float between 0.2 - 1.33'), Exclusive('RL', 'iterations'): All(Coerce(int), Range(0, 30), msg='RL (nIters) must be int between 0 - 30'), Exclusive('nIters', 'iterations'): All(Coerce(int), Range(0, 30), msg='RL (nIters) must be int between 0 - 30'), 'deskew': All(Coerce(float), Range(-180, 180), msg='deskew angle must be float between -180 and 180'), 'width': All(Coerce(int), Range(0, 2000), msg='width must be int between 0 - 2000'),
Optional("if-dependencies"): task_description_schema["if-dependencies"], Optional("soft-dependencies"): task_description_schema["soft-dependencies"], Optional("if-dependencies"): task_description_schema["if-dependencies"], Optional("requires"): task_description_schema["requires"], Optional("expires-after"): task_description_schema["expires-after"], Optional("routes"): task_description_schema["routes"], Optional("scopes"): task_description_schema["scopes"], Optional("tags"): task_description_schema["tags"], Optional("extra"): task_description_schema["extra"], Optional("treeherder"): task_description_schema["treeherder"], Optional("index"): task_description_schema["index"], Optional("run-on-projects"): task_description_schema["run-on-projects"], Optional("shipping-phase"): task_description_schema["shipping-phase"], Optional("shipping-product"): task_description_schema["shipping-product"], Optional("always-target"): task_description_schema["always-target"], Exclusive("optimization", "optimization"): task_description_schema[ "optimization" ], Optional("use-sccache"): task_description_schema["use-sccache"], Optional("release-artifacts"): task_description_schema["release-artifacts"], Optional("priority"): task_description_schema["priority"], # The "when" section contains descriptions of the circumstances under which # this task should be included in the task graph. This will be converted # into an optimization, so it cannot be specified in a job description that # also gives 'optimization'. Exclusive("when", "optimization"): { # This task only needs to be run if a file matching one of the given # patterns has changed in the push. The patterns use the mozpack # match function (python/mozbuild/mozpack/path.py). Optional("files-changed"): [text_type], },
class Fakear:
    """
    The main class, the one that creates fake programs from a yaml
    configuration file or a dict.

    Fakear instances can be initialized by 2 ways:
     - Fakear(cfg="cfg_path/cfg_file.yml")  # from a YAML file
     - Fakear(rawdata=fakedata)             # from a raw YAML string

    Configuration should look like this:

    ---
    __command1_name__:
    # Default fake program that output "I am a fake binary !"

    __command2_name__:
      # Sub command with args
      - args:
          - first_arg
          - sec_arg
        return_code: 0
        output: This is an example of fake command

      # Sub command with no args
      - return_code: -1
        output: This is a fake program, please give the correct arguments

    You can declare multiple commands with multiple behaviour in each of
    them at once but you need to match the correct signature.
    """

    # Top-level mapping: alphanumeric command name -> list of behaviours (or None).
    __validate_file = Schema({Match(r'^[A-Za-z0-9]+$'): Any(list, None)},
                             extra=ALLOW_EXTRA)
    # Each behaviour: optional args list, mandatory return code, and either
    # an inline 'output' or an 'output_file' (mutually exclusive).
    __validate_args = Schema([{
        Optional('args'): list,
        Required('return_code'): int,
        Exclusive('output', 'output'): str,
        Exclusive('output_file', 'output'): str
    }])

    def __init__(self, cfg=None, rawdata=None, path="/tmp/fakear/bin"):
        """Load configuration from `cfg` (YAML file path) OR `rawdata`
        (YAML text) — never both; `path` is where fake binaries are written."""
        self.__log = logging.getLogger('fakear')
        self.__fakedcmds = {}
        self.__enabled = False
        self.__faked_path = path
        self.__cfg_paths = []
        self.__search_for_interpreter()
        # No configuration at all is allowed: the instance simply stays empty.
        if all([not cfg, not rawdata]):
            return
        if all([cfg, rawdata]):
            raise FakearMultipleSourceException()
        if cfg:
            rawdata = self.__add_configuration(cfg)
        data = self.__validate_file(yaml.safe_load(rawdata))
        self.__load_fake_cmds(data)

    # Properties
    @property
    def commands(self):
        """
        Returns a dict with all faked programs embedded in this Fakear instance
        Can be set at instantiation
        """
        return self.__fakedcmds

    @property
    def faked_path(self):
        """
        Returns the path used to store fake programs generated by Fakear
        Default is: /tmp/fakear/bin
        Use self.set_path() when instance is disabled to modify
        """
        return self.__faked_path

    @property
    def shell(self):
        """
        Returns the shell path Fakear will use for making fake programs
        """
        return self.__shell

    # Private method
    def __search_for_interpreter(self):
        # Resolve the absolute path of bash via `which`, stripping newlines.
        process = check_output(["which", "bash"])
        self.__shell = process.decode().replace("\n", "")
        return self.__shell

    def __load_fake_cmds(self, data):
        # Validate each command's behaviour list; a bare command maps to [].
        for cmd, args in data.items():
            if args:
                self.__fakedcmds[cmd] = self.__validate_args(args)
            else:
                self.__fakedcmds[cmd] = []

    def __add_configuration(self, filepath):
        # Remember the config file's directory so relative 'output_file'
        # entries can be resolved later, then return the raw YAML text.
        if "/" in filepath:
            path = "/".join(filepath.split("/")[:-1])
            self.__cfg_paths.append(path)
        with open(filepath) as cfg:
            rawdata = cfg.read()
        return rawdata

    def __search_for_file(self, filepath):
        # Look for `filepath` relative to every known config directory.
        self.__log.debug("checking availability of file {}".format(filepath))
        for path in self.__cfg_paths:
            tmp_path = os.path.join(path, filepath)
            self.__log.debug(" checking {}".format(tmp_path))
            if os.path.exists(tmp_path):
                self.__log.debug(" {} found !".format(tmp_path))
                return tmp_path
        raise FakearFileNotFound()

    def __write_binaries(self):
        # Generate one shell script per faked command. Behaviours with the
        # most args come first so the generated if/elif chain matches the
        # most specific signature before falling back.
        for command, subcmds in self.__fakedcmds.items():
            subs = sorted(subcmds,
                          key=lambda subcmd: len(subcmd.get('args', [])),
                          reverse=True)
            prg = []
            for sub in subs:
                sub_extract = sub.get('args', [])
                # Pair each expected arg with its 1-based positional index.
                zipped_subs = list(
                    zip(range(1, len(sub_extract) + 1), sub_extract))
                sub_args = {
                    'length': len(zipped_subs),
                    'arg_line': " && ".join([
                        '"${{{arg}}}" = "{value}"'.format(arg=arg[0],
                                                          value=arg[1])
                        for arg in zipped_subs
                    ])
                }
                if sub_args['arg_line']:
                    if not prg:
                        prg.append(templates.SH_IF.format(**sub_args))
                    else:
                        prg.append(templates.SH_ELIF.format(**sub_args))
                else:
                    # A no-args behaviour becomes the final `else` branch.
                    if prg:
                        prg.append(templates.SH_ELSE)
                if "output_file" in sub.keys():
                    # Copy the referenced file next to the fake binary so
                    # the generated script can use it at run time.
                    output_path = os.path.join(self.__faked_path,
                                               "{}_files".format(command))
                    if not os.path.exists(output_path):
                        os.makedirs(output_path)
                    out_file = sub.get('output_file', None)
                    if out_file:
                        src_filepath = self.__search_for_file(out_file)
                        src_filename = src_filepath.split("/")[-1]
                        sub['output_file'] = os.path.join(
                            output_path, src_filename)
                        copyfile(src_filepath, sub['output_file'])
                    prg.append(templates.SH_OUTPUT_FILE.format(**sub))
                else:
                    prg.append(templates.SH_OUTPUT.format(**sub))
            if len(prg) > 1:
                prg.append(templates.SH_FI)
            if not prg:
                # Command with no behaviours: emit the default fake body.
                prg.append(templates.SH_DEFAULT)
            self.__write_file(command, prg)

    def __write_file(self, command, prg):
        # Write the generated script with a shebang header and make it
        # executable by everyone.
        filepath = os.path.join(self.faked_path, command)
        with open(filepath, 'w+') as prgfile:
            header = templates.SH_HEADER.format(shell_path=self.__shell)
            prgfile.writelines(header)
            prgfile.writelines(prg)
        os.chmod(filepath, 0o777)

    def __enable_path(self):
        # Prepend the fake-binaries directory to PATH (idempotent).
        if self.__faked_path not in os.environ["PATH"]:
            os.environ["PATH"] = '{}:{}'.format(self.__faked_path,
                                                os.environ["PATH"])

    def __disable_path(self):
        # Drop every PATH entry containing the fake-binaries directory.
        if self.__faked_path in os.environ["PATH"]:
            path = ":".join([
                p for p in os.environ["PATH"].split(":")
                if self.__faked_path not in p
            ])
            os.environ['PATH'] = path

    # Context Manager
    def __enter__(self):
        self.enable()
        return self

    def __exit__(self, exception_type, exception_val, trace):
        self.disable()

    # API
    def set_path(self, path):
        """
        Set a new path where fake programs would be generated and invoked
        Path is not modifiable when this instance is enabled or used
        inside a context
        """
        if not self.__enabled:
            self.__faked_path = path

    def enable(self):
        """
        Enable this Fakear instance:
          - Create the path for fake programs
          - Write programs according to the configuration data you provide
          - Adds fakear path to env PATH variable

        When an instance is enabled, you can't modify data inside
        """
        if not os.path.exists(self.__faked_path):
            os.makedirs(self.__faked_path)
        self.__write_binaries()
        self.__enable_path()
        self.__enabled = True

    def disable(self):
        """
        Disable this Fakear instance and clean everything
        """
        if os.path.exists(self.__faked_path):
            rmtree(self.__faked_path)
        self.__disable_path()
        self.__enabled = False
from tdt.validators import SchemaCheck from tdt.validators.job_file.core import filter_regex_options_schema from tdt.validators.job_file.filter_validator import FilterValidator # Deleting a project can be done by filter or id from tdt.validators.job_file.project import base_schema # Debugging from prettyprinter import pprint as pp _delete_obj = { # User is REQUIRED to indicate from where the project name is derived # User can create a project either by ID *or* from a search Required('from'): { # When using an ID, we just need an positive whole number Exclusive('id', 'project.name.source'): All(int, Range(min=1)), # When using filter(s) as the source of the project(s) name, validation is a bit more complex. # So for now, we just make sure that only name or filters is provided and if filters is provided # that the user has given us a list of at least one object. Later, we'll validate each object ## Exclusive('filters', 'project.name.source'): All(list, Length(min=1)), } } # It makes no sense to have the mutate/delete option nor does it make sense to have the absent option so we # use a modified version of the core.__init__ one ## _projects_filter_obj_schema = {
int, Optional('minutes', default=0): int, } _task_reschedule_due_schema = { # Either: # - date is relative # - date is explicit # - date is literal # - date is specified as a delta (e.g. +1 day +2h...) # - date is ABSENT ## # If the user provides a literal string, then ToDoist will try to parse the string Exclusive('literal', 'task.due'): All(str, Length(min=1)), # Relative dates Exclusive('relative', 'task.due'): { # must be in _relative_date_strings and something that can be parsed into a _dt obj Required('to', msg="Must be one of {}".format(relative_date_strings)): All(In(relative_date_strings), validate_date_match), # Relative implies a direction Optional('direction', default=relative_date_directions[0], msg="Must be one of {}".format(relative_date_directions)): In(relative_date_directions) },
_reminder_base_schema = Schema({ # Every reminder object must have a type which we'll use for further validation Required('type'): In(_reminder_types, msg="reminder type must be one of {}".format(_reminder_types.keys())), # Additionally, each reminder object must define how the reminder is to be sent to the user Required('service'): In(_reminder_services, msg="reminder service must be one of {}".format(_reminder_services)), }, extra=ALLOW_EXTRA) ## # It makes NO SENSE to support the option:delete on source selectors. Why would you delete the thing you want # to put a reminder on!? Because we can't use the normal schemas from core.__init__, we create our own here _reminder_apply_task_filter_obj_schema = { # Search for regex matches against task-title Exclusive('content', 'task:'): { # If task.title is specified, MUST be a string w/ at least 1 char Required('match'): Any(str, Length(min=1)), Optional('option', default=mutate_only_option_schema_default): mutate_only_option_schema }, # Date is a bit more complicated. The user can either specify an absolute date/time stamp in ISO format # or use one of a few 'relative' words like 'tomorrow' or 'monday' as well as specifying the direction # of search. Exclusive('date', 'task:'): { # User wants tasks with NO DUE DATE # Note: Because of Exclusive() calls, it makes no sense too have # absent: False, so we only accept True values Exclusive('absent', 'task.selector:'): True,