def test_ConfigDict():
    """ConfigDict.merge combines values per type: scalars are overwritten,
    lists concatenated, dicts merged recursively, sets unioned, and keys
    absent from the target are added."""
    d1 = ConfigDict({
        'a': 1,
        'b': 'one',
        'c': [1],
        'd': {'foo': 'x1', 'bar': 'x2'},
        'e': {3, 5},  # set literal is the idiomatic spelling of set((3, 5))
    })
    d1.merge({
        'a': 2,                            # scalar -> overwritten
        'c': [1, 2],                       # list -> concatenated
        'd': {'bar': 'y2', 'baz': 'y3'},   # dict -> merged recursively
        'e': {3, 6},                       # set -> unioned
        'f': 'func',                       # new key -> added
    })
    assert d1 == {
        'a': 2,
        'b': 'one',
        'c': [1, 1, 2],
        'd': {'foo': 'x1', 'bar': 'y2', 'baz': 'y3'},
        'e': {3, 5, 6},
        'f': 'func',
    }
def test_ConfigDict():
    """Exercise ConfigDict.merge across scalar, list, dict and set values."""
    initial = {
        "a": 1,
        "b": "one",
        "c": [1],
        "d": {"foo": "x1", "bar": "x2"},
        "e": set((3, 5)),
    }
    incoming = {
        "a": 2,
        "c": [1, 2],
        "d": {"bar": "y2", "baz": "y3"},
        "e": set((3, 6)),
        "f": "func",
    }
    expected = {
        "a": 2,
        "b": "one",
        "c": [1, 1, 2],
        "d": {"foo": "x1", "bar": "y2", "baz": "y3"},
        "e": set((3, 5, 6)),
        "f": "func",
    }

    merged = ConfigDict(initial)
    merged.merge(incoming)
    assert merged == expected
def tasks(self, patterns, group='all', exclude=(), options=None):
    """yield tasks as given by pattern

    @param patterns: (list - str) list of path patterns of files to be linted
    @param group: (str) name of a group
    @param exclude: (list - str) list of path of files to be removed from selection
    @param options: (dict) extra options for group; when given, a per-group
        jshint config file is generated (and yielded as its own task) because
        jshint won't ever accept options from the command line
        (https://github.com/jshint/jshint/issues/807)
    """
    cfg = ConfigDict(copy.deepcopy(self._config))
    if options:
        cfg.merge(options)
        config_file = '_hint_{}.json'.format(group)

        def write_config():
            # sort_keys keeps the generated file deterministic, so
            # config_changed() uptodate checks stay stable
            with open(config_file, 'w') as fp:
                json.dump(cfg, fp, indent=4, sort_keys=True)

        yield {
            'name': config_file,
            'actions': [write_config],
            'targets': [config_file],
            'uptodate': [config_changed(cfg)],
        }
    else:
        config_file = self.config_file

    # yield a task for every js file in selection
    base = Path('.')
    excluded = {base.joinpath(e) for e in exclude}  # set for O(1) membership
    for pattern in patterns:
        for src in base.glob(pattern):
            if src not in excluded:
                yield self(config_file, str(src))
def djinja_conffile(self):
    """Generate temporary docker djinja config

    Merges the configured datasources with the 'env' section, dumps the
    result as YAML into a NamedTemporaryFile and caches the handle so
    subsequent calls return the same file.
    """
    # reuse the tempfile produced by an earlier call, if any
    if self._djinja_conffile is not None:
        return self._djinja_conffile

    tmpfile = tempfile.NamedTemporaryFile()
    merged = ConfigDict({'datasources': self.config.get('datasources', {})})
    merged.merge(self.config['env'])
    try:
        tmpfile.write(
            yaml.dump(dict(merged.viewitems()), default_flow_style=True))
        tmpfile.seek(0)
        self.log.debug("Tempfile content written: \n%s", tmpfile.read())
    except (IOError, OSError) as e:
        # config may not exist, so skip it gracefully.
        self.log.error("Unable to write docker djinja config file!")
        self.log.error("%s", e)
        sys.exit(1)

    self._djinja_conffile = tmpfile
    return self._djinja_conffile
def djinja_conffile(self):
    """Generate temporary docker djinja config

    The rendered YAML combines self.config's datasources with its 'env'
    section; the resulting NamedTemporaryFile is cached on the instance.
    """
    if self._djinja_conffile is None:
        handle = tempfile.NamedTemporaryFile()
        conf = ConfigDict({'datasources': self.config.get('datasources', {})})
        conf.merge(self.config['env'])
        try:
            dumped = yaml.dump(dict(conf.viewitems()), default_flow_style=True)
            handle.write(dumped)
            handle.seek(0)
            self.log.debug("Tempfile content written: \n%s", handle.read())
        except (IOError, OSError) as e:
            # config may not exist, so skip it gracefully.
            self.log.error("Unable to write docker djinja config file!")
            self.log.error("%s", e)
            sys.exit(1)
        self._djinja_conffile = handle
    return self._djinja_conffile
def pyd(self, line, cell):
    """Compile the cell as a Pyd/D extension module via ``dub`` and import it.

    The cell body is wrapped with PydMain boilerplate, written into a
    per-module IPython cache directory, built with ``dub build`` and finally
    loaded as a dynamic module whose public names are imported into the
    user namespace.
    """
    args = magic_arguments.parse_argstring(self.pyd, line)
    # D boilerplate that registers every symbol of the module with Pyd.
    code = ('import ppyd;\n\n'
            'extern(C) void PydMain()\n{\n'
            '    registerAll!(Alias!(__traits(parent, PydMain)))();\n'
            '}\n\n') + cell
    code = code if code.endswith('\n') else code + '\n'
    # The key is hashed to derive the cached module name, so any change to
    # the code, magic line or interpreter produces a fresh module.
    key = code, line, sys.version_info, sys.executable

    # --dub_config may arrive as plain JSON or as a quoted Python literal
    # wrapping JSON; json.loads raises ValueError on invalid input.
    try:
        args.dub_config = json.loads(args.dub_config)
    except ValueError:
        args.dub_config = json.loads(ast.literal_eval(args.dub_config))
    try:
        args.dub_args = ast.literal_eval(args.dub_args)
    except (ValueError, SyntaxError):
        # not a Python literal: keep the raw string as-is
        pass

    if args.force:
        # Force a new module name by adding the current time to the
        # key which is hashed to determine the module name.
        key += (time.time(), )
        args.dub_args = '--force ' + args.dub_args

    if args.name:
        module_name = py3compat.unicode_to_str(args.name)
    else:
        module_name = "_pyd_magic_" + hashlib.md5(
            str(key).encode('utf-8')).hexdigest()

    lib_dir = os.path.join(get_ipython_cache_dir(), 'pyd', module_name)
    if not os.path.exists(lib_dir):
        os.makedirs(lib_dir)

    if os.name == 'nt':
        so_ext = '.dll'
    else:
        so_ext = '.so'  # might have to go to dylib on OS X at some point???

    module_path = os.path.join(lib_dir, 'lib' + module_name + so_ext)
    have_module = os.path.isfile(module_path)
    need_pydize = not have_module

    if need_pydize:
        # write the wrapped cell out as the module's D source file
        pyd_file = os.path.join(lib_dir, module_name + '.d')
        pyd_file = py3compat.cast_bytes_py2(
            pyd_file, encoding=sys.getfilesystemencoding())
        with io.open(pyd_file, 'w', encoding='utf-8') as f:
            f.write(code)

        pyd_dub_file = os.path.join(lib_dir, 'dub.json')
        pyd_dub_file = py3compat.cast_bytes_py2(
            pyd_dub_file, encoding=sys.getfilesystemencoding())
        pyd_dub_selections_file = os.path.join(lib_dir, 'dub.selections.json')
        pyd_dub_selections_file = py3compat.cast_bytes_py2(
            pyd_dub_selections_file, encoding=sys.getfilesystemencoding())

        pyd_dub_json = {}
        pyd_dub_json['name'] = module_name
        pyd_dub_json['dependencies'] = {
            "pyd": args.pyd_version,
            "ppyd": ">=0.1.2"
        }
        pyd_dub_json['subConfigurations'] = {
            "pyd": "python{0}{1}".format(sys.version_info.major,
                                         sys.version_info.minor)
        }
        pyd_dub_json['sourceFiles'] = [pyd_file]
        pyd_dub_json['targetType'] = 'dynamicLibrary'
        pyd_dub_json['dflags'] = ['-fPIC']
        pyd_dub_json['libs'] = ['phobos2']
        pyd_dub_json['versions'] = ['PydPythonExtension']

        # First dub.json pass, so `dub describe` can resolve dependencies.
        with io.open(pyd_dub_file, 'w', encoding='utf-8') as f:
            f.write(
                unicode(json.dumps(pyd_dub_json) + '\n', encoding='utf-8'))
        # Stale selections could pin old dependency versions; drop them.
        try:
            os.remove(pyd_dub_selections_file)
        except OSError:
            pass

        dub_desc = json.loads(
            subprocess.check_output(
                ["dub", "describe", "--root=" + lib_dir],
                universal_newlines=True))
        # locate the pyd package's infrastructure directory
        for pack in dub_desc['packages']:
            if pack['name'] == 'pyd':
                _infraDir = os.path.join(pack['path'], 'infrastructure')
                break
        if os.name == 'nt':
            boilerplatePath = os.path.join(
                _infraDir, 'd', 'python_dll_windows_boilerplate.d')
        else:
            boilerplatePath = os.path.join(
                _infraDir, 'd', 'python_so_linux_boilerplate.d')
        pyd_dub_json['sourceFiles'].append(boilerplatePath)

        if args.compiler == 'dmd':
            # dmd needs pyd's C shared-object constructor stub compiled
            # separately and linked in
            so_ctor_path = os.path.join(_infraDir, 'd', 'so_ctor.c')
            so_ctor_object_path = os.path.join(lib_dir, "so_ctor.o")
            subprocess.check_call([
                'cc', "-c", "-fPIC", "-o" + so_ctor_object_path, so_ctor_path
            ])
            pyd_dub_json['sourceFiles'].append(so_ctor_object_path)

        # render pydmain from pyd's template with this module's name
        mainTemplate = os.path.join(_infraDir, 'd', 'pydmain_template.d')
        mainTemplate = py3compat.cast_bytes_py2(
            mainTemplate, encoding=sys.getfilesystemencoding())
        mainTemplateOut = os.path.join(lib_dir, 'pydmain.d')
        mainTemplateOut = py3compat.cast_bytes_py2(
            mainTemplateOut, encoding=sys.getfilesystemencoding())
        with io.open(mainTemplate, 'r', encoding='utf-8') as t, io.open(
                mainTemplateOut, 'w', encoding='utf-8') as m:
            m.write(t.read() % {'modulename': module_name})
        pyd_dub_json['sourceFiles'].append(mainTemplateOut)

        # Second dub.json pass: extra source files plus user overrides.
        pyd_dub_json = ConfigDict(pyd_dub_json)
        pyd_dub_json.merge(args.dub_config)
        with io.open(pyd_dub_file, 'w', encoding='utf-8') as f:
            f.write(
                unicode(json.dumps(pyd_dub_json) + '\n', encoding='utf-8'))

        try:
            output = subprocess.check_output(
                ["dub", "build", "--root=" + lib_dir] +
                args.dub_args.split(' '),
                universal_newlines=True,
                stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            print(e.output)
            raise e
        if args.print_compiler_output:
            print(output)

    if not have_module:
        self._code_cache[key] = module_name

    module = imp.load_dynamic(module_name, module_path)
    self._import_all(module)
def __compile(
        cls,
        klass: "Workflow",
        name: str,
        bases: Tuple[Type["Workflow"], ...],
        props: Dict[str, Any],
        **kwargs,
):
    """Compile a Workflow subclass's attributes into an Argo workflow spec.

    Collects V1alpha1DAGTask and V1alpha1Template objects from *props*,
    wraps the tasks in a "main" DAG template, merges the class spec over
    its parent's spec and replaces ``klass.spec`` with the resulting
    V1alpha1WorkflowSpec.  NOTE(review): appears to be called from a
    metaclass/init-subclass hook — *name* and **kwargs are unused here.
    """
    tasks: List[V1alpha1DAGTask] = []
    templates: List[V1alpha1Template] = []
    scopes: Dict[str, List[Any]] = {}

    # get scopes first: group props by their declared __scope__ so that
    # closure templates can look up their scoped siblings below
    for key, prop in props.items():
        scope = getattr(prop, "__scope__", None)
        if scope is None:
            continue

        scoped_objects = [prop]
        scoped_objects.extend(scopes.get(scope, []))

        scopes[scope] = scoped_objects

    # partition model-carrying props into templates and DAG tasks
    for key, prop in props.items():
        model = getattr(prop, "__model__", None)
        if model is None:
            continue

        template: Optional[V1alpha1Template] = None

        # V1alpha1Template
        if issubclass(model, V1alpha1Template):
            template = prop

            # closures require special treatment
            if hasattr(template, "__closure__") and template.script is not None:
                template = cls.__compile_closure(template, scopes)

            templates.append(template)

        # V1alpha1DAGTask
        elif issubclass(model, V1alpha1DAGTask):
            task = prop
            tasks.append(task)

    if tasks:
        # wrap collected tasks in a DAG template named "main", placed first
        main_template = V1alpha1Template(name="main")
        main_template.dag = V1alpha1DAGTemplate(tasks=tasks)

        templates.insert(0, main_template)

    spec_dict: dict = klass.spec
    # parent spec may already be compiled (a model object) or still a dict
    parent_spec_dict: dict = bases[0].spec if isinstance(
        bases[0].spec, dict) else bases[0].spec.to_dict()

    config_spec_dict = ConfigDict(parent_spec_dict)

    # entrypoint: own value, else inherited, else the default "main" DAG
    spec_dict["entrypoint"] = spec_dict.get(
        "entrypoint", parent_spec_dict.get("entrypoint", 'main'))
    spec_dict["templates"] = templates

    # child spec entries take precedence over the inherited ones
    config_spec_dict.merge(spec_dict)

    klass.spec: V1alpha1WorkflowSpec = V1alpha1WorkflowSpec(
        **config_spec_dict)
def pyd(self, line, cell):
    """Build the cell as a Pyd/D extension module with ``dub`` and import it.

    Wraps the cell in PydMain boilerplate, caches the generated sources in
    the IPython cache directory keyed by a hash of the code/line/interpreter,
    compiles them with ``dub build`` and imports the resulting dynamic
    module's names into the user namespace.
    """
    args = magic_arguments.parse_argstring(self.pyd, line)
    # D boilerplate registering every symbol of the module with Pyd.
    code = ('import ppyd;\n\n'
            'extern(C) void PydMain()\n{\n'
            '    registerAll!(Alias!(__traits(parent, PydMain)))();\n'
            '}\n\n') + cell
    code = code if code.endswith('\n') else code + '\n'
    # hashed below to derive the cached module name
    key = code, line, sys.version_info, sys.executable

    # --dub_config may be raw JSON or a quoted literal wrapping JSON;
    # json.loads signals invalid input with ValueError.
    try:
        args.dub_config = json.loads(args.dub_config)
    except ValueError:
        args.dub_config = json.loads(ast.literal_eval(args.dub_config))
    try:
        args.dub_args = ast.literal_eval(args.dub_args)
    except (ValueError, SyntaxError):
        # not a Python literal: use the raw string unchanged
        pass

    if args.force:
        # Force a new module name by adding the current time to the
        # key which is hashed to determine the module name.
        key += (time.time(),)
        args.dub_args = '--force ' + args.dub_args

    if args.name:
        module_name = py3compat.unicode_to_str(args.name)
    else:
        module_name = "_pyd_magic_" + hashlib.md5(
            str(key).encode('utf-8')).hexdigest()

    lib_dir = os.path.join(get_ipython_cache_dir(), 'pyd', module_name)
    if not os.path.exists(lib_dir):
        os.makedirs(lib_dir)

    if os.name == 'nt':
        so_ext = '.dll'
    else:
        so_ext = '.so'  # might have to go to dylib on OS X at some point???

    module_path = os.path.join(lib_dir, 'lib' + module_name + so_ext)
    have_module = os.path.isfile(module_path)
    need_pydize = not have_module

    if need_pydize:
        # persist the wrapped cell as the module's D source
        pyd_file = os.path.join(lib_dir, module_name + '.d')
        pyd_file = py3compat.cast_bytes_py2(
            pyd_file, encoding=sys.getfilesystemencoding())
        with io.open(pyd_file, 'w', encoding='utf-8') as f:
            f.write(code)

        pyd_dub_file = os.path.join(lib_dir, 'dub.json')
        pyd_dub_file = py3compat.cast_bytes_py2(
            pyd_dub_file, encoding=sys.getfilesystemencoding())
        pyd_dub_selections_file = os.path.join(lib_dir, 'dub.selections.json')
        pyd_dub_selections_file = py3compat.cast_bytes_py2(
            pyd_dub_selections_file, encoding=sys.getfilesystemencoding())

        pyd_dub_json = {}
        pyd_dub_json['name'] = module_name
        pyd_dub_json['dependencies'] = {
            "pyd": args.pyd_version,
            "ppyd": ">=0.1.3"
        }
        pyd_dub_json['subConfigurations'] = {
            "pyd": "python{0}{1}".format(sys.version_info.major,
                                         sys.version_info.minor)
        }
        pyd_dub_json['sourceFiles'] = [pyd_file]
        pyd_dub_json['targetType'] = 'dynamicLibrary'
        pyd_dub_json['dflags'] = ['-fPIC']
        pyd_dub_json['libs'] = ['phobos2']
        pyd_dub_json['versions'] = ['PydPythonExtension']

        # initial dub.json so `dub describe` can resolve dependencies
        with io.open(pyd_dub_file, 'w', encoding='utf-8') as f:
            f.write(unicode(json.dumps(pyd_dub_json) + '\n',
                            encoding='utf-8'))
        # remove stale selections that could pin old dependency versions
        try:
            os.remove(pyd_dub_selections_file)
        except OSError:
            pass

        dub_desc = json.loads(
            subprocess.check_output(["dub", "describe", "--root=" + lib_dir],
                                    universal_newlines=True))
        # find pyd's infrastructure directory among the resolved packages
        for pack in dub_desc['packages']:
            if pack['name'] == 'pyd':
                _infraDir = os.path.join(pack['path'], 'infrastructure')
                break
        if os.name == 'nt':
            boilerplatePath = os.path.join(
                _infraDir, 'd', 'python_dll_windows_boilerplate.d')
        else:
            boilerplatePath = os.path.join(
                _infraDir, 'd', 'python_so_linux_boilerplate.d')
        pyd_dub_json['sourceFiles'].append(boilerplatePath)

        if args.compiler == 'dmd':
            # dmd requires pyd's C shared-object constructor stub to be
            # compiled separately and linked in
            so_ctor_path = os.path.join(_infraDir, 'd', 'so_ctor.c')
            so_ctor_object_path = os.path.join(lib_dir, "so_ctor.o")
            subprocess.check_call(
                ['cc', "-c", "-fPIC", "-o" + so_ctor_object_path,
                 so_ctor_path])
            pyd_dub_json['sourceFiles'].append(so_ctor_object_path)

        # render pydmain from the template with this module's name
        mainTemplate = os.path.join(_infraDir, 'd', 'pydmain_template.d')
        mainTemplate = py3compat.cast_bytes_py2(
            mainTemplate, encoding=sys.getfilesystemencoding())
        mainTemplateOut = os.path.join(lib_dir, 'pydmain.d')
        mainTemplateOut = py3compat.cast_bytes_py2(
            mainTemplateOut, encoding=sys.getfilesystemencoding())
        with io.open(mainTemplate, 'r', encoding='utf-8') as t, io.open(
                mainTemplateOut, 'w', encoding='utf-8') as m:
            m.write(t.read() % {'modulename': module_name})
        pyd_dub_json['sourceFiles'].append(mainTemplateOut)

        # final dub.json: extra sources plus user-supplied overrides
        pyd_dub_json = ConfigDict(pyd_dub_json)
        pyd_dub_json.merge(args.dub_config)
        with io.open(pyd_dub_file, 'w', encoding='utf-8') as f:
            f.write(unicode(json.dumps(pyd_dub_json) + '\n',
                            encoding='utf-8'))

        try:
            output = subprocess.check_output(
                ["dub", "build", "--root=" + lib_dir] +
                args.dub_args.split(' '),
                universal_newlines=True,
                stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            print(e.output)
            raise e
        if args.print_compiler_output:
            print(output)

    if not have_module:
        self._code_cache[key] = module_name

    module = imp.load_dynamic(module_name, module_path)
    self._import_all(module)