def create(context, name, data, convert_from=None):
    '''
    Create new UserData object

    :param name: Name of userdata script
    :param data: Contents of script
    :param convert_from: Determines if script should be converted from
        another format to yaml. By default don't try to convert.
        Possible options: yaml (alias: pyaml), json, native (native list
        or dictionaries, sent in request's format)
    '''
    userdata = UserData()
    userdata.name = name
    if convert_from is None:
        # Store verbatim, but guarantee a trailing newline.
        if data.endswith('\n'):
            userdata.data = data
        else:
            userdata.data = data + '\n'
    elif convert_from == 'native':
        userdata.data = '#cloud-config\n' + pyaml.dumps(data) + '\n'
    elif convert_from in ('yaml', 'pyaml'):
        # Fix: the docstring advertises 'yaml' but the code historically
        # only matched 'pyaml'; accept both spellings for compatibility.
        userdata.data = '#cloud-config\n' + pyaml.dumps(pyaml.loads(data)) + '\n'
    elif convert_from == 'json':
        userdata.data = '#cloud-config\n' + pyaml.dumps(simplejson.loads(data)) + '\n'
    else:
        raise CoreException('unsupported_format')
    userdata.user_id = context.user_id
    userdata.save()
    # NOTE(review): `to_dict` is returned without parentheses — if it is a
    # plain method (not a property) this returns the bound method object.
    # Confirm against the UserData model before "fixing"; left unchanged.
    return userdata.to_dict
def do_list_instances(self, line): """ 列出应用的基本信息 """ method = 'GET' url = 'list_instances' client, request = get_client(method, url) response = client.fetch(request) print pyaml.dumps(json.loads(response.body))
def self_send_msg(self, msg): print pyaml.dumps(msg) print '------------------------------------------' method = 'POST' client, request = get_client(method, 'send_message') request.body = json.dumps(msg) response = client.fetch(request) print 'response: ++++++++++++++++++++++++++++++++' print pyaml.dumps(json.loads(response.body))
def do_get_resources_info(self, line): """ 获取栈中的资源的详细信息 """ params = line.replace(' ', '/') method = 'GET' url = 'get_resources_info/%s' % params client, request = get_client(method, url) response = client.fetch(request) print pyaml.dumps(json.loads(response.body))
def do_get_stack(self, line): """ 获取栈的状态。 参数为stack id """ params = line.replace(' ', '/') url = 'get_stack/%s' % params method = 'GET' client, request = get_client(method, url) response = client.fetch(request) print pyaml.dumps(json.loads(response.body))
def do_clear_redis_msg(self, line): """ 清理redis中的内容 参数为需要清理的key前缀 """ params = line.replace(' ', '/') method = 'GET' url = 'clear_redis_msg/%s' % params client, request = get_client(method, url) response = client.fetch(request) print pyaml.dumps(json.loads(response.body))
def do_download_path(self, line): """ 获取下载到云主机中的包的名称,根据镜像名称分析得来。 仅支持ubuntu以及centos """ params = line.replace(' ', '/') method = 'GET' url = 'download_path/%s' % params client, request = get_client(method, url) response = client.fetch(request) print pyaml.dumps(json.loads(response.body))
def do_list_app(self, line): """ 列出应用的详情 参数是应用的serial 可以通过tab补全 """ method = 'GET' params = line.replace(' ', '/') url = 'list_app/%s' % params client, request = get_client(method, url) response = client.fetch(request) print pyaml.dumps(json.loads(response.body))
def do_get_stack_resources(self, line): """ 获取栈中的资源情况 参数为栈的ID。 栈的ID可以通过list_app方法获得。 """ params = line.replace(' ', '/') method = 'GET' url = 'get_stack_resources/%s' % params client, request = get_client(method, url) response = client.fetch(request) print pyaml.dumps(json.loads(response.body))
def do_delete_instance(self, line): """ 删除应用 参数是应用的serial 可以通过tab键补全 """ params = line.replace(' ', '/') method = 'POST' url = 'delete_instance/%s' % params client, request = get_client(method, url) request.body = '{}' response = client.fetch(request) print pyaml.dumps(json.loads(response.body))
def do_redis(self, line): """ 列出redis中的内容 参数是需要列出的key的前缀: mapup execute_script manage_error 如果不传,则返回全部内容 """ params = line.replace(' ', '/') method = 'GET' url = 'redis/%s' % params client, request = get_client(method, url) response = client.fetch(request) print pyaml.dumps(json.loads(response.body))
def run_collation():
    """
    Collate all YAML files into single-file databases, in two formats
    (JSON and YAML).
    """
    encodings = load_encodings()
    profiles = load_profiles()
    substituted = {
        name: substitute_profile(name, profiles, encodings)
        for name in profiles.keys()
    }
    capabilities = {
        'profiles': substituted,
        'encodings': filter_encodings(encodings, substituted),
    }
    # Deterministic, human-friendly JSON first.
    as_json = json.dumps(capabilities, sort_keys=True, indent=4,
                         separators=(',', ': '))
    with open(BASE_DIR + "/../dist/capabilities.json", "wb+") as fh:
        fh.write(as_json.encode('utf-8'))
    # Round-trip through OrderedDict so the YAML preserves the JSON key order.
    ordered = json.loads(as_json, object_pairs_hook=collections.OrderedDict)
    with open(BASE_DIR + "/../dist/capabilities.yml", "wb+") as fh:
        fh.write(pyaml.dumps(ordered, string_val_style='"', explicit_start=True))
def update_config(self, odict_object):
    """Serialize *odict_object* and write it to both config files
    (pretty JSON and YAML)."""
    self.json_file.write(Dict(odict_object).to_pretty_json_string())
    self.yaml_file.write(pyaml.dumps(odict_object))
def result_proxy_to_yaml(resultproxy):
    """Render every row of a result proxy as YAML, columns sorted by name."""
    ordered_rows = []
    for row in resultproxy.fetchall():
        ordered_rows.append(OrderedDict(sorted(row.items(), key=lambda kv: kv[0])))
    return pyaml.dumps(ordered_rows)
def execute(self):
    """Run the execute-script step at most once: gather resources and
    roles from every stack, filter the target hosts by the script
    parameters, then send the command to each target.

    NOTE(review): uses bare ``yield`` on async helpers — presumably a
    tornado-style coroutine; confirm the decorator at the definition site.
    """
    try:
        if not self.executed:
            # Guard: no step may ever run twice.
            self.executed=True
            self.log.debug('params:')
            self.log.debug(pyaml.dumps(self.params))
            script_content=self.params['execute_script_content']
            script_params=self.params['script_params']
            if 'info_token' in self.params:
                self.info_token=self.params['info_token']
            rs=yield self.get_stack_id_list()
            self.log.debug('stack id: %s'%str(rs))
            # Aggregate command params and roles across all stack ids.
            for stack_id in [_['stack_id'] for _ in rs]:
                temp=yield get_stack_resources(stack_id)
                self.command_params=self.command_params+temp
                roles=yield get_roles(stack_id)
                self.roles=self.roles+roles
            # Decide the send targets based on the script parameters.
            self.filter_by_group(script_params)
            self.filter_by_ip(script_params)
            self.log.debug(self.command_params)
            self.ips=[_['ip'] for _ in self.command_params]
            for command_p in self.command_params:
                self._send_msg(command_p,script_content,script_params)
    except:
        # Deliberate best-effort: log the traceback, never propagate.
        self.log.error(generals.trace())
def Generate(self, dirname): if pyaml_found == False: print "please install pyaml" sys.exit(1) if not os.path.exists(dirname): os.makedirs(dirname) elif not os.path.isdir(dirname): print "-o option must specify directory" sys.exit(1) for ctype in self._type_map.values(): self._GenerateTypeMap(ctype) base = { "id": "base", "prefix": "/", "plural": "base", "type": "abstract", "parents": {}, "references": {}, "schema": { "type": "object", "required": [], "properties": {} } } for ident in self._identifier_map.values(): self._objectsList.append(ident._name) filename = os.path.join(dirname, ident._name + "-schema.yml") self._GenerateJavascriptSchema(ident, base, filename) # Generate the file containing the list of all identfiers/objects # Generate the base schema objFileName = os.path.join(dirname, "base.yml") objFile = self._parser.makeFile(objFileName) objFile.write(pyaml.dumps(base, indent=2, safe=True)) typeFileName = os.path.join(dirname, "types.yml") typeFile = self._parser.makeFile(typeFileName) typeJson = {"definitions": self._json_type_map} typeFile.write(pyaml.dumps(typeJson, indent=2, safe=True)) print "Done!" print "Schemas generated under directory: " + dirname
def meta_yaml(self):
    """
    Build the meta.yaml string based on discovered values.

    A nested OrderedDict keeps every meta.yaml produced by this script in
    the same consistent format (instead of being at the whims of Python
    dict sorting). pyaml is used rather than yaml because it has better
    handling of OrderedDicts.
    """
    deps = sorted(self.dependencies)
    d = OrderedDict()
    d['package'] = OrderedDict((
        ('name', 'bioconductor-' + self.package.lower()),
        ('version', self.version),
    ))
    d['source'] = OrderedDict((
        ('fn', self.tarball_basename),
        ('url', self.tarball_url),
        ('md5', self.md5),
    ))
    d['build'] = OrderedDict((
        ('number', 0),
        ('rpaths', ['lib/R/lib/', 'lib/']),
    ))
    # Distinct copies are deliberate: pyaml would otherwise emit a YAML
    # anchor/alias for the shared list object (and hit a unicode error).
    d['requirements'] = OrderedDict((
        ('build', list(deps)),
        ('run', list(deps)),
    ))
    d['test'] = OrderedDict((
        ('commands',
         ['''$R -e "library('{package}')"'''.format(package=self.package)]),
    ))
    d['about'] = OrderedDict((
        ('home', self.url),
        ('license', self.license),
        ('summary', self.description['description']),
    ))
    return pyaml.dumps(d).decode('utf-8')
def alerter_deps(config):
    """Print the alerter's dependency tree as stripped-down YAML on stdout."""
    alerter = Alerter(config=config)
    saved_ignore = pyaml.PrettyYAMLDumper.ignore_aliases
    try:
        # Suppress YAML anchors/aliases so repeated nodes print in full.
        pyaml.PrettyYAMLDumper.ignore_aliases = lambda *a: True
        text = pyaml.dumps(alerter.dependency_tree()).decode('utf-8')
        text = text.replace(": {}", '')
        text = text.replace(":", '')
        sys.stdout.write(text)
    finally:
        # Always restore the dumper's original behaviour.
        pyaml.PrettyYAMLDumper.ignore_aliases = saved_ignore
def yaml_dump(self, data, indent=2, safe=True):
    """Serialize *data* to YAML using whichever parser library was detected
    (module-level ``yaml_parser``); exits if neither is available."""
    if yaml_parser == 'pyaml':
        return pyaml.dumps(data, indent=indent, safe=safe)
    if yaml_parser == 'PyYAML':
        dumper = yaml.safe_dump if safe else yaml.dump
        return dumper(data, indent=indent, default_flow_style=False)
    print("please install pyaml or PyYAML")
    sys.exit(1)
def run_ansible_play(play_dict, ansible_module_path=ANSIBLE_MODULE_PATH,
                     inventory=INVENTORY, testdir=TESTDIR, testcase=None):
    """Write *play_dict* out as a playbook, run it with ansible-playbook,
    and return the JSON result object parsed from the verbose log
    (or None if no JSON could be extracted).

    NOTE(review): the command is run through the shell with interpolated
    paths — safe only because all paths are locally constructed.
    """
    if testcase is not None:
        testdir = os.path.join(testdir, testcase)
    if not os.path.exists(testdir):
        os.makedirs(testdir)
    test_log = os.path.join(testdir, 'test.log')
    inventory_file = os.path.join(testdir, 'inventory.txt')
    play_yml = os.path.join(testdir, 'play.yml')
    # Remove stale logs
    if os.path.exists(test_log):
        os.remove(test_log)
    # Write dict to proper yaml playbook
    with open(play_yml, 'w') as fh:
        fh.write(pyaml.dumps(play_dict))
    # Write the inventory
    with open(inventory_file, 'w') as fh:
        fh.write(inventory)
    actual_command = 'ANSIBLE_LOG_PATH="%s" ansible-playbook -i %s -M %s %s -vvv' % (
        test_log, inventory_file, ansible_module_path, play_yml)
    print(actual_command)
    # Return code is ignored: the result is read from the log either way.
    retval = subprocess.call(actual_command, shell=True)
    # Parse json return object from verbose log output
    with open(test_log, 'r') as fh:
        json_lines = []
        in_json = False
        for line in fh:
            if not in_json:
                # Start of ansible return json
                if '{' in line:
                    json_lines.append(line[line.index('{'):])
                    in_json = True
            else:
                json_lines.append(line)
                # break at last line of ansible return json
                # ($ matches before the trailing newline, so '}\n' matches)
                if re.match(r'^}$', line) is not None:
                    break
    # Return json object or None for failed parsing
    if len(json_lines) > 0:
        return json.loads(''.join(json_lines))
    else:
        return None
def meta_yaml(self):
    """
    Build the meta.yaml string based on discovered values.

    A nested OrderedDict gives every meta.yaml created by this script the
    same consistent layout; pyaml is preferred over yaml for its better
    OrderedDict handling.
    """
    source_url = self.bioaRchive_url
    if not source_url:
        source_url = self.tarball_url
    deps = sorted(self.dependencies)
    d = OrderedDict()
    d["package"] = OrderedDict((("name", "bioconductor-" + self.package.lower()),
                                ("version", self.version)))
    d["source"] = OrderedDict((("fn", self.tarball_basename),
                               ("url", source_url),
                               ("md5", self.md5)))
    d["build"] = OrderedDict((("number", self.build_number),
                              ("rpaths", ["lib/R/lib/", "lib/"])))
    # Distinct copies stop pyaml from emitting an alias for the shared list
    # object (which also triggered a unicode-decoding error).
    d["requirements"] = OrderedDict((("build", list(deps)),
                                     ("run", list(deps))))
    d["test"] = OrderedDict(
        (("commands", ['''$R -e "library('{package}')"'''.format(package=self.package)]),)
    )
    d["about"] = OrderedDict((("home", self.url),
                              ("license", self.license),
                              ("summary", self.description["description"])))
    return pyaml.dumps(d).decode("utf-8")
def __init__(self, node_id, params, serial):
    """Set up per-streamlet state and a dedicated file logger under the
    configured log path."""
    self.stack_id = None
    self.node_id = node_id
    self.serial = serial
    log_dir = '%s/%s' % (cfgutils.getval('log', 'path'), serial)
    if not os.path.exists(log_dir):
        os.mkdir(log_dir)
    handler = logging.FileHandler(
        '%s/%s/%s.log' % (cfgutils.getval('log', 'path'), serial, node_id))
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(logging.Formatter(
        '%(asctime)s - %(filename)s(line:%(lineno)d)'
        ' - [%(levelname)s] - %(message)s'))
    logger = logging.getLogger('streamlet_' + serial + '_' + node_id)
    logger.setLevel(logging.DEBUG)
    logger.addHandler(handler)
    logger.info('app serial: %s' % self.serial)
    logger.info('origin params:\n %s' % pyaml.dumps(params))
    self.log = logger
    self.params = self.merge_params(params)
    # No step may ever be executed twice.
    self.executed = False
def _GenerateJavascriptSchema(self, ident, base, filename):
    """Write the JSON-schema (as YAML) for one identifier to *filename*.

    Direct properties, reference links and parent links are each turned
    into schema fragments; anything whose scope is "all" is hoisted into
    the shared *base* schema instead of this identifier's own schema.
    """
    file = self._parser.makeFile(filename)
    propertiesJSON = {}
    identProperties = ident.getProperties()
    # First loop through the direct properties and generate the schema
    propertiesOrder = []
    required = []
    for prop in identProperties:
        propertyID = self._convertHyphensToUnderscores(prop._name)
        propMemberInfo = prop._memberinfo
        xelementType = prop._xelement.type
        propType = self._getJSDataType(xelementType)
        presence = prop.getPresence()
        simple_type = prop.getElement().getSimpleType()
        propSchema = {}
        if propType == "object":
            # Complex types reference the shared types.json definitions.
            if self._json_type_map.get(xelementType):
                subJson = {
                    "$ref": "types.json#/definitions/" + xelementType
                }
            else:
                subJson = {"type": propType}
        else:
            subJson = {"type": propType}
        if prop.isMap():
            subJson["collectionType"] = "map"
            subJson["mapKey"] = prop.getMapKeyName()
            subJson["wrapper"] = prop.isMapUsingWrapper()
        elif prop.isList():
            subJson["collectionType"] = "list"
            subJson["wrapper"] = prop.isListUsingWrapper()
        default = prop.getDefault()
        if default:
            # Defaults arrive as strings; coerce to the declared JS type.
            if propType == "boolean":
                subJson["default"] = default == "true"
            elif propType == "number":
                subJson["default"] = int(default)
            else:
                subJson["default"] = default
        if presence == 'required':
            required.append(propertyID)
        if simple_type:
            subJson = self.generateRestrictions(simple_type, subJson)
        subJson["presence"] = presence
        subJson["operations"] = prop.getOperations()
        try:
            subJson["description"] = prop.getDescription()
        except ValueError as detail:
            # Missing description is tolerated.
            pass
        if prop._parent == "all":
            # Common property: belongs to the shared base schema.
            base["schema"]["properties"][propertyID] = subJson
        else:
            propertiesJSON[propertyID] = subJson
    # Now look for the links and generate respective schema, exclude the
    # children (has relationship) objects
    references = {}
    for link_info in ident.getLinksInfo():
        presence = link_info[0].getPresence()
        operation = link_info[0].getOperations()
        try:
            description = link_info[0].getDescription()
        except:
            description = ""
        link_to = ident.getLinkTo(link_info)
        link_type = link_info[0]._xelement.type
        # Only "ref" links become references; "has" children are skipped.
        if not ident.isLinkRef(link_info):
            continue
        reference = self._convertHyphensToUnderscores(link_to.getName())
        subJson = {
            "operations": operation,
            "presence": presence,
            "description": description
        }
        if self._json_type_map.get(link_type):
            subJson["$ref"] = "types.json#definitions/" + link_type
        if "derived" in link_info[2]:
            subJson["derived"] = True
        if link_info[0]._idl_info[1] == "all":
            base["references"][reference] = subJson
        else:
            references[reference] = subJson
    parents = {}
    parents_obj = ident.getParents()
    if parents_obj:
        for parent in parents_obj:
            presence = parent[1].getPresence()
            operation = parent[1].getOperations()
            try:
                description = parent[1].getDescription()
            except:
                description = ""
            subJson = {
                "operations": operation,
                "presence": presence,
                "description": description
            }
            link_type = parent[1]._xelement.type
            if ident.isDerived(parent[0]):
                subJson["derived"] = True
            if self._json_type_map.get(link_type):
                subJson["$ref"] = "types.json#definitions/" + link_type
            parents[parent[0].getJsonName()] = subJson
    id = self._convertHyphensToUnderscores(ident._name)
    # Then look for back links and create back_ref schema if required
    jsonSchema = {
        "id": id,
        "prefix": "/",
        "plural": id + "s",
        "extends": ["base"],
        "api_style": "contrail",
        "parents": parents,
        "references": references,
        "schema": {
            "type": "object",
            "required": required,
            "properties": propertiesJSON
        }
    }
    file.write(pyaml.dumps(jsonSchema, indent=2, safe=True))
def meta_yaml(self):
    """
    Build the meta.yaml string based on discovered values.
    Here we use a nested OrderedDict so that all meta.yaml files created
    by this script have the same consistent format. Otherwise we're at
    the mercy of Python dict sorting.

    We use pyaml (rather than yaml) because it has better handling of
    OrderedDicts.

    However pyaml does not support comments, but if there are gcc and
    llvm dependencies then they need to be added with preprocessing
    selectors for ``# [linux]`` and ``# [osx]``. We do this with a unique
    placeholder (not a jinja or $-based string.Template) so as to avoid
    conflicting with the conda jinja templating or the ``$R`` in the test
    commands, and replace the text once the yaml is written.

    NOTE(review): returns the *path* of a freshly written meta.yaml file,
    not the YAML text itself.
    """
    version_placeholder = '{{ version }}'
    package_placeholder = '{{ name }}'
    package_lower_placeholder = '{{ name|lower }}'
    bioc_placeholder = '{{ bioc }}'

    def sub_placeholders(x):
        # Swap concrete values for jinja placeholders so the rendered
        # YAML stays templated on version/name/bioc.
        return (
            x
            .replace(self.version, version_placeholder)
            .replace(self.package, package_placeholder)
            .replace(self.package_lower, package_lower_placeholder)
            .replace(self.bioc_version, bioc_placeholder)
        )
    url = [
        sub_placeholders(u) for u in [
            # keep the one that was found
            self.bioconductor_tarball_url,
            # use the built URL, regardless of whether it was found or not.
            # bioaRchive and cargo-port cache packages but only after the
            # first recipe is built.
            bioarchive_url(self.package, self.version, self.bioc_version),
            cargoport_url(self.package, self.version, self.bioc_version),
        ] if u is not None
    ]
    DEPENDENCIES = sorted(self.dependencies)
    additional_run_deps = []
    if self.is_data_package:
        additional_run_deps.append('curl')
    d = OrderedDict((
        (
            'package', OrderedDict((
                ('name', 'bioconductor-{{ name|lower }}'),
                ('version', '{{ version }}'),
            )),
        ),
        (
            'source', OrderedDict((
                ('url', url),
                ('md5', self.md5),
            )),
        ),
        (
            'build', OrderedDict((
                ('number', self.build_number),
                ('rpaths', ['lib/R/lib/', 'lib/']),
            )),
        ),
        (
            'requirements', OrderedDict((
                # If you don't make copies, pyaml sees these as the same
                # object and tries to make a shortcut, causing an error in
                # decoding unicode. Possible pyaml bug? Anyway, this fixes
                # it.
                ('host', DEPENDENCIES[:]),
                ('run', DEPENDENCIES[:] + additional_run_deps),
            )),
        ),
        (
            'test', OrderedDict((
                (
                    'commands', ['''$R -e "library('{{ name }}')"''']
                ),
            )),
        ),
        (
            'about', OrderedDict((
                ('home', sub_placeholders(self.url)),
                ('license', self.license),
                ('summary', self.pacified_description()),
            )),
        ),
    ))
    if self.extra:
        d['extra'] = self.extra
    # cb3 build requirements are appended as '<name>_PLACEHOLDER' tokens
    # and substituted after rendering (pyaml cannot emit the selector
    # comments directly).
    if self._cb3_build_reqs:
        d['requirements']['build'] = []
    else:
        d['build']['noarch'] = 'generic'
    for k, v in self._cb3_build_reqs.items():
        d['requirements']['build'].append(k + '_' + "PLACEHOLDER")
    rendered = pyaml.dumps(d, width=1e6).decode('utf-8')
    # Add Suggests: and SystemRequirements:
    renderedsplit = rendered.split('\n')
    idx = renderedsplit.index('requirements:')
    if self.packages[self.package].get('SystemRequirements', None):
        renderedsplit.insert(idx, '# SystemRequirements: {}'.format(
            self.packages[self.package]['SystemRequirements']))
    if self.packages[self.package].get('Suggests', None):
        renderedsplit.insert(idx, '# Suggests: {}'.format(
            self.packages[self.package]['Suggests']))
    rendered = '\n'.join(renderedsplit) + '\n'
    # Prepend the jinja variable declarations.
    rendered = (
        '{% set version = "' + self.version + '" %}\n' +
        '{% set name = "' + self.package + '" %}\n' +
        '{% set bioc = "' + self.bioc_version + '" %}\n\n' +
        rendered
    )
    for k, v in self._cb3_build_reqs.items():
        rendered = rendered.replace(k + '_' + "PLACEHOLDER", v)
    tmpdir = tempfile.mkdtemp()
    with open(os.path.join(tmpdir, 'meta.yaml'), 'w') as fout:
        fout.write(rendered)
    return fout.name
# NOTE(review): fragment of a larger routine — the enclosing definition is
# not visible in this chunk, so the indentation below is a best-effort
# reconstruction; code tokens are unchanged.
if 'execute_script_content' in json.dumps(p.keys()):
    print(p['execute_script_content'])
pp = p
# Copy the saved script content into every matching streamlet param of the
# requested action.
for a in i_c['action']:
    if a['name'] == action_name:
        ex_keys = [
            _ for _ in a['streamlet'] if _.find('execute_script') != -1
        ]
        for k in ex_keys:
            for p in a['streamlet'][k]['params']:
                if 'execute_script_content' in json.dumps(p.keys()):
                    p['execute_script_content'] = pp[
                        'execute_script_content']
print(pyaml.dumps(pp))
print('----------------------------------------')
# Second pass: echo the content that was written, for verification.
for a in i_c['action']:
    if a['name'] == action_name:
        ex_keys = [
            _ for _ in a['streamlet'] if _.find('execute_script') != -1
        ]
        for k in ex_keys:
            for p in a['streamlet'][k]['params']:
                if 'execute_script_content' in json.dumps(p.keys()):
                    print(p['execute_script_content'])
# NOTE(review): the body of this `with` continues beyond the visible chunk.
with codecs.open('%s/%s.yaml' % (instance_path, str(serial)), 'w+',
                 'utf-8') as f:
def meta_yaml(self):
    """
    Build the meta.yaml string based on discovered values.

    A nested OrderedDict keeps every generated meta.yaml in one consistent
    format (instead of being at the mercy of Python dict sorting); pyaml
    handles OrderedDicts better than yaml. pyaml cannot emit comments, so
    gcc/llvm dependencies that need `# [linux]` / `# [osx]` preprocessing
    selectors are inserted via unique placeholder strings (not jinja or
    string.Template, to avoid clashing with conda's jinja templating or
    the `$R` in the test commands) and substituted after rendering.
    """
    source_url = self.bioaRchive_url
    if not source_url:
        source_url = self.tarball_url
    deps = sorted(self.dependencies)
    d = OrderedDict()
    d['package'] = OrderedDict((
        ('name', 'bioconductor-' + self.package.lower()),
        ('version', self.version),
    ))
    d['source'] = OrderedDict((
        ('fn', self.tarball_basename),
        ('url', source_url),
        ('md5', self.md5),
    ))
    d['build'] = OrderedDict((
        ('number', self.build_number),
        ('rpaths', ['lib/R/lib/', 'lib/']),
    ))
    # Separate copies keep pyaml from collapsing the two lists into one
    # YAML alias (which also triggered a unicode-decoding error).
    d['requirements'] = OrderedDict((
        ('build', deps[:]),
        ('run', deps[:]),
    ))
    d['test'] = OrderedDict((
        ('commands',
         ['''$R -e "library('{package}')"'''.format(package=self.package)]),
    ))
    d['about'] = OrderedDict((
        ('home', self.url),
        ('license', self.license),
        ('summary', self.description['description']),
    ))
    if self.depends_on_gcc:
        d['requirements']['build'].append('GCC_PLACEHOLDER')
        d['requirements']['build'].append('LLVM_PLACEHOLDER')
    rendered = pyaml.dumps(d).decode('utf-8')
    rendered = rendered.replace('GCC_PLACEHOLDER', 'gcc # [linux]')
    rendered = rendered.replace('LLVM_PLACEHOLDER', 'llvm # [osx]')
    return rendered
def meta_yaml(self):
    """
    Build the meta.yaml string based on discovered values.

    A nested OrderedDict gives all generated meta.yaml files one
    consistent layout; pyaml is used over yaml for its OrderedDict
    support. Since pyaml cannot write comments, gcc/llvm dependencies
    needing `# [linux]` / `# [osx]` selectors are injected via unique
    placeholder tokens and substituted into the rendered text afterwards.
    """
    tarball = self.bioaRchive_url or self.tarball_url
    dependency_list = sorted(self.dependencies)
    recipe = OrderedDict()
    recipe['package'] = OrderedDict((
        ('name', 'bioconductor-' + self.package.lower()),
        ('version', self.version),
    ))
    recipe['source'] = OrderedDict((
        ('fn', self.tarball_basename),
        ('url', tarball),
        ('md5', self.md5),
    ))
    recipe['build'] = OrderedDict((
        ('number', self.build_number),
        ('rpaths', ['lib/R/lib/', 'lib/']),
    ))
    # Independent copies prevent pyaml from aliasing the shared list
    # object (which caused a unicode-decoding error).
    recipe['requirements'] = OrderedDict((
        ('build', list(dependency_list)),
        ('run', list(dependency_list)),
    ))
    recipe['test'] = OrderedDict((
        ('commands',
         ['''$R -e "library('{package}')"'''.format(package=self.package)]),
    ))
    recipe['about'] = OrderedDict((
        ('home', self.url),
        ('license', self.license),
        ('summary', self.description['description']),
    ))
    if self.depends_on_gcc:
        recipe['requirements']['build'].append('GCC_PLACEHOLDER')
        recipe['requirements']['build'].append('LLVM_PLACEHOLDER')
    text = pyaml.dumps(recipe).decode('utf-8')
    text = text.replace('GCC_PLACEHOLDER', 'gcc # [linux]')
    text = text.replace('LLVM_PLACEHOLDER', 'llvm # [osx]')
    return text
def result_proxy_to_yaml(resultproxy):
    """Dump all rows of *resultproxy* to YAML with columns in sorted order."""
    rows = [
        OrderedDict(sorted(item.items(), key=lambda kv: kv[0]))
        for item in resultproxy.fetchall()
    ]
    return pyaml.dumps(rows)
# Build and publish the base model file for every data model in this repo.
# NOTE(review): `enableDataModelFilter` is not used in the visible lines —
# presumably consumed later in the script; confirm before removing.
enableDataModelFilter = dataModelsToPublish["filterDataModels"]
for dataModel in dataModels:
    echo("repoName", repoName)
    result = {}
    result[dataModel] = {}
    echo("dataModel=", dataModel)
    schemaUrl = "https://raw.githubusercontent.com/smart-data-models/" + repoName + "/master/" + dataModel + "/schema.json"
    echo("urlschema", schemaUrl)
    # Resolve all $ref pointers in the published schema.
    schemaExpanded = open_jsonref(schemaUrl)
    echo("schemaExpanded", schemaExpanded)
    result[dataModel]["properties"] = parse_payload(schemaExpanded, 1)
    try:
        # the required clause is optional
        required = schemaExpanded["required"]
    except:
        required = []
    try:
        entityDescription = schemaExpanded["description"]
    except:
        entityDescription = "No description available"
    result[dataModel]["type"] = "object"
    result[dataModel]["description"] = entityDescription
    result[dataModel]["required"] = required
    echo("result", result)
    path = dataModel + "/" + baseModelFileName
    message = "updated " + baseModelFileName
    # Push the rendered YAML straight to the repository.
    github_push_from_variable(pyaml.dumps(result, width=4096), repoName,
                              path, message, globalUser, token)
def create_instance(request, name, action_name, seq):
    """Create (deploy) an application instance or run a management action
    on an existing one.

    Returns ``(info_token_list, serial)`` — the tokens registered for
    info-returning steps and the instance serial. Raises on missing
    template/action; all errors are logged and re-raised.
    """
    log = logging.getLogger('manor')
    try:
        log.debug(request.body)
        instance_path = cfgutils.getval('app', 'instance_path')
        template_path = cfgutils.getval('app', 'template_path')
        params = json.loads(request.body)
        app_name = params['app_name']
        if 'type' in params:
            action_type = params['type']
        else:
            action_type = 'deploy'
        if action_type == 'deploy':
            # Fresh deploy: template must exist on disk.
            if not os.path.isfile('%s/%s.yaml' % (template_path, name)):
                raise Exception('error.manor.templates.not.exist')
            content = load_template(template_path,
                                    name.replace(' ', '') + '.yaml')
        else:
            # Management action: load the saved instance template.
            with open(
                    '%s/%s.yaml' % (instance_path,
                                    params['app_name'].replace(' ', ''))) as f:
                rs = f.read()
            content = yaml.safe_load(rs)
        if action_name not in [_['name'] for _ in content['action']]:
            raise Exception('error.manor.action.not.exist')
        if 'app_description' not in content:
            content['app_description'] = ''
        if params['app_description']:
            content['app_description'] = params['app_description']
        # Merge request parameters into the template.
        merge_params(action_name, content, log, params)
        check_template(content)
        info_token_list = []
        if action_type == 'deploy':
            serial = uuid.uuid1()
            execute((
                "INSERT INTO manor.manor_app_instance"
                "(template_name,app_name,app_serial,state,app_description,app_id)"
                "VALUES(%s,%s,%s,%s,%s,%s)"),
                (name, app_name, serial, 'building',
                 content['app_description'], seq))
            # save origin templates.
            log.debug('save app templates : %s' % str(serial))
            with codecs.open(
                    '%s/%s.yaml' % (instance_path,
                                    str(serial).replace(' ', '')),
                    'w+', 'utf-8') as f:
                f.write(yaml.safe_dump(content))
        else:
            # Run the management workflow against the existing instance.
            serial = params['app_name']
            streamlet_key = params['params'].keys()
            for k in streamlet_key:
                s_ps = params['params'][k]['params']
                for s_p in s_ps:
                    # NOTE(review): appends to s_ps while iterating it —
                    # the appended {'info_token': ...} entries lack
                    # 'info_return' so the loop skips them, but confirm
                    # this is intentional.
                    if 'info_return' in s_p and s_p['info_return']:
                        token = str(uuid.uuid1())
                        s_ps.append({'info_token': token})
                        info_token_list.append(token)
            # Merge again so the freshly added tokens reach the template.
            merge_params(action_name, content, log, params)
        logging.getLogger('manor').debug(pyaml.dumps(content))
        stream.execute(content, action_name, str(serial))
        if action_type == 'deploy':
            optLog.write(request, optLog.Type.APP_INSTANCE, seq,
                         Operator.CREATE, '%s' % app_name)
        else:
            action_label = [
                _ for _ in content['action'] if _['name'] == action_name
            ][0]['label']

            def get_result(rows):
                app_name = rows[0]['app_name']
                optLog.write(request, optLog.Type.APP_INSTANCE, seq,
                             Operator.EXECUTE_ACTION,
                             '%s %s' % (app_name, action_label))
            execute_query(
                "select * from manor.manor_app_instance where app_serial='%s'"
                % serial, get_result)
        return info_token_list, serial
    except Exception as e:
        log.error(generals.trace())
        raise e
# NOTE(review): fragment — the opening of this add_argument()/parser setup
# sits outside the visible chunk; tokens below are unchanged.
    required=True,
    help='the params of script.',
    default=False)
args = pars.parse_args()
script_id = str(uuid.uuid1())
# Build an execute_script message addressed to the target host.
msg = {
    "id": script_id,
    "cmd": "execute_script",
    "target": {
        "name": "cowbell",
        "ip": args.ip
    },
    "params": {
        "script_name": "manor_execute_script_" + script_id,
        "serial": 'test_serial',
        "script_content": args.content,
        "character": 'test_character',
        "params": json.loads(args.params)
    }
}
print 'msg:\n', pyaml.dumps(msg)
method = 'POST'
client, request = get_client(method, 'send_message')
request.body = json.dumps(msg)
response = client.fetch(request)
# Echo the server's reply as YAML.
print pyaml.dumps(json.loads(response.body))
def meta_yaml(self):
    """
    Build the meta.yaml string based on discovered values.
    Here we use a nested OrderedDict so that all meta.yaml files created
    by this script have the same consistent format. Otherwise we're at
    the mercy of Python dict sorting.

    We use pyaml (rather than yaml) because it has better handling of
    OrderedDicts.

    However pyaml does not support comments, but if there are gcc and
    llvm dependencies then they need to be added with preprocessing
    selectors for `# [linux]` and `# [osx]`. We do this with a unique
    placeholder (not a jinja or $-based string.Template) so as to avoid
    conflicting with the conda jinja templating or the `$R` in the test
    commands, and replace the text once the yaml is written.

    NOTE(review): returns the *path* of a freshly written meta.yaml file,
    not the YAML text itself.
    """
    version_placeholder = '{{ version }}'
    package_placeholder = '{{ name }}'
    package_lower_placeholder = '{{ name|lower }}'
    bioc_placeholder = '{{ bioc }}'

    def sub_placeholders(x):
        # Swap concrete values for jinja placeholders so the rendered
        # YAML stays templated on version/name/bioc.
        return (x.replace(self.version, version_placeholder).replace(
            self.package, package_placeholder).replace(
                self.package_lower,
                package_lower_placeholder).replace(self.bioc_version,
                                                   bioc_placeholder))
    url = [
        sub_placeholders(u) for u in [
            # keep the one that was found
            self.bioconductor_tarball_url,
            # use the built URL, regardless of whether it was found or not.
            # bioaRchive and cargo-port cache packages but only after the
            # first recipe is built.
            bioarchive_url(self.package, self.version, self.bioc_version),
            cargoport_url(self.package, self.version, self.bioc_version),
        ] if u is not None
    ]
    DEPENDENCIES = sorted(self.dependencies)
    additional_run_deps = []
    if self.is_data_package:
        additional_run_deps.append('curl')
    d = OrderedDict((
        (
            'package',
            OrderedDict((
                ('name', 'bioconductor-{{ name|lower }}'),
                ('version', '{{ version }}'),
            )),
        ),
        (
            'source',
            OrderedDict((
                ('url', url),
                ('md5', self.md5),
            )),
        ),
        (
            'build',
            OrderedDict((
                ('number', self.build_number),
                ('rpaths', ['lib/R/lib/', 'lib/']),
            )),
        ),
        (
            'requirements',
            OrderedDict((
                # If you don't make copies, pyaml sees these as the same
                # object and tries to make a shortcut, causing an error in
                # decoding unicode. Possible pyaml bug? Anyway, this fixes
                # it.
                ('host', DEPENDENCIES[:]),
                ('run', DEPENDENCIES[:] + additional_run_deps),
            )),
        ),
        (
            'test',
            OrderedDict(
                (('commands', ['''$R -e "library('{{ name }}')"''']), )),
        ),
        (
            'about',
            OrderedDict((
                ('home', sub_placeholders(self.url)),
                ('license', self.license),
                ('summary', self.pacified_description()),
            )),
        ),
    ))
    if self.extra:
        d['extra'] = self.extra
    # cb3 build requirements are appended as '<name>_PLACEHOLDER' tokens
    # and substituted after rendering (pyaml cannot emit the selector
    # comments directly).
    if self._cb3_build_reqs:
        d['requirements']['build'] = []
    else:
        d['build']['noarch'] = 'generic'
    for k, v in self._cb3_build_reqs.items():
        d['requirements']['build'].append(k + '_' + "PLACEHOLDER")
    rendered = pyaml.dumps(d, width=1e6).decode('utf-8')
    # Add Suggests: and SystemRequirements:
    renderedsplit = rendered.split('\n')
    idx = renderedsplit.index('requirements:')
    if self.packages[self.package].get('SystemRequirements', None):
        renderedsplit.insert(
            idx, '# SystemRequirements: {}'.format(
                self.packages[self.package]['SystemRequirements']))
    if self.packages[self.package].get('Suggests', None):
        renderedsplit.insert(
            idx, '# Suggests: {}'.format(
                self.packages[self.package]['Suggests']))
    rendered = '\n'.join(renderedsplit) + '\n'
    # Prepend the jinja variable declarations.
    rendered = ('{% set version = "' + self.version + '" %}\n' +
                '{% set name = "' + self.package + '" %}\n' +
                '{% set bioc = "' + self.bioc_version + '" %}\n\n' +
                rendered)
    for k, v in self._cb3_build_reqs.items():
        rendered = rendered.replace(k + '_' + "PLACEHOLDER", v)
    tmpdir = tempfile.mkdtemp()
    with open(os.path.join(tmpdir, 'meta.yaml'), 'w') as fout:
        fout.write(rendered)
    return fout.name
def meta_yaml(self):
    """
    Build the meta.yaml string based on discovered values.

    Here we use a nested OrderedDict so that all meta.yaml files created
    by this script have the same consistent format. Otherwise we're at
    the mercy of Python dict sorting.

    We use pyaml (rather than yaml) because it has better handling of
    OrderedDicts.

    However pyaml does not support comments, but if there are gcc and
    llvm dependencies then they need to be added with preprocessing
    selectors for `# [linux]` and `# [osx]`. We do this with a unique
    placeholder (not a jinja or $-based string.Template) so as to avoid
    conflicting with the conda jinja templating or the `$R` in the test
    commands, and replace the text once the yaml is written.

    Returns the path of a temporary file containing the rendered
    meta.yaml text.
    """
    # Jinja placeholders substituted back into URLs/metadata so the
    # rendered recipe stays templated on version/name/bioc.
    version_placeholder = '{{ version }}'
    package_placeholder = '{{ name }}'
    package_lower_placeholder = '{{ name|lower }}'
    bioc_placeholder = '{{ bioc }}'

    def sub_placeholders(x):
        # Replace concrete version/package/bioc strings in *x* with
        # their jinja template equivalents.
        return (
            x.replace(self.version, version_placeholder)
            .replace(self.package, package_placeholder)
            .replace(self.package_lower, package_lower_placeholder)
            .replace(self.bioc_version, bioc_placeholder)
        )

    url = [
        sub_placeholders(u) for u in [
            # keep the one that was found
            self.bioconductor_tarball_url,
            self.bioconductor_annotation_data_url,
            self.bioconductor_experiment_data_url,
            # use the built URL, regardless of whether it was found or not.
            # bioaRchive and cargo-port cache packages but only after the
            # first recipe is built.
            bioarchive_url(self.package, self.version, self.bioc_version),
            cargoport_url(self.package, self.version, self.bioc_version),
        ] if u is not None
    ]
    DEPENDENCIES = sorted(self.dependencies)
    additional_run_deps = []
    # Data packages pull their payload at install time, hence wget.
    if self.is_data_package:
        additional_run_deps.append('wget')
    d = OrderedDict((
        (
            'package',
            OrderedDict((
                ('name', 'bioconductor-{{ name|lower }}'),
                ('version', '{{ version }}'),
            )),
        ),
        (
            'source',
            OrderedDict((
                ('fn', '{{ name }}_{{ version }}.tar.gz'),
                ('url', url),
                ('sha256', self.sha256),
            )),
        ),
        (
            'build',
            OrderedDict((
                ('number', self.build_number),
                ('rpaths', ['lib/R/lib/', 'lib/']),
            )),
        ),
        (
            'requirements',
            OrderedDict((
                # If you don't make copies, pyaml sees these as the same
                # object and tries to make a shortcut, causing an error in
                # decoding unicode. Possible pyaml bug? Anyway, this fixes
                # it.
                ('build', DEPENDENCIES[:]),
                ('run', DEPENDENCIES[:] + additional_run_deps),
            )),
        ),
        (
            'test',
            OrderedDict(
                (('commands', ['''$R -e "library('{{ name }}')"''']),
                 )),
        ),
        (
            'about',
            OrderedDict((
                ('home', sub_placeholders(self.url)),
                ('license', self.license),
                ('summary', self.description['description']),
            )),
        ),
    ))
    if self.depends_on_gcc:
        # Placeholders are swapped for selector-bearing strings after the
        # YAML is rendered, since pyaml cannot emit comments itself.
        d['requirements']['build'].append('GCC_PLACEHOLDER')
        d['requirements']['build'].append('LLVM_PLACEHOLDER')
    # width=1e6 effectively disables line wrapping in the emitted YAML.
    rendered = pyaml.dumps(d, width=1e6).decode('utf-8')
    rendered = rendered.replace('GCC_PLACEHOLDER', 'gcc # [linux]')
    rendered = rendered.replace('LLVM_PLACEHOLDER', 'llvm # [osx]')
    # Prepend the jinja variable definitions the placeholders refer to.
    rendered = (
        '{% set version = "' + self.version + '" %}\n'
        + '{% set name = "' + self.package + '" %}\n'
        + '{% set bioc = "' + self.bioc_version + '" %}\n\n'
        + rendered
    )
    # NOTE(review): NamedTemporaryFile(delete=False) leaves its own file
    # handle open (only the path is used); the file persists for the
    # caller to read.
    tmp = tempfile.NamedTemporaryFile(delete=False).name
    with open(tmp, 'w') as fout:
        fout.write(rendered)
    return fout.name
def format_yaml(obj, prefix='---\n'):
    """Serialize *obj* to YAML and return it syntax-highlighted for a terminal.

    *prefix* is prepended verbatim (defaults to the YAML document marker).
    """
    yaml_text = pyaml.dumps(obj, safe=True).decode('utf-8')
    return highlight(prefix + yaml_text,
                     lexers.YamlLexer(),
                     formatters.TerminalFormatter())
"""Auto generate the autogallery form the images in a directory""" import pyaml import os filtered = (item for item in os.listdir('.') if item.endswith(".jpg") or item.endswith(".png")) output_stage_1 = [{"src": item} for item in filtered] print(pyaml.dumps(output_stage_1).decode())
def test_dumps(self):
    """pyaml.dumps must return a bytes object for a multiline string."""
    dumped = pyaml.dumps(data_str_multiline)
    self.assertIsInstance(dumped, bytes)
def meta_yaml(self):
    """
    Build the meta.yaml string based on discovered values.

    A nested OrderedDict keeps every meta.yaml emitted by this script in
    the same consistent format; otherwise we're at the whims of Python
    dict sorting. pyaml is used (rather than yaml) because it has better
    handling of OrderedDicts.
    """
    deps = sorted(self.dependencies)

    # Sections are assigned one at a time so the serialized key order is
    # explicit: package, source, build, requirements, test, about.
    d = OrderedDict()
    d['package'] = OrderedDict((
        ('name', 'bioconductor-' + self.package.lower()),
        ('version', self.version),
    ))
    d['source'] = OrderedDict((
        ('fn', self.tarball_basename),
        ('url', self.tarball_url),
        ('md5', self.md5),
    ))
    d['build'] = OrderedDict((
        ('number', 0),
        ('rpaths', ['lib/R/lib/', 'lib/']),
    ))
    # Give 'build' and 'run' independent copies of the dependency list:
    # pyaml treats a shared list object as an anchor/alias shortcut,
    # which caused an error when decoding unicode (possible pyaml bug).
    d['requirements'] = OrderedDict((
        ('build', list(deps)),
        ('run', list(deps)),
    ))
    test_command = '''$R -e "library('{package}')"'''.format(
        package=self.package)
    d['test'] = OrderedDict((('commands', [test_command]),))
    d['about'] = OrderedDict((
        ('home', self.url),
        ('license', self.license),
        ('summary', self.description['description']),
    ))
    return pyaml.dumps(d).decode('utf-8')