def jq_stage(stage, spec):
    """Run a jq-scripted stage.

    Resolves ``$wflowref`` bindings, applies the step script to produce
    per-step parameter sets, post-processes each set and registers the
    resulting step or workflow on the stage.

    :param stage: common stage parent object
    :param spec: stage JSON-like spec
    :return: None
    """
    bindings = process_jsonlike(
        spec['bindings'],
        'has("$wflowref")',
        lambda node: process_wflowref(node, stage.view),
    )
    log.info('transforming binds: %s', bindings)
    steps = jq.jq(spec['stepscript']).transform(bindings, multiple_output=False)
    per_step_pars = map(
        lambda step: process_jsonlike(step, 'has("$wflowpointer")', process_wflowpointer),
        steps,
    )
    postscript = spec['postscript']
    for index, pars in enumerate(per_step_pars):
        step_name = '{}_{}'.format(stage.name, index)
        finalized, inputs = finalize_input(pars, stage.view)
        log.info('postscripting: %s', finalized)
        result = jq.jq(postscript).transform(finalized, multiple_output=False)
        # wrap the raw jq output in the stage's data model
        result = stage.datamodel.create(result)
        log.info('finalized to: %s', result)
        addStepOrWorkflow(step_name, stage, result, inputs, spec)
    registerExpressions(stage, spec.get('register_values'))
def jq_stage(stage, spec):
    """Execute a jq-driven stage: bind refs, split into steps, post-process,
    and register each resulting step/workflow.

    :param stage: common stage parent object
    :param spec: stage JSON-like spec
    :return: None
    """
    resolved = process_jsonlike(
        spec['bindings'], 'has("$wflowref")',
        lambda leaf: process_wflowref(leaf, stage.view))
    log.info('transforming binds: %s', resolved)
    steps = jq.jq(spec['stepscript']).transform(resolved, multiple_output=False)
    postscript = spec['postscript']
    for index, step in enumerate(steps):
        # resolve $wflowpointer nodes lazily, one step at a time
        pars = process_jsonlike(step, 'has("$wflowpointer")', process_wflowpointer)
        step_name = '{}_{}'.format(stage.name, index)
        finalized, inputs = finalize_input(pars, stage.view)
        log.info('postscripting: %s', finalized)
        post = jq.jq(postscript).transform(finalized, multiple_output=False)
        post = TypedLeafs(post)
        log.info('finalized to: %s', post)
        addStepOrWorkflow(step_name, stage, post, inputs, spec)
    registerExpressions(stage, spec.get('register_values'))
def addtoperm(perm_name, repo_name, group_name, group_perms, public_read):
    """Update an Artifactory permission target in place.

    perm_name (String): permission name to update/add to
    repo_name (String): name of repo that the permission is to be applied
    group_name (String): group name to add to permission
    group_perms (List): list of perms for the group
    public_read (Boolean): if True, anonymous users have read access

    :return: the ``requests.Response`` of the PUT call
    """
    # idiom fix: test truthiness instead of `== True` — works for any
    # boolean-like flag value, per the documented Boolean contract
    anonymous_perms = ['r'] if public_read else ['']
    p = getperm(perm_name)
    # users, groups, repos and config are dictionaries
    repos = jq(".repositories").transform(json.loads(p.text))
    new_repos = issubset([repo_name], repos)
    config = jq(".").transform(json.loads(p.text))
    config['repositories'] = new_repos
    # each user/group and perms are its own key:value pair
    config['principals']['users'] = {'anonymous': anonymous_perms}
    config['principals']['groups'][group_name] = group_perms
    headers = {'Content-Type': 'application/json'}
    return requests.put(artifactory_url + 'security/permissions/' + perm_name,
                        auth=HTTPBasicAuth(artifactory_user, artifactory_password),
                        data=json.dumps(config), headers=headers,
                        verify=verify_ssl)
def value_error_is_raised_if_program_is_invalid():
    """Compiling an invalid jq program raises a descriptive ValueError."""
    expected_error_str = "jq: error: syntax error, unexpected INVALID_CHARACTER, expecting $end (Unix shell quoting issues?) at <top-level>, line 1:\n!\njq: 1 compile error"
    try:
        jq("!")
    except ValueError as error:
        assert_equal(str(error), expected_error_str)
    else:
        assert False, "Expected error"
def run(self, input_object, input_array, input_string, filter):
    """Apply the jq `filter` to whichever input form was supplied.

    Returns (True, result) when an input was present, (False, None) otherwise.
    NOTE: the parameter name `filter` shadows the builtin; kept for interface
    compatibility with callers.
    """
    structured = input_object or input_array
    if structured:
        return True, jq(filter).transform(structured)
    if input_string:
        return True, jq(filter).transform(text=input_string)
    return False, None
def jq_stage(stage, spec):
    """
    Run a jq-scripted stage against its bindings and register the results.

    :param stage: common stage parent object
    :param spec: stage JSON-like spec
    :return: None
    """
    binds = spec["bindings"]
    binds = process_jsonlike(
        binds, 'has("$wflowref")',
        lambda x: process_wflowref(x, stage.view))
    log.info("transforming binds: %s", binds)
    singlesteps = jq.jq(spec["stepscript"]).transform(binds, multiple_output=False)
    postscript = spec["postscript"]
    stepnum = 0
    for singlestep in singlesteps:
        pars = process_jsonlike(singlestep, 'has("$wflowpointer")', process_wflowpointer)
        singlename = "{}_{}".format(stage.name, stepnum)
        stepnum += 1
        finalized, inputs = finalize_input(pars, stage.view)
        log.info("postscripting: %s", finalized)
        after_post = jq.jq(postscript).transform(finalized, multiple_output=False)
        # let the stage's datamodel wrap the raw jq result
        after_post = stage.datamodel.create(after_post)
        log.info("finalized to: %s", after_post)
        addStepOrWorkflow(singlename, stage, after_post, inputs, spec)
    registerExpressions(stage, spec.get("register_values"))
def value_error_is_raised_if_program_is_invalid():
    """Compiling an invalid jq program fails with the short-form error message."""
    # expected message reconstructed from the original triple-quoted literal
    expected_error_str = """error: syntax error, unexpected INVALID_CHARACTER, expecting $end
!
1 compile error"""
    try:
        jq("!")
    except ValueError as error:
        assert_equal(str(error), expected_error_str)
    else:
        assert False, "Expected error"
def test_002_to_003():
    """The v0.0.2 -> v0.0.3 transformation maps every fixture correctly."""
    rules = _get_transformation_dict('0.0.2-alpha.0')
    config_prog = jq(rules['config'])
    assert config_prog.transform(config_v002_gru_cosine) == config_v003_gru_cosine
    assert config_prog.transform(config_v002_lstm_rafael) == config_v003_lstm_rafael
    assert jq(rules['metrics']).transform(metrics_v002) == metrics_v003
def parse_args(argv): """ parse the command line arguments >>> parse_args('-q *'.split(' ')) {'url': 'https://*****:*****@timestamp', 'now-2h'), 'warn': 'false', 'crit': 'false'} :param argv: :return: """ # -H $_HOSTESQ_HOST$ --port $_HOSTESQ_PORT$ --url $_HOSTESQ_URL$ --query $_HOSTESQ_QUERY$ # --range $_HOSTESQ_RANGE$ -w $_HOSTESQ_WARN$ -c $_HOSTESQ_CRIT$ --data $_HOSTESQ_DATA$ parser = argparse.ArgumentParser() parser.add_argument('-H', '--host', default='localhost') parser.add_argument('-p', '--port', default="9200") parser.add_argument('-u', '--url', default='/_search') parser.add_argument('-s', '--secure', default=False) parser.add_argument('--cred', default='') parser.add_argument('-q', '--query', required=True) parser.add_argument('-d', '--data', default='') parser.add_argument('-r', '--range', default='now-2h') parser.add_argument('-w', '--warning', default='false') parser.add_argument('-c', '--critical', default='false') args = parser.parse_args(argv) if args.cred: cred = args.cred + '@' else: cred = '' url = urllib.parse.urlunsplit( ("https" if args.secure else "http", "%s%s:%s" % ( cred, args.host, args.port, ), args.url, '', '')) if '=' in args.range: range_field, range_date = args.range.split('=') else: range_field, range_date = "@timestamp", args.range try: return { "url": url, "query": args.query, "data": jq(args.data), "range": (range_field, range_date), "warn": jq(args.warning), "crit": jq(args.critical), "data_text": args.data, "warn_text": args.warning, "crit_text": args.critical, } except ValueError as ve: print('UNKNOWN - error while parsing jq expression: %s.' % (ve)) sys.exit(3)
def map_alert_to_hash(self, idea_alert):
    """Map an IDEA alert into a flat hash via the configured jq mappings.

    For each mapping key whose jq projection of the alert equals the
    mapping's first key, applies the mapped jq program and zips its
    outputs onto the configured ``map_to`` field names.

    :param idea_alert: IDEA alert (JSON-like dict)
    :return: dict of mapped field name -> extracted value
    """
    hash_formated = {}
    for k in self.mapping_hash.keys():
        kk = self.get_first_key(self.mapping_hash[k])
        # BUG FIX: the original referenced the undefined global `json_idea`
        # here; the alert being matched is the `idea_alert` parameter.
        if (jq(k).transform(idea_alert, multiple_output=True)[0] == kk):
            hash_out = self.mapping_hash[k][kk]
            jq_output = jq(hash_out["jq"]).transform(idea_alert, multiple_output=True)
            for x in range(0, len(jq_output)):
                hash_formated[hash_out["map_to"][x]] = jq_output[x]
    # NOTE(review): return placement reconstructed as after the loop
    # (all matching mappings merged) — verify against original intent
    return hash_formated
def parseData(workingDirectory):
    """Process every *.rof file in `workingDirectory`, writing PID-update
    and ROF-update files for all non-GenericFile records."""
    os.chdir(workingDirectory)
    for rof in glob.glob("*.rof"):
        updateRof = rof.replace("original-", "")
        with open(rof) as successFile:
            metadata = json.load(successFile)
            # pull pid + dc:source/dc:title for every non-GenericFile record
            pids = jq('.[] | select(.["af-model"] != "GenericFile") |.pid?, .metadata["dc:source"], .metadata["dc:title"]').transform(metadata, multiple_output=True)
            writePIDUpdateFile(pids, rof)
            # reduced record view used to regenerate thumbnails
            thumbs = jq('.[]|select(.["af-model"] != "GenericFile")|{type,pid,properties,"properties-meta"}').transform(metadata, multiple_output=True)
            writeROFUpdateFile(thumbs, updateRof, rof)
def get_index_file_name(data, recid, file_format):
    """Get the dataset specific index file name."""
    # json -> first listed file, txt -> second, anything else -> None
    slot = {"json": 0, "txt": 1}.get(file_format)
    if slot is None:
        index_file = None
    else:
        index_file = jq.jq(".metadata._files").transform(data)[slot]["key"]
    return "http://opendata.cern.ch/record/{0}//files/{1}" \
        .format(recid, index_file)
def get_userchannel(self, users, filter_user):
    """
    Attach the IM channel to each selected user.

    :param users: list of dictionaries {id: X, name: X}
    :param filter_user: array of user names whose channel should be fetched
    :return: a modified list of dictionaries with the channel
             {id: X, name: X, channel: X}
    """
    for entry in users:
        if entry['name'] not in filter_user:
            continue
        answer = self.sc.api_call("im.open", user=jq(".id").transform(entry))
        entry['channel'] = jq('.channel.id').transform(answer)
    return users
def jq_filter(obj, expr, parse=False, *args, **kwargs):
    """Ansible filter plugin: apply jq expression `expr` to `obj`.

    Non-string objects are serialised to JSON first; the jq program then
    runs in text mode. All failures are wrapped in Ansible error types so
    playbooks get a readable message.

    :param obj: input data (str is passed through as raw JSON text)
    :param expr: jq program source
    :param parse: unused here — presumably consumed by transform kwargs; verify
    :raises AnsibleError: when the jq module is not installed
    :raises AnsibleFilterError: on compile, serialisation or transform failure
    """
    if not HAS_JQ:
        raise AnsibleError(
            "You must install the jq Python module to use the jq filter.")
    try:
        prog = jq.jq(expr)
    except ValueError as e:
        # jq signals compile errors as ValueError
        raise AnsibleFilterError("Error compiling jq expression: %s" %
                                 to_native(e))
    except Exception as e:
        raise AnsibleFilterError("Unknown error with jq expression: %s" %
                                 to_native(e))
    if not isinstance(obj, str):
        try:
            obj = json.dumps(obj)
        except ValueError as e:
            raise AnsibleFilterError("Could not serialize object as JSON: %s" %
                                     to_native(e))
    try:
        # text=obj: let jq parse the JSON text itself
        return prog.transform(text=obj, *args, **kwargs)
    except Exception as e:
        raise AnsibleFilterError("Error applying JSON expression to data: %s" %
                                 to_native(e))
def jq(self, jq_program, *args, **kwargs):
    """Run `jq_program` over the typed (id-leaf) view and rewrap the result
    as a TypedLeafs with the same leaf model."""
    transformed = jq.jq(jq_program).transform(
        self.typed(idleafs=True), *args, **kwargs
    )
    return TypedLeafs(transformed, self.leafmodel, idleafs=True)
def extract_keys(response: dict, jq_filter: str = NON_EMPTY_FILEKEYS) -> List[Path]:
    """Pull object keys out of an AWS listing response and return them as Paths."""
    matches = jq(jq_filter).transform(jsonify(response), multiple_output=True)
    return [Path(match) for match in matches]
def _retrieve_data(self, data_source: LightspeedDataSource) -> pd.DataFrame:
    """Fetch the data source's endpoint, apply its jq filter and return a DataFrame."""
    endpoint = nosql_apply_parameters_to_query(
        data_source.endpoint, data_source.parameters
    )
    raw = self.bearer_oauth_get_endpoint(endpoint)
    filtered = jq(data_source.filter).transform(raw)
    return pd.DataFrame(filtered)
def filter_events(events, mask):
    """Returns a list of elements matching a specified mask in the events
    list using jq module."""
    if sys.platform != "win32":
        return jq(mask).transform(events, multiple_output=True)
    # Windows: shell out to the jq binary instead of the python bindings.
    # NOTE(review): mask is interpolated into a command string — confirm it
    # is never attacker-controlled.
    jq_cmd = 'jq -r "' + mask + '"'
    stdout = subprocess.check_output(jq_cmd, input=json.dumps(events).encode())
    return stdout.decode("utf8").strip().split("\r\n")
def leaf_iterator(jsonable):
    """Yield (JsonPointer, leaf value) pairs for every leaf of `jsonable`."""
    if isinstance(jsonable, (list, dict)):
        paths = jq.jq('leaf_paths').transform(jsonable, multiple_output=True)
        for parts in paths:
            pointer = jsonpointer.JsonPointer.from_parts(parts)
            yield pointer, pointer.get(jsonable)
    else:
        # bare scalar: the whole document is the single leaf
        yield jsonpointer.JsonPointer(''), jsonable
def value_error_is_raised_if_input_cannot_be_processed_by_program():
    """Transforming incompatible input raises ValueError with the jq message."""
    program = jq(".x")
    expected_error_str = "jq: error: Cannot index number with string"
    try:
        program.transform(1)
    except ValueError as error:
        assert_equal(str(error), expected_error_str)
    else:
        assert False, "Expected error"
def filter_events(events, mask):
    """Returns a list of elements matching a specified mask in the events
    list using jq module."""
    if sys.platform not in ("win32", 'sunos5', 'darwin'):
        return jq(mask).transform(events, multiple_output=True)
    # platforms without usable python-jq bindings: call the jq binary
    stdout = subprocess.check_output(
        ["jq", "-r", mask], input=json.dumps(events).encode())
    return stdout.decode("utf8").strip().split(os.linesep)
def value_error_is_raised_if_input_cannot_be_processed_by_program():
    """Transforming incompatible input raises ValueError (short-form message)."""
    program = jq(".x")
    expected_error_str = "Cannot index number with string \"x\""
    try:
        program.transform(1)
    except ValueError as error:
        assert_equal(str(error), expected_error_str)
    else:
        assert False, "Expected error"
def value_error_is_raised_if_input_is_not_valid_json():
    """Feeding non-JSON text to a program raises a parse-error ValueError."""
    program = jq(".x")
    expected_error_str = "parse error: Invalid numeric literal at EOF at line 1, column 2"
    try:
        program.transform(text="!!")
    except ValueError as error:
        assert_equal(str(error), expected_error_str)
    else:
        assert False, "Expected error"
def _dump_workflow_dag(adageobj) -> Dict:
    """Serialise an adage workflow object down to a minimal DAG description.

    Round-trips through JSON (resolving refs via WithJsonRefEncoder), then
    projects out only the edge list plus per-node name/id/jobid with jq.
    """
    serialized = json.dumps(adageobj.json(), cls=WithJsonRefEncoder, sort_keys=True)
    purejson = json.loads(serialized)
    # NOTE: the backslash continuations below are *inside* the string literal,
    # leaving whitespace runs in the jq program — harmless to jq
    return jq.jq("{dag: {edges: .dag.edges, nodes: \
 [.dag.nodes[]|{metadata: {name: .task.metadata.name}, id: .id, \
 jobid: .proxy.proxydetails.jobproxy}]}}").transform(purejson)
def value_error_is_raised_if_input_is_not_valid_json():
    """Feeding non-JSON text raises ValueError (short parse-error variant)."""
    program = jq(".x")
    expected_error_str = "parse error: Invalid numeric literal\n"
    try:
        program.transform(text="!!")
    except ValueError as error:
        assert_equal(str(error), expected_error_str)
    else:
        assert False, "Expected error"
def errors_do_not_leak_between_transformations():
    """A failed transform must not poison the program's later transforms."""
    program = jq(".x")
    raised = False
    try:
        program.transform(1)
    except ValueError:
        raised = True
    assert raised, "Expected error"
    # same program object still works on valid input
    assert_equal(1, program.transform({"x": 1}))
def __normalize(self, idleafs = True):
    """Re-parse the wrapped data and, if requested, decode all 'magic'
    string leafs in place before storing the normalised document.

    :param idleafs: when True, string leafs starting with a leaf-model
        magic prefix are replaced by their decoded leaf objects
    """
    # wrap in a simple dict, necessary for if data is just a leaf value
    data = {'data': self._load_from_string(self._dump_to_string(self._jsonable), typed=False)}
    if idleafs:
        # jq predicate matching any string that starts with one of the magics
        magicexpr = ' or '.join(['startswith("{}")'.format(m) for m in self._leafmodel.magics])
        ptrs = [jsonpointer.JsonPointer.from_parts(x) for x in jq.jq('paths(type=="string" and ({}))'.format(magicexpr)).transform(data, multiple_output = True)]
        for p in ptrs:
            # swap each magic string for its decoded leaf object, in place
            p.set(data, self._leafmodel.leaf_decode(p.get(data)))
    # unwrap; name-mangled attribute of this class
    self.__jsonable = data['data']
def __init__(
    self,
    actor_config,
    couchdb_url,
    view_path,
    filter_key,
    filter_value,
    op,
):
    """CouchDB-backed flow module.

    :param actor_config: base FlowModule actor configuration
    :param couchdb_url: URL of the CouchDB database to connect to
    :param view_path: path of the CouchDB view to query
    :param filter_key: jq expression selecting the key to filter on
    :param filter_value: jq expression selecting the comparison value
    :param op: name of an ``operator`` module function (e.g. 'eq', 'lt')
    """
    FlowModule.__init__(self, actor_config)
    self.pool.createQueue("outbox")
    self.view_path = view_path
    # pre-compile both jq programs once at construction time
    self.filter_key = jq.jq(filter_key)
    self.filter_value = jq.jq(filter_value)
    self.op = getattr(operator, op)
    # NOTE(review): 'outbox' is created a second time here — looks redundant;
    # verify createQueue is idempotent before removing either call
    self.pool.createQueue('outbox')
    self.pool.createQueue('inbox')
    self.couchdb = Database(couchdb_url)
    self.registerConsumer(self.consume, 'inbox')
def get_userlist(self):
    """
    Get the id and the name of all the users

    :return: a list of dictionaries with id and name [{id: X, name: X}]
    """
    r = requests.get(
        "https://slack.com/api/users.list",
        params={"token": self.token, "pretty": 1},
    )
    selector = '.members[] | select (.deleted==false) | {id: .id, name: .name}'
    return jq(selector).transform(r.json(), multiple_output=True)
def query(self, query):
    """Run a jq `query` over this object's data.

    Single-element result lists are unwrapped; a failing query yields
    ``Knead(None)``.
    """
    try:
        result = jq(query).transform(self.data(), multiple_output=True)
    except Exception:
        # narrowed from a bare `except:` so SystemExit/KeyboardInterrupt are
        # no longer swallowed; jq failures still fall back to Knead(None)
        return Knead(None, is_data=True)
    if isinstance(result, list) and len(result) == 1:
        result = result[0]
    return Knead(result, is_data=True)
def get_cms_release(data):
    """Get the CMS SW release version."""
    raw = jq.jq(".metadata.system_details.release").transform(data)
    # Sometimes the release value begins wrongly with a white space
    cleaned = raw.replace(" ", "") if " " in raw else raw
    # The slicing is needed to keep only the version number
    return cleaned[6:]
def get_last_message(self, channel):
    """
    Method to get the info from the last message

    :param channel: channel to look
    :return: the duple user,ts related to the last message
    """
    info = self.sc.api_call("conversations.info", channel=channel)
    selector = '.channel | { user: .user, latest_user: .latest.user, ts: .latest.ts}'
    return jq(selector).transform(info)
def addusergroup(user_name, group_name):
    """ makes user_name a member of group_name """
    u = getuser(user_name)
    current_groups = jq(".groups").transform(json.loads(u.text))
    merged_groups = issubset([group_name], current_groups)
    config = jq(".").transform(json.loads(u.text))
    config['groups'] = merged_groups
    return requests.post(
        artifactory_url + 'security/users/' + user_name,
        auth=HTTPBasicAuth(artifactory_user, artifactory_password),
        data=json.dumps(config),
        headers={'Content-Type': 'application/json'},
        verify=verify_ssl,
    )
def get_contact_by_email(email):
    """Look up a contact id by email; None when not found or on server error.

    NOTE: `exec` here is a project helper that shadows the builtin.
    """
    resp = exec('get', 'contacts', query={'query': email, 'limit_rows': 1})
    if resp.status_code == 200:
        return jq('._embedded.items[0].id').transform(resp.json())
    if resp.status_code == 204:
        return None
    # handle errors 500, 503
    return None
def leaf_iterator(jsonable):
    """Iterate (pointer, value) over every leaf of a JSON-like structure."""
    if not isinstance(jsonable, (list, dict)):
        # scalar document: the root pointer is the only leaf
        yield jsonpointer.JsonPointer(""), jsonable
        return
    for parts in jq.jq("leaf_paths").transform(jsonable, multiple_output=True):
        ptr = jsonpointer.JsonPointer.from_parts(parts)
        yield ptr, ptr.get(jsonable)
def process_jsonlike(jsonlike, jq_obj_selector, callback):
    """Apply `callback` to every object in `jsonlike` matched by `jq_obj_selector`.

    The structure is wrapped in {'value': ...} so jq path expressions can
    also match the document root; matched sub-objects are replaced in place,
    while a root match short-circuits and returns the callback result.

    :param jsonlike: JSON-like structure (dict/list/scalars)
    :param jq_obj_selector: jq boolean expression evaluated on each object
    :param callback: function applied to each matched sub-object
    :return: the (possibly replaced) structure
    """
    # x[1:] strips the artificial 'value' wrapper element from each jq path
    wflowrefs = [jsonpointer.JsonPointer.from_parts(x[1:]) for x in jq.jq(
        'paths(if objects then {} else false end)'.format(jq_obj_selector)
    ).transform({'value': jsonlike}, multiple_output = True)]
    for wflowref in wflowrefs:
        value = callback(wflowref.resolve(jsonlike))
        if wflowref.path == '':
            # the selector matched the root: nothing to mutate, return directly
            return value
        else:
            wflowref.set(jsonlike, value)
    return jsonlike
def __call__(self, r):
    """Requests auth hook: obtain a token via the configured jq filter
    and attach it as a Bearer Authorization header."""
    session = Auth(**self.auth).get_session() if self.auth else Session()
    res = session.request(**self.request_kwargs)
    token = jq(self.filter).transform(res.json())
    r.headers['Authorization'] = f'Bearer {token}'
    return r
def reconcile(q, lang):
    """Search the `lang` wiki for `q` and return the top 3 hits with QIDs attached."""
    payload = {
        "action": "query",
        "list": "search",
        "srsearch": q,
        "srlimit": 3,
        "srinterwiki": True,
        "srprop": "score|pageid",
        "utf8": True,
    }
    data = request(lang, payload)
    hits = jq(".query.search").transform(data)
    return _add_qids_to_pageids(hits, lang)
def _jq(s, c):
    """
    Process jq-string

    :param str s: jq-string starting with '$'
    :param str c: jq process context
    :return: jq-processed string
    """
    _debug(" +++: jq in %s, %s", s, c)
    is_jq_string = isinstance(s, str) and s.startswith("$")
    # strings not prefixed with '$' pass through untouched
    ret = jq(s[1:]).transform(c) if is_jq_string else s
    _debug(" +++: jq out %s", ret)
    return ret
def docker(environment, parameters, state):
    """Contextualise a docker environment spec against step parameters.

    Renders each ``par_mounts`` entry's jq script over the materialised
    parameters into ``mountcontent``, and contextualises ``workdir`` via
    the state object.

    :param environment: docker environment spec (dict); deep-copied so the
        caller's object is untouched
    :param parameters: object exposing ``.json()`` and ``.leafs()``
        (TypedLeafs-like) — TODO confirm
    :param state: provides ``contextualize_value()`` for path rewriting
    :return: the transformed copy of `environment`
    """
    environment = copy.deepcopy(environment)
    jsonpars = parameters.json()
    for p, v in parameters.leafs():
        if p.path == '':
            # parameters is a bare leaf: its value is the whole document
            jsonpars = v
            break
        # write each leaf value back over its pointer location
        p.set(jsonpars, v)
    for i, x in enumerate(environment['par_mounts']):
        # consume the script key; its rendered output replaces it
        script = x.pop('jqscript')
        x['mountcontent'] = jq.jq(script).transform(jsonpars, text_output = True)
    if environment['workdir'] is not None:
        environment['workdir'] = state.contextualize_value(environment['workdir'])
    return environment
def from_dict(cls, data):
    """Build an instance from a YAML-derived dict, validating against the
    class schema and pre-compiling the optional jq filter."""
    try:
        jsonschema.validate(data, cls._schema)
    except ValidationError as e:
        raise YamlParseError(str(e))
    jq_filter = None
    filter_ = data.get('filter', None)
    if filter_ is not None:
        try:
            jq_filter = jq(filter_)
        except ValueError as e:
            raise JqCompileError(str(e))
    return cls(
        format=data.get('format', 'plain'),
        value=data['value'],
        jq_filter=jq_filter,
        lookup_var=data.get('lookup-var', True),
    )
def iter_items(self, seek_to=None, reverse=False, regex=None, value_filter=None):
    """
    iterates through the items in the database.

    `seek_to` - seeks to the given position if specified.
        defaults to the first record if reverse is False.
        defaults to the last record if reverse is True.
    `regex` - returns only items whose keys match the regex.
    `value_filter` - transforms item values (post regex match) using a jq
        filter. the jq filter syntax is the same as
        https://stedolan.github.io/jq/
    """
    if regex is not None:
        regex = re.compile(regex)
    if value_filter is not None:
        # compile the jq program once, reuse it per item
        value_filter = jq(value_filter)
    _iter = self.rdb.iteritems()
    _iter = self._configure_iterator(_iter, seek_to=seek_to, reverse=reverse)
    # local alias avoids one attribute lookup per item
    unpackfn = self.unpackfn
    for (key, value) in _iter:
        if regex is not None and not regex.match(key):
            continue
        value = unpackfn(value)
        if value_filter is not None:
            value = value_filter.transform(value, multiple_output=True)
            # jq may emit 0, 1 or many outputs: skip empties, unwrap singletons
            if len(value) == 0:
                continue
            if len(value) == 1:
                value = value[0]
        yield (key, value)
def on_status(self, status):
    """Streaming callback: project the incoming status JSON through each
    configured jq key and hand the resulting row to the loader."""
    # dict comprehension instead of dict([(k, v) ...]) — same result,
    # without building a throwaway list of tuples
    row = {key: jq.jq(key).transform(status._json) for key in self._keys}
    self._loader._on_event(row)
def jq(self, jq_program, *args, **kwargs):
    """Apply a jq program to the typed view; wrap the result back into TypedLeafs."""
    program = jq.jq(jq_program)
    result = program.transform(self.typed(idleafs=True), *args, **kwargs)
    return TypedLeafs(result, self.leafmodel, idleafs=True)
def multiple_output_elements_are_returned_if_multiple_output_is_true():
    """multiple_output=True yields every produced element as a list."""
    result = jq(".[]+1").transform([1, 2, 3], multiple_output=True)
    assert_equal([2, 3, 4], result)
def can_add_one_to_each_element_of_an_array():
    """An array-collecting program maps over every element."""
    incremented = jq("[.[]+1]").transform([1, 2, 3])
    assert_equal([2, 3, 4], incremented)
def input_string_is_parsed_to_json_if_raw_input_is_true():
    """text= input is parsed as JSON before the program runs."""
    parsed = jq(".").transform(text="42")
    assert_equal(42, parsed)
def output_is_serialised_to_json_string_if_text_output_is_true():
    """text_output=True returns the result serialised as a JSON string."""
    serialised = jq(".").transform("42", text_output=True)
    assert_equal('"42"', serialised)
def elements_in_text_output_are_separated_by_newlines():
    """Multiple text-mode outputs are joined with newline separators."""
    text = jq(".[]").transform([1, 2, 3], text_output=True)
    assert_equal("1\n2\n3", text)
def first_output_element_is_returned_if_multiple_output_is_false_but_there_are_multiple_output_elements():
    """Without multiple_output, only the first produced element is returned."""
    first = jq(".[]+1").transform([1, 2, 3])
    assert_equal(2, first)
def parseJSON(decodedInfo, G):
    """ Function to read JSON data. Returns: Graph object extracted from data.

    `decodedInfo` is a positional list: [name, location, nodeinfo, nodeattr,
    edgefrom, edgeto, edgeattr], where the query fields hold jq (when the
    global JQ flag is truthy) or jsonpath expressions, or the literal
    string "None" when absent. `G` is a networkx-like graph mutated in place.

    NOTE(review): reconstructed from a whitespace-mangled source; statement
    nesting inside the per-line try block is a best-effort reading — verify.
    """
    nodeData = []
    nodeAttr = []
    edgeToData = []
    edgeFromData = []
    edgeAttr = []
    if not JQ:
        # jsonpath mode: normalise the three query expressions up front
        for i in {2, 4, 5}:
            decodedInfo[i] = jsonPathFix(decodedInfo[i])
    # Create metamap of decodedInfo for readability.
    metaMap = {'name': decodedInfo[0], 'location': decodedInfo[1],
               'nodeinfo': decodedInfo[2], 'nodeattr': decodedInfo[3],
               'edgefrom': decodedInfo[4], 'edgeto': decodedInfo[5],
               'edgeattr': decodedInfo[6]}
    # Check for existance of attributes, and create temp array if they exist.
    if metaMap['nodeattr'] != "None":
        nodeAttrTemp = metaMap['nodeattr'].split(",")
    if metaMap['edgeattr'] != "None":
        edgeAttrTemp = metaMap['edgeattr'].split(",")
    if JQ:
        # pre-compile the per-line jq programs once
        if metaMap['nodeinfo'] != "None":
            jqNode = jq.jq(metaMap['nodeinfo'])
        if metaMap['edgefrom'] != "None":
            jqEdgeFrom = jq.jq(metaMap['edgefrom'])
        if metaMap['edgeto'] != "None":
            jqEdgeTo = jq.jq(metaMap['edgeto'])
    try:
        # the input is newline-delimited JSON: one record per line
        with open(metaMap['location']) as jsonFile:
            for jsonLine in jsonFile:
                attr = {}
                edgeAttr = {}
                try:
                    jsonLine = json.loads(jsonLine)
                    # Ensure nodeinfo and nodeattr exists within the rdf
                    if metaMap['nodeinfo'] != "None":
                        if JQ:
                            record = jqNode.transform(jsonLine)
                        else:
                            record = [match.value for match in parse(metaMap['nodeinfo']).find(jsonLine)][0]
                        if record != "":
                            n = str(record)
                            record = "";
                        # NOTE(review): if record was "", `n` may be unbound
                        # (or stale from a previous line) at add_node — verify
                        if metaMap['nodeattr'] != "None":
                            for j in range(0, len(nodeAttrTemp)):
                                # each entry is "attrname:query"
                                try:
                                    if JQ:
                                        record = jq.jq(nodeAttrTemp[j].split(":")[1]).transform(jsonLine)
                                    else:
                                        record = [match.value for match in parse(jsonPathFix(nodeAttrTemp[j].split(":")[1])).find(jsonLine)][0]
                                    if record != "":
                                        attr.update({str(nodeAttrTemp[j].split(":")[0]): str(record)})
                                        record = "";
                                except (ValueError, IndexError):
                                    attr.update({str(nodeAttrTemp[j].split(":")[0]): 'None'})
                        else:
                            attr = {'Attr': 'None'}
                        #print n , " " , attr
                        G.add_node(n, attr_dict=attr)
                    # Ensure edgefrom, edgeto and edgeattr exist within the rdf
                    if metaMap['edgefrom'] != "None" and metaMap['edgeto'] != "None":
                        # Extract edge from data from JSON and check is not null
                        try:
                            if JQ:
                                record = jqEdgeFrom.transform(jsonLine)
                            else:
                                record = [match.value for match in parse(metaMap['edgefrom']).find(jsonLine)][0]
                            if record != "":
                                f = str(record)
                                record = "";
                        except (ValueError, IndexError):
                            f = 'None'
                        # Extract edge from data from JSON and check is not null
                        try:
                            if JQ:
                                record = jqEdgeTo.transform(jsonLine)
                            else:
                                record = [match.value for match in parse(metaMap['edgeto']).find(jsonLine)][0]
                            if record != "":
                                t = str(record)
                                record = "";
                        except (ValueError, IndexError):
                            t = 'None'
                        if metaMap['edgeattr'] != "None":
                            # NOTE(review): edge attrs are merged into `attr`,
                            # not the `edgeAttr` dict initialised above — verify
                            for j in range(0, len(edgeAttrTemp)):
                                try:
                                    if JQ:
                                        record = jq.jq(edgeAttrTemp[j].split(":")[1]).transform(jsonLine)
                                    else:
                                        record = [match.value for match in parse(jsonPathFix(edgeAttrTemp[j].split(":")[1])).find(jsonLine)][0]
                                    if record != "":
                                        attr.update({str(edgeAttrTemp[j].split(":")[0]): str(record)})
                                        record = "";
                                except (ValueError, IndexError):
                                    attr.update({str(edgeAttrTemp[j].split(":")[0]): 'None'})
                        G.add_edge(f, t, attr_dict=attr)
                except IndexError:
                    # malformed record: report and continue with the next line
                    traceback.print_exc()
                    pass
    except Exception:
        traceback.print_exc()
    return G
def _compile_jq(jq_expr):
    """Compile JQ expression, returning a JQ program object

    See: https://pypi.python.org/pypi/jq
    """
    # deferred import: jq is an optional dependency of this module
    import jq
    return jq.jq(jq_expr)
def unicode_strings_can_be_used_as_input():
    """Non-ASCII JSON text round-trips through a jq program."""
    result = jq(".").transform(text='"‽"')
    assert_equal("‽", result)
# Script: fetch the skill list from the DIY API, jq-filter it into DATA,
# then fetch per-skill challenge data.
# NOTE(review): this fragment appears truncated mid-loop at the source.
print("Getting Skill Data...", end = ' ')
rawSkillsData = requests.get(diyUrl(), params=PARAMS['skills'])
print("Done.")
# Get Skill JSON
rawSkillJSON = rawSkillsData.json()
# Get Skill JSON length; warn when the page is full (results may be cut off)
if len(rawSkillJSON['response']) == PARAMS['skills']['limit']:
    print("\033[91mWARN: Skill list has length of {}, may want to increase limit?\033[00m".format(PARAMS['skills']['limit']))
else:
    print("JSON items: {0} out of {1}, {2} left".format(len(rawSkillJSON['response']), PARAMS['skills']['limit'], PARAMS['skills']['limit']-len(rawSkillJSON['response'])))
# Filter Skill JSON (list) and add to data (dict)
print("Filtering Skill Data...", end = ' ')
skillData = jq.jq(FILTERS['skills']).transform(rawSkillJSON)
DATA['skills'] += skillData
print("Done.")
# Filter Skill JSON to get skill names
print("Getting list of Skills...", end = ' ')
skills = jq.jq(FILTERS['skillList']).transform(skillData)
print("Done.")
# For each skill, grab challenge list, check length, filter, add to data (dict)
print("Getting challenge data...")
for skill in skills:
    # Get achievements
    print("\tSkill: {}...".format(skill), end=' ')
    rawChallengeData = requests.get(diyUrl(skill), params=PARAMS['challenges'])
def output_of_dot_operator_is_input():
    """The identity program returns its input unchanged."""
    result = jq(".").transform("42")
    assert_equal("42", result)
def unicode_strings_can_be_used_as_programs():
    """A jq program containing non-ASCII literals compiles and runs."""
    result = jq('.+"‽"').transform(text='"Dragon"')
    assert_equal("Dragon‽", result)
def multiple_inputs_in_raw_input_are_separated_by_newlines():
    """Newline-delimited text input produces one output per input value."""
    results = jq(".+1").transform(text="1\n2\n3", multiple_output=True)
    assert_equal([2, 3, 4], results)