def main():
    """Assemble and print the dart flow-graph dump command line."""
    binary = "xcodebuild/DebugX64/dart"
    redirect = "> ../my_demo/flow_graph.log 2>&1"
    # Flags kept as commented JSON (copied from a VS Code launch config);
    # JsonComment tolerates the // lines and the trailing comma.
    input_json = '''[
    "--no-background-compilation",
    "--optimization-counter-threshold=1",
    // "--print_classes",
    "--print-flow-graph",
    "--print-flow-graph-optimized",
    "--disassemble-optimized",
    "--disassemble",
    // "--trace_compiler",
    // "--trace_optimizing_compiler",
    // "--trace_compilation_trace",
    // "--trace_osr",
    "--trace_inlining",
    // "--trace_range_analysis",
    // "--trace_ssa_allocator",
    // "--trace_type_propagation",
    "--inlining_callee_size_threshold=1",
    "--inlining_caller_size_threshold=1",
    "--print-flow-graph-filter=main.dart",
    "${workspaceFolder}/../my_demo/main.dart",
]'''
    parser = JsonComment(json)
    flags = parser.loads(input_json)
    # print(flags)
    # Strip the VS Code workspace prefix so paths are relative to the cwd.
    flags = [flag.replace("${workspaceFolder}/", "") for flag in flags]
    cmdline = [binary, *flags, redirect]
    print("cmdline:")
    print(" ".join(cmdline))
def get_mutual_fund_quote(symbol):
    """Fetch the quote payload for *symbol* and print its performance section."""
    rsp = requests.get(FINANCE_URL.format(symbol))
    # Trim the wrapper around the payload (6 leading / 2 trailing bytes),
    # then flatten to a single line before parsing.
    content = rsp.content[6:-2].decode('utf-8').replace("\n", "")
    print(type(content))
    fin_data = JsonComment(json).loads(content)
    performance = fin_data['performance']
    print(performance)
    print("----------------------")
def main(triggerblob: func.InputStream, outputblob: func.Out[str]):
    """Azure blob trigger: re-serialize a commented-JSON blob as strict JSON."""
    parser = JsonComment(json)
    logging.info(f"Python blob trigger function processed blob \n"
                 f"Name: {triggerblob.name}\n"
                 f"Blob Size: {triggerblob.length} bytes")
    decoded = triggerblob.read().decode("utf-8")
    as_python = parser.loads(decoded)
    # json.dumps (not the comment-tolerant parser) produces strict output.
    outputblob.set(json.dumps(as_python))
def scan(self, ip_list, port_list, bin_path, opts="-sS -Pn -n --wait 0 --max-rate 5000"):
    """Executes masscan on given IPs/ports.

    :param ip_list: iterable of IPs/CIDRs to scan
    :param port_list: iterable of ports to scan
    :param bin_path: path to the masscan binary
    :param opts: extra masscan options as a single whitespace-separated string
    :return: parsed scan results ([] when masscan produced no output)
    """
    ports = ','.join(str(p) for p in port_list)
    ips = ','.join(str(ip) for ip in ip_list)
    # split() (not split(' ')) so repeated/odd whitespace in opts does not
    # inject empty argv entries; the original also had a no-op
    # `bin_path = bin_path` self-assignment, removed here.
    cmd = [bin_path, *opts.split(), '-oJ', '-', '-p', ports, ips]
    # argv list with shell=False: nothing here is shell-interpreted.
    proc = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
    # masscan may emit nothing when no host answers; treat that as an empty list.
    out = proc.stdout.decode('utf-8') if proc.stdout else '[]'
    # JsonComment tolerates masscan's non-strict -oJ output (trailing commas).
    parser = JsonComment(json)
    return parser.loads(out)
def _load(self, text):
    """Parse *text* as comment-tolerant JSON; on failure raise a CommandError
    that shows the offending line with a caret under the error column."""
    parser = JsonComment(json)  # standard JSON is stupid
    try:
        return parser.loads(text)
    except ValueError as e:
        # Pull the character offset out of the parser's message and split
        # the input there.
        offset = int(re.search(r'char (.*?)\)', str(e)).group(1))
        before = text[:offset]
        after = text[offset:]
        # Rebuild the full line containing the error and point at the column.
        line1 = (before[before.rfind('\n'):] + after[:after.find('\n')]).strip()
        blen = len((before[before.rfind('\n'):]).strip())
        line2 = ' ' * blen + '^'
        raise CommandError(self, f'syntax error\n{line1}\n{line2}\n{e}')
def check_sold_out(s, product_url):
    """Poll *product_url* until at least one size is in stock.

    :param s: requests session to fetch with
    :param product_url: product page URL
    :return: (available_sizes, soup) once stock is found

    Bug fixed: the original recursed on the retry path without returning the
    recursive call's result, so after any retry the caller received None (and
    long sold-out periods risked hitting the recursion limit). Rewritten as a
    loop with identical per-attempt behavior.
    """
    while True:
        r = s.get(product_url, headers=get_headers)
        soup = bs(r.text, 'lxml')
        # print(soup)
        data_size = soup.find_all('script', {'type': 'text/javascript'})
        # NOTE(review): assumes the stock blob is always in the 11th script
        # tag — brittle; confirm against the live page markup.
        text_data = data_size[10].text
        data = re.findall(r"var item_stock =(.+?);\n", text_data, re.S)
        parser = JsonComment(json)
        j = parser.loads(data[0])  # json with all the data we need
        colors_data_dict = j['colors']
        available_sizes = get_avail_sizes(colors_data_dict)
        if available_sizes != {}:
            return available_sizes, soup
        print(gettime() + Fore.YELLOW + ' [WARN.] -> Product is soldout, retrying...')
        time.sleep(delay)
def attempt_load_mcmodinfo(filepath):
    """
    Returns the mcmod.info file's contents (as dictionary) if available
    :param filepath: string, absolute path to the mod file
    :return: dict
    """
    logger.info("Attempting to load MCMod.info from {}".format(filepath))
    # Comment-tolerant parser: mcmod.info files in the wild often carry
    # comments / trailing commas that strict json.loads would reject.
    parser = JsonComment(json)
    try:
        with zipfile.ZipFile(filepath, 'r') as modfile:
            try:
                with modfile.open('mcmod.info') as info:
                    #print(info.read().decode('utf-8'))
                    #i = json.loads(info.read().decode('utf-8').replace("\n", ""))
                    try:
                        logger.debug("Attempting to parse MCMod.info...")
                        # strict=False is forwarded to json.loads so control
                        # characters inside strings don't abort the parse.
                        i = parser.loads(info.read().decode('utf-8'), strict=False)
                    except UnicodeDecodeError:
                        # Archive member isn't valid UTF-8; log and fall
                        # through with i = None rather than failing the load.
                        logger.warning("Decoding failed, skipping")
                        i = None
                    logger.debug(
                        "MCModInfo data parsed to be:\n```\n{}\n```".format(
                            pformat(i)))
                    logger.info("Successfully loaded mod info: {}".format(i))
                    return i
            except KeyError as e:
                # ZipFile.open raises KeyError when the member is absent;
                # function then falls through and implicitly returns None.
                logger.error(
                    "Failed to load MCMod.info from {} as it's not present in the archive."
                    .format(filepath))
    except Exception as ex:
        logger.error("Failed to load MCMod.info from {} due to {} ({})".format(
            filepath, type(ex).__name__, ex.args))
        #return None
        # Deliberate re-raise: callers decide how to handle unreadable archives.
        raise
class ModJson:
    """Load and save JSON files that may contain // and /* */ comments."""

    def __init__(self) -> None:
        # Comment-tolerant parser for loads; saving uses strict json.dumps.
        self._loader = JsonComment()

    def load(self, filepath: str) -> dict:
        """Read *filepath*, strip comments, and parse the remainder as JSON."""
        try:
            with open(filepath) as f:
                return self._loader.loads(self._cleanup(f.read()))
        except Exception as e:
            raise Exception(f'filepath = {filepath} error = [{type(e)}] {e}')

    def _cleanup(self, content: str) -> str:
        """Remove // line comments and /* */ block comments from *content*.

        The lookbehind keeps '//' inside URLs ("http://...") intact.
        """
        without_line_comments = re.sub('(?<![:"])//.*\n', '', content)
        return re.sub(r'/\*[^*]*\*/', '', without_line_comments)

    def save(self, filepath: str, data: dict):
        """Write *data* as pretty-printed JSON with CRLF line endings."""
        try:
            with open(filepath, mode='w', newline='\r\n') as f:
                f.write(json.dumps(data, indent=2))
        except Exception as e:
            raise Exception(f'filepath = {filepath} error = [{type(e)}] {e}')
# Schema files themselves are not validated -- bail out immediately.
if re.match('^schema', file):
    sys.exit(0)
print('Validating %s via %s' % (file, schema_name))
with open(schema_name, 'r') as f:
    schema_data = json.load(f)
with open(file, 'r') as f:
    # decode() strips a possible BOM before parsing.
    jstr_no_bom = decode(f.read())
# Remove any stale failure marker from a previous run.
marker = file + '.failed'
if os.path.exists(marker):
    os.remove(marker)
try:
    json_data = JsonComment(json).loads(jstr_no_bom)
    validate(json_data, schema_data)
except Exception:
    trace = traceback.format_exc()
    print(trace)
    with open(marker, 'a+') as f:
        f.write(trace)
    sys.exit(1)
sys.exit(0)
priority += 5 if urlDomain(data["url"]): priority += 50 if str(data["info"]["as"]).find("666") >= 0: priority += 50 iso = data["info"]["iso"] if iso.find("US") >= 0: priority -= 10 elif iso.find("RU") >= 0: priority += 10 elif iso.find("CN") >= 0 or iso.find("TW") >= 0: priority -= 30 return priority data = '' print("Enter data: ") data = '\n'.join(iter(input, data)) parser = JsonComment(json) data = parser.loads(data) priority = changePriority(data) print(priority)
def main(encoding: str, inputblob: bytes) -> str:
    """Decode *inputblob* with *encoding*, parse it as comment-tolerant JSON,
    and return it re-serialized as strict JSON text.

    :param encoding: text encoding of the incoming blob (e.g. "utf-8")
    :param inputblob: raw blob bytes
    :return: strict JSON string
    """
    parser = JsonComment(json)
    # Typo fixed in the log message: "devode" -> "decode".
    logging.info(f"@@@@@@ {encoding}! have waiting to use for decode")
    return json.dumps(parser.loads(inputblob.decode(encoding)))
# Objects { "key" : "value", "another key" : \"\"\" \\n A multiline string.\\n It will wrap to single line, but a trailing space per line is kept. \"\"\", }, ; Other Values 81, ; Allow a non standard trailing comma true, ] """ parser = JsonComment(json) parsed_object = parser.loads(string) print("\n", "*"*80, "\n") print(parsed_object[0]["another key"], "\n") print(parser.dumps(parsed_object), "\n") print("\n", "*"*80, "\n") ################################################################################