def gen_json(obj, outfile, format):
    """Serialize *obj* to *outfile* in one of three JSON flavors.

    format: 'json' (pretty, single-space indent), 'compact' (no
    whitespace), or 'hjson' (human-friendly Hjson).  Any other value
    raises ValueError.  Returns 0 on success.
    """
    if format == 'hjson':
        hjson.dump(obj, outfile, ensure_ascii=False, for_json=True,
                   use_decimal=True)
    elif format == 'json':
        hjson.dumpJSON(obj, outfile, ensure_ascii=False, use_decimal=True,
                       indent=' ', for_json=True)
    elif format == 'compact':
        hjson.dumpJSON(obj, outfile, ensure_ascii=False, for_json=True,
                       use_decimal=True, separators=(',', ':'))
    else:
        raise ValueError('Invalid JSON format ' + format)
    return 0
async def on_ready():
    """Sync configured study roles with the live guild's role objects,
    persist the updated config to config.hjson, then disconnect."""
    guild = client.get_guild(utilities.get_guildID())
    # Map each guild role name to the two fields mirrored into the config.
    roles_by_name = {
        role.name: {"name": role.name, "mention": role.mention}
        for role in guild.roles
    }
    test_mode = os.getenv("mode") == "test"
    key_name = ("test_" if test_mode else "") + "study_roles"
    if test_mode:
        # Work on a copy so the production section stays untouched.
        utilities.config["test_study_roles"] = copy.deepcopy(
            utilities.config["study_roles"])
    for key in utilities.config["study_roles"]:
        print(roles_by_name[key])
        entry = roles_by_name[key]
        utilities.config[key_name][key]["name"] = entry["name"]
        utilities.config[key_name][key]["mention"] = entry["mention"]
    with open("config.hjson", "w") as f:
        hjson.dump(utilities.config, f)
    await client.logout()
def _main():
    """Launch one fuzzing experiment per ISA combination per run, writing a
    temporary HJSON config for each and cleaning up afterwards."""
    # One experiment per (toplevel, opcode, instruction, termination) combo.
    isas = list(
        itertools.product(TOPLEVELS, OPCODE_TYPES, INSTR_TYPES,
                          TERMINATE_TYPES))
    print(LINE_SEP)
    print(green("Launching %d experiments ..." % (len(isas) * len(RUNS))))
    print(LINE_SEP)
    # create a temp dir to store config files
    try:
        # Probe for the first free tmp<N> directory name in the CWD.
        tmp_dir = os.path.join(os.getcwd(), "tmp%d")
        i = 0
        while os.path.isdir(tmp_dir % i):
            i += 1
        tmp_dir = tmp_dir % i
        os.mkdir(tmp_dir)
        # create config files on the fly and launch experiments
        for toplevel, opcode_type, instr_type, terminate_type in isas:
            for run in RUNS:
                # craft config dictionary
                cdict = copy.deepcopy(CONFIG_DICT)
                # Set experiment name
                experiment_name = EXPERIMENT_BASE_NAME % (
                    toplevel, opcode_type, instr_type, terminate_type, run)
                experiment_name = experiment_name.replace("_", "-")
                # print(experiment_name)
                # continue
                cdict["experiment_name"] = experiment_name
                cdict["toplevel"] = toplevel
                # Set configurations
                cdict["fuzzer_params"]["duration_mins"] = DURATION_MINS
                cdict["model_params"]["opcode_type"] = opcode_type
                cdict["model_params"]["instr_type"] = instr_type
                if terminate_type == "invalidop":
                    cdict["model_params"]["terminate_on_invalid_opcode"] = 1
                else:
                    cdict["model_params"]["terminate_on_invalid_opcode"] = 0
                # write to HJSON file
                hjson_filename = experiment_name + ".hjson"
                hjson_file_path = os.path.join(tmp_dir, hjson_filename)
                with open(hjson_file_path, "w") as fp:
                    hjson.dump(cdict, fp)
                # launch fuzz the DUT
                fuzz(["--fail-silently", hjson_file_path])
                # cleanup config file
                os.remove(hjson_file_path)
    finally:
        # NOTE(review): this removes EVERY tmp* directory in the CWD, not
        # just the one created above — could clobber unrelated directories;
        # confirm this is intended.
        for directory in glob.glob("tmp*"):
            shutil.rmtree(directory)
    print(LINE_SEP)
    print(green("DONE!"))
    print(LINE_SEP)
def execute_newsplease_cli(newspaper_url):
    """Point the news-please sitelist at *newspaper_url*, give the Scrapy
    crawl a unique job directory, then launch a news-please subprocess and
    record its PID.

    Runs entirely under the shared `lock` so concurrent callers cannot
    interleave their edits of the shared config files.
    """
    with lock:
        sleep(20)  # NOTE(review): unexplained fixed delay — presumably pacing
        config_path = pd.read_csv('configs_path.csv')['config_path'].tolist()[0]
        general_config = 'config.cfg'
        news_config = 'sitelist.hjson'
        # Rewrite the first base URL in the sitelist.  FIX: the original
        # opened the file for reading and re-bound `f` to a write handle
        # without closing the read handle; context managers close both.
        with open(os.path.join(config_path, news_config), 'r') as f:
            nc = hjson.load(f)
        nc['base_urls'][0]['url'] = newspaper_url
        with open(os.path.join(config_path, news_config), 'w') as f:
            hjson.dump(nc, f)
        # Give this crawl its own Scrapy job directory so runs don't collide.
        config = configparser.RawConfigParser()
        config.read(os.path.join(config_path, general_config))
        config.set('Scrapy', 'JOBDIRNAME',
                   'jobdir' + newspaper_url.split('.')[1] +
                   str(datetime.today()).replace(':', '-'))
        with open(os.path.join(config_path, general_config), 'w') as configfile:
            config.write(configfile)
        # Launch the crawler (list argv, shell=False — no shell injection).
        proc = subprocess.Popen(['news-please'], shell=False)
        # Track the child PID so it can be reaped/killed later.
        with open('json_data/temp/process_PIDs.txt', 'a') as f:
            f.write('\n ' + str(proc.pid))
async def save(json=None, file=None, db=None):
    """Write *json* to *file* when both are supplied, then commit *db*
    when one is supplied."""
    if json and file:
        with open(file, 'w') as out:
            print('Saving to disk..')
            hjson.dump(json, out, indent=4)
            print('Saved successfully!')
    if db:
        db.commit()
def write_results_as_hjson(self, outpath: Path) -> None:
    """Serialize the accumulated buckets to *outpath* as Hjson."""
    with outpath.open("w") as fout:
        hjson.dump(self.buckets,
                   fout,
                   ensure_ascii=False,
                   for_json=True,
                   use_decimal=True)
def render(self, postUrl):
    """Load the post behind *postUrl*, backfill creation/publish/image
    metadata defaults, cap the modification history at 10 entries, and
    write the updated post back to its file as Hjson.  (Python 2 code.)"""
    post = self.loadPost(postUrl)
    if post is not None:
        filePost = os.path.join(self.postsFolder, postUrl)
        # Debug output path immediately overridden by the real post path.
        fileaddr = '/home/jmramoss/hjson.txt'
        fileaddr = filePost
        # NOTE(review): outfile is never closed; consider a with-block.
        outfile = open(fileaddr, 'w')
        print '>>>>>>>>>>> postUrl -> ' + postUrl
        formatDate = '%d/%m/%Y %H:%M:%S'
        currentTime = datetime.datetime.now()
        textCurrentTime = currentTime.strftime(formatDate)
        # File mtime is used as the "last modification" timestamp.
        lastModification = os.path.getmtime(filePost)
        fechaLastModification = datetime.datetime.fromtimestamp(
            lastModification)
        textFechaLastModification = fechaLastModification.strftime(
            formatDate)
        # Backfill each metadata section only where keys are missing.
        post['creation'] = {} if 'creation' not in post else post[
            'creation']
        post['creation'][
            'author'] = self.site['author'] if 'author' not in post[
                'creation'] else post['creation']['author']
        post['creation'][
            'date'] = textFechaLastModification if 'date' not in post[
                'creation'] else post['creation']['date']
        post['publish'] = {} if 'publish' not in post else post['publish']
        post['publish'][
            'author'] = self.site['author'] if 'author' not in post[
                'publish'] else post['publish']['author']
        post['publish']['date'] = textCurrentTime if 'date' not in post[
            'publish'] else post['publish']['date']
        post['images'] = {} if 'images' not in post else post['images']
        post['images']['title'] = 'imgtitle.jpg' if 'title' not in post[
            'images'] else post['images']['title']
        # NOTE(review): the two conditions below test post['creation']
        # instead of post['images'] — looks like a copy-paste bug from the
        # 'title' line above; confirm before fixing.
        post['images']['banner'] = 'imgbanner.jpg' if 'banner' not in post[
            'creation'] else post['images']['banner']
        post['images']['icon'] = 'imgicon.jpg' if 'icon' not in post[
            'creation'] else post['images']['icon']
        post['modification'] = [] if 'modification' not in post else post[
            'modification']
        # Keep only the most recent `limitModification` history entries,
        # then push the current modification onto the front.
        lenModification = len(post['modification'])
        limitModification = 10
        if lenModification >= limitModification:
            post['modification'] = post['modification'][
                0:limitModification]
        lastModification = {}
        lastModification['author'] = self.site['author']
        lastModification['date'] = textFechaLastModification
        post['modification'].insert(0, lastModification)
        hjson.dump(post,
                   outfile, encoding='UTF-8')
def main():
    """CLI entry point: read (H)JSON from a file or stdin and re-emit it
    as hjson (default), pretty JSON (-j) or compact JSON (-c)."""
    fmt = "hjson"
    args = []
    # Hand-rolled option scan: flags may appear anywhere among positionals.
    for arg in sys.argv[1:]:
        if arg in ("-h", "--help"):
            showerr(HELP)
            return
        elif arg == "-j":
            fmt = "json"
        elif arg == "-c":
            fmt = "compact"
        elif arg in ("-V", "--version"):
            showerr("Hjson " + hjson.__version__)
            return
        elif arg[0] == "-":
            showerr(HELP)
            raise SystemExit("unknown option " + arg)
        else:
            args.append(arg)

    outfile = sys.stdout
    if not args:
        infile = sys.stdin
    elif len(args) == 1:
        infile = open(args[0], "r")
    else:
        showerr(HELP)
        raise SystemExit("unknown options")

    with infile:
        try:
            obj = hjson.load(infile, use_decimal=True)
        except ValueError:
            raise SystemExit(sys.exc_info()[1])

    with outfile:
        if fmt == "json":
            hjson.dumpJSON(obj, outfile, ensure_ascii=False,
                           use_decimal=True, indent=" ")
        elif fmt == "compact":
            hjson.dumpJSON(
                obj,
                outfile,
                ensure_ascii=False,
                use_decimal=True,
                separators=(",", ":"),
            )
        else:
            hjson.dump(obj, outfile, ensure_ascii=False, use_decimal=True)
        outfile.write("\n")
def main():
    """Parse AscentLint log/report files into an aggregated results.hjson.

    Exits with status 1 when any warnings or errors were found, 0 otherwise.
    """
    parser = argparse.ArgumentParser(
        description="""This script parses AscentLint log and report files from
        a lint run, filters the messages and creates an aggregated result
        .hjson file with the following fields:

        {"tool": "ascentlint", "errors" : [], "warnings" : [],
         "lint_errors" : [], "lint_warnings" : [], "lint_infos" : []}

        The fields 'errors' and 'warnings' contain file IO messages or
        messages output by the tool itself, whereas the fields prefixed with
        'lint_' contain lint-related messages.

        The script returns nonzero status if any warnings or errors are
        present.
        """)
    parser.add_argument('--repdir',
                        type=str,
                        default="./",
                        help="""The script searches the 'ascentlint.log' and
                        'ascentlint.rpt' files in this directory.
                        Defaults to './'""")
    parser.add_argument('--outdir',
                        type=str,
                        default="./",
                        help="""Output directory for the 'results.hjson' file.
                        Defaults to './'""")
    args = parser.parse_args()
    results = get_results(args.repdir)

    with Path(args.outdir).joinpath("results.hjson").open("w") as results_file:
        hjson.dump(results,
                   results_file,
                   ensure_ascii=False,
                   for_json=True,
                   use_decimal=True)

    # return nonzero status if any warnings or errors are present
    # lint infos do not count as failures
    nr_errors = len(results["errors"]) + len(results["lint_errors"])
    nr_warnings = len(results["warnings"]) + len(results["lint_warnings"])
    # BUG FIX: was `and`, which let runs with only warnings (or only errors)
    # exit successfully — contradicting the documented contract above.
    if nr_errors > 0 or nr_warnings > 0:
        print("Lint not successful, got %d warnings and %d errors." %
              (nr_warnings, nr_errors))
        sys.exit(1)

    print("Lint successful, got %d warnings and %d errors." %
          (nr_warnings, nr_errors))
    sys.exit(0)
def main():
    """Parse a verible lint log into an aggregated results hjson file.

    Exits with status 1 when any warnings or errors were found, 0 otherwise.
    """
    # NOTE(review): the description says tool "verilator" while the script
    # parses verible logs — probably copied from a sibling script; confirm.
    parser = argparse.ArgumentParser(
        description="""This script parses verible lint log files from a lint
        run, filters the messages and creates an aggregated result .hjson
        file with the following fields:

        {"tool": "verilator", "errors" : [], "warnings" : [],
         "lint_errors" : [], "lint_warnings" : [], "lint_infos" : []}

        The fields 'errors' and 'warnings' contain file IO messages or
        messages output by the tool itself, whereas the fields prefixed with
        'lint_' contain lint-related messages.

        The script returns nonzero status if any warnings or errors are
        present.
        """)
    parser.add_argument('--logpath',
                        type=str,
                        default="lint.log",
                        help=('FPV log file path. Defaults to `lint.log` '
                              'under the current script directory.'))
    parser.add_argument(
        '--reppath',
        type=str,
        default="results.hjson",
        help=('Parsed output hjson file path. Defaults to '
              '`results.hjson` under the current script directory.'))
    args = parser.parse_args()

    results = get_results(args.logpath)
    with Path(args.reppath).open("w") as results_file:
        hjson.dump(results,
                   results_file,
                   ensure_ascii=False,
                   for_json=True,
                   use_decimal=True)

    # return nonzero status if any warnings or errors are present
    # lint infos do not count as failures
    n_errors = len(results["errors"]) + len(results["lint_errors"])
    n_warnings = len(results["warnings"]) + len(results["lint_warnings"])
    if n_errors > 0 or n_warnings > 0:
        log.info("Found %d lint errors and %d lint warnings", n_errors,
                 n_warnings)
        sys.exit(1)

    # FIX: corrected typo in log message ("succesfully" -> "successfully").
    log.info("Lint logfile parsed successfully")
    sys.exit(0)
def _run_simulation(config_dict, output_log_file_name, aes_test):
    """Run one HWFP simulation for *aes_test* and copy the experiment log
    to *output_log_file_name*."""
    # Location of the log the fuzzer writes for this experiment.
    exp_log = os.path.join(os.getenv("HW_FUZZING"), "data",
                           config_dict["experiment_name"], "logs", "exp.log")
    # Point the fuzzer at the seed belonging to this test case.
    config_dict["fuzzer_params"]["seed"] = aes_test.hwf_seed
    # Materialize the configuration as a temporary HJSON file.
    with open(TMP_HJSON_CONFIG, "w") as config_fp:
        hjson.dump(config_dict, config_fp)
    # Run encrypt test in HWFP, then preserve its log.
    fuzz(["-y", "--log-driver", "none", "-s", TMP_HJSON_CONFIG])
    shutil.copy(exp_log, output_log_file_name)
def save_signature(self, signature):
    """Write *signature* into the project directory as `<name>.sig`,
    appending a numeric suffix instead of overwriting an existing file."""
    base_dir = os.path.join(self._signatures_root, self._project)
    filename = os.path.join(base_dir, signature['name'] + '.sig')
    suffix = 1
    while os.path.exists(filename):
        filename = os.path.join(
            base_dir, signature['name'] + '.{}.sig'.format(suffix))
        suffix += 1
    with open(filename, 'w') as sig_file:
        hjson.dump(signature, sig_file, indent=4)
def _get_dockerdriver(self, stalk_name, **overrides):
    """Assemble the merged hjson config for *stalk_name*, write it to a
    fresh temp file, and return a DockerDriver pointed at it."""
    source_path = pathlib.Path(f"stalks/{stalk_name}/{stalk_name}.hjson")
    config = hjson.load(open(self.cwd.joinpath(source_path), 'r'))
    # Later layers win: architecture < paths < globals < overrides.
    for layer in (self.config.get('architecture'),
                  self.config.get('paths'),
                  self.config.get('globals', {}),
                  {**overrides, **self._get_overrides(stalk_name)}):
        config.update(layer)
    tmp_path = pathlib.Path(tempfile.mkdtemp()).joinpath(
        f"{stalk_name}.hjson")
    hjson.dump(config, tmp_path.open('w', encoding='utf-8'))
    return DockerDriver(self.cwd, tmp_path)
def create_symbol(self):
    """ Create a temporary symbol signature from the current function on the IDA screen. """
    self.log('creating temporary signature')
    current_ea = idc.get_screen_ea()
    signature = {
        'name': idc.get_func_name(current_ea),
        'instructions': []
    }
    if self._create_template_symbol:
        # Build a find-bytes-ida pattern: concrete hex for position-
        # independent opcodes, '??' wildcards for relative ones.
        find_bytes_ida = "find-bytes-ida '"
        for ea in idautils.FuncItems(current_ea):
            mnem = idc.print_insn_mnem(ea).lower()
            opcode_size = idc.get_item_size(ea)
            # ppc
            if mnem.startswith('b') or mnem in ('lis', 'lwz', 'addi'):
                # relative opcodes
                find_bytes_ida += '?? ' * opcode_size
                continue
            # arm
            # NOTE(review): the startswith('b') test repeats the ppc branch
            # above, so only the ('ldr', 'str') part can ever trigger here.
            if mnem.startswith('b') or mnem in ('ldr', 'str'):
                # relative opcodes
                find_bytes_ida += '?? ' * opcode_size
                continue
            opcode = binascii.hexlify(idc.get_bytes(ea, opcode_size))
            # NOTE(review): on Python 3 hexlify returns bytes, so the
            # slices below are bytes and ' '.join would raise TypeError —
            # presumably this runs under IDA's Python 2; confirm.
            formatted_hex = ' '.join(opcode[i:i + 2]
                                     for i in range(0, len(opcode), 2))
            find_bytes_ida += formatted_hex + ' '
        find_bytes_ida += "'"
        signature['instructions'].append(find_bytes_ida)
        signature['instructions'].append('function-start')
        signature['instructions'].append('set-name "{}"'.format(
            idc.get_func_name(current_ea)))
    with open(TEMP_SIG_FILENAME, 'w') as f:
        hjson.dump(signature, f, indent=4)
    self.log('Signature created at {}'.format(TEMP_SIG_FILENAME))
    return TEMP_SIG_FILENAME
def main():
    """Legacy CLI entry point: convert (H)JSON from stdin or a named file
    to hjson (default), pretty JSON (-j) or compact JSON (-c)."""
    format = 'hjson'
    args = []
    # Flags may be interleaved with positional arguments.
    for arg in sys.argv[1:]:
        if arg in ('-h', '--help'):
            showerr(HELP)
            return
        elif arg == '-j':
            format = 'json'
        elif arg == '-c':
            format = 'compact'
        elif arg in ('-V', '--version'):
            showerr('Hjson ' + pkg_resources.require("Hjson")[0].version)
            return
        elif arg[0] == '-':
            showerr(HELP)
            raise SystemExit('unknown option ' + arg)
        else:
            args.append(arg)

    outfile = sys.stdout
    if not args:
        infile = sys.stdin
    elif len(args) == 1:
        infile = open(args[0], 'r')
    else:
        showerr(HELP)
        raise SystemExit('unknown options')

    with infile:
        try:
            obj = hjson.load(infile, use_decimal=True)
        except ValueError:
            raise SystemExit(sys.exc_info()[1])

    with outfile:
        if format == 'json':
            hjson.dumpJSON(obj, outfile, use_decimal=True, indent='  ')
        elif format == 'compact':
            hjson.dumpJSON(obj, outfile, use_decimal=True,
                           separators=(',', ':'))
        else:
            hjson.dump(obj, outfile, use_decimal=True)
        outfile.write('\n')
def to_file(self, file_path: Path, header: Optional[str] = ""):
    """Write this instance's name and parameter values to *file_path* as
    Hjson, preceded by *header* when one is given."""
    payload = {
        'instance_name': self.instance_name,
        'param_values': self.param_values,
    }
    with open(file_path, 'w') as fp:
        if header:
            fp.write(header)
        hjson.dump(payload,
                   fp,
                   ensure_ascii=False,
                   use_decimal=True,
                   for_json=True,
                   encoding='UTF-8',
                   indent=2)
        fp.write("\n")
async def on_ready():
    """Record the IDs of voice ("🔊"-prefixed) and staff categories in the
    config, persist it to config.hjson, then disconnect."""
    guild = client.get_guild(utilities.get_guildID())
    monitored_categories = {
        category.name: category.id
        for category in guild.categories
        if category.name[0] == "🔊" or category.name in ["staff", "STAFF"]
    }
    key_name = ("test_" if os.getenv("mode") == "test"
                else "") + "monitored_categories"
    config[key_name] = monitored_categories
    with open("config.hjson", "w") as f:
        hjson.dump(config, f)
    await client.logout()
def dump(self):
    """Refresh every game data table, emit the spoiler log, and write the
    run's settings to settings.json."""
    # Rebuild each data table in the same order as before.
    for table in (self.talkData, self.jobData, self.gameText, self.itemData,
                  self.pcData, self.objectData, self.shopData,
                  self.abilityData, self.abilitySetData):
        table.update()
    # Print spoiler logs
    self.spoilerLog()
    # Dump settings
    outFile = os.path.join(self.outPath, 'settings.json')
    with open(outFile, 'w') as file:
        hjson.dump(self.settings, file)
def create_symbol(self):
    """Build and persist a temporary signature for the function currently
    under the cursor in IDA; returns the signature file path."""
    self.log('creating temporary signature')
    signature = {
        'name': idc.get_func_name(idc.get_screen_ea()),
        'type': 'function',
        'instructions': [],
    }
    with open(TEMP_SIG_FILENAME, 'w') as sig_file:
        hjson.dump(signature, sig_file, indent=4)
    self.log('Signature created at {}'.format(TEMP_SIG_FILENAME))
    return TEMP_SIG_FILENAME
def save_signature(self, signature):
    """
    Save given signature object (by dictionary) into active project as a new
    SIG file.  Existing files are never overwritten; a numeric suffix is
    appended until a free name is found.

    :param signature: Dictionary of signature object
    :return: None
    """
    project_dir = os.path.join(self._signatures_root, self._project)
    candidate = os.path.join(project_dir, signature['name'] + '.sig')
    counter = 1
    while os.path.exists(candidate):
        candidate = os.path.join(
            project_dir, signature['name'] + '.{}.sig'.format(counter))
        counter += 1
    with open(candidate, 'w') as out:
        hjson.dump(signature, out, indent=4)
def init():
    """ Initializes tyme environment with .tyme folder and initial files. """
    if not os.path.isdir(TYME_DIR):
        os.mkdir(TYME_DIR)
        os.mkdir(TYME_TIMELINES_DIR)
    # Guard clause: nothing to do when state already exists.
    if os.path.exists(TYME_STATE_FILE):
        return
    print("Couldn't find any users, tyme has probably not been setup yet.")
    user = input("What user would you like to use for your timeline?\n"
                 "username: ")
    with open(TYME_STATE_FILE, 'w') as state_file:
        hjson.dump({'default_user': user}, state_file)
    Timeline.make_empty(user)
def main():
    """Read JSON test vectors from `src`, parse them, and write the result
    to `dst` as Hjson.  Returns 0 on success."""
    parser = argparse.ArgumentParser()
    parser.add_argument('src',
                        metavar='FILE',
                        type=argparse.FileType('r'),
                        help='Read test vectors from this JSON file.')
    parser.add_argument('dst',
                        metavar='FILE',
                        type=argparse.FileType('w'),
                        help='Write output to this file.')
    opts = parser.parse_args()
    vectors = parse_test_vectors(json.load(opts.src))
    opts.src.close()
    hjson.dump(vectors, opts.dst)
    opts.dst.close()
    return 0
def serialize_noisemodel(noisemodel, filename):
    """Save the noisemodel to a file, serialize via json

    Args:
        noisemodel (HErmes.fitting.model.Model) : the fitted noisemodel
        filename (str) : full path to file
    """
    nm_data = dict()
    nm_data['I_L'] = noisemodel.I_L
    nm_data['A_f'] = noisemodel.A_f
    nm_data['R_S'] = noisemodel.R_S
    nm_data['chi2/ndf'] = noisemodel.chi2_ndf
    nm_data['npoint'] = len(noisemodel.data)
    nm_data['detid'] = noisemodel.detid
    nm_data['strip'] = noisemodel.stripname
    # FIX: interpolate the destination path (the f-string had lost its
    # placeholder) and close the file handle deterministically (the original
    # passed an open() result straight to hjson.dump and never closed it).
    logger.info(f'Serializing noisemodel to {filename}')
    with open(filename, 'w') as fout:
        hjson.dump(nm_data, fout)
    return None
def _main():
    """Launch one GCP fuzzing experiment per (toplevel, run) combination."""
    print(LINE_SEP)
    print(green("Launching %d experiments ..." % (len(TOPLEVELS) * len(RUNS))))
    print(LINE_SEP)
    # Configs are staged in a throwaway directory that cleans itself up.
    with tempfile.TemporaryDirectory() as tmp_dir:
        for toplevel in TOPLEVELS:
            for run in RUNS:
                cfg = copy.deepcopy(CONFIG_DICT)
                # Experiment names are lowercase and dash-separated.
                experiment_name = (EXPERIMENT_BASE_NAME %
                                   (toplevel, DURATION_MINS,
                                    run)).replace("_", "-").lower()
                cfg["experiment_name"] = experiment_name
                cfg["toplevel"] = toplevel
                cfg["fuzzer_params"]["duration_mins"] = DURATION_MINS
                # Stage the config as an HJSON file for the fuzzer.
                hjson_file_path = os.path.join(tmp_dir,
                                               experiment_name + ".hjson")
                with open(hjson_file_path, "w") as fp:
                    hjson.dump(cfg, fp)
                # launch fuzz the DUT
                # fuzz(["--fail-silently", hjson_file_path])
                fuzz([
                    "-y", "--gcp-config-filename", "gcp_config.east1b.hjson",
                    hjson_file_path
                ])
                os.remove(hjson_file_path)
    print(LINE_SEP)
    print(green("DONE!"))
    print(LINE_SEP)
def main():
    """Filter-style converter: last flag wins (-j JSON, -c compact JSON,
    anything else hjson); optional infile/outfile positionals."""
    todo = "-h"
    args = []
    for arg in sys.argv[1:]:
        if arg[0] == '-':
            todo = arg
        else:
            args.append(arg)

    if len(args) > 2:
        raise SystemExit(sys.argv[0] + " {-h|-j|-c} [infile [outfile]]")
    infile = open(args[0], 'r') if len(args) >= 1 else sys.stdin
    outfile = open(args[1], 'w') if len(args) == 2 else sys.stdout

    with infile:
        try:
            obj = hjson.load(infile, use_decimal=True)
        except ValueError:
            raise SystemExit(sys.exc_info()[1])

    with outfile:
        if todo == '-j':
            hjson.dumpJSON(obj, outfile, use_decimal=True, indent="  ")
        elif todo == '-c':
            hjson.dumpJSON(obj, outfile, use_decimal=True,
                           separators=(',', ':'))
        else:
            hjson.dump(obj, outfile, use_decimal=True)
        outfile.write('\n')
def main():
    """Generate `n` random RSA-3072 test vectors and dump them as Hjson.

    Returns 0 on success.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--verbose', action='store_true')
    parser.add_argument('n',
                        type=int,
                        help='Number of random test vectors to generate.')
    parser.add_argument('--tests-per-key',
                        metavar='num',
                        type=int,
                        required=False,
                        # BUG FIX: the help text promises default=1, but no
                        # default was set, so args.tests_per_key was None and
                        # `i % None` below raised TypeError on every run that
                        # omitted the flag.
                        default=1,
                        help='Number of test vectors to generate per key '
                        'generation (default=1). Increase for faster test '
                        'generation.')
    parser.add_argument('outfile',
                        metavar='FILE',
                        type=argparse.FileType('w'),
                        help='Write output to this file.')
    args = parser.parse_args()
    print(args)
    random.seed()
    testvecs = []
    key = None
    for i in range(args.n):
        if args.verbose:
            print('Generating test case {}'.format(i))
        if i % args.tests_per_key == 0:
            # Generate a new RSA key
            key = RSA.generate(3072, e=65537)
        testvecs.append(gen_random_test(key, i))
    hjson.dump(testvecs, args.outfile)
    args.outfile.close()
    return 0
def main():
    """Sweep lock-state count × comparator width × runs, fuzzing each
    generated configuration."""
    num_experiments = len(NUM_STATES) * len(COMP_WIDTHS) * len(RUNS)
    for _ in range(3):
        print(LINE_SEP)
    print(color_str_green("LAUNCHING %d EXPERIMENTS ..." % num_experiments))
    for _ in range(3):
        print(LINE_SEP)
    for run in RUNS:
        for states in NUM_STATES:
            for width in COMP_WIDTHS:
                # craft config dictionary
                cfg = copy.deepcopy(BASE_CONFIG_DICT)
                experiment_name = EXPERIMENT_BASE_NAME % (states, width, run)
                cfg["experiment_name"] = experiment_name
                cfg["hdl_gen_params"]["num_lock_states"] = states
                cfg["hdl_gen_params"]["lock_comp_width"] = width
                # Stage the experiment config as an HJSON file.
                hjson_file_path = os.path.join(
                    os.getenv("HW_FUZZING"), "experiments",
                    experiment_name + ".hjson")
                with open(hjson_file_path, "w") as fp:
                    hjson.dump(cfg, fp)
                # launch fuzz the DUT, then clean up the config file
                fuzz([hjson_file_path])
                os.remove(hjson_file_path)
    for _ in range(3):
        print(LINE_SEP)
    print(color_str_green("DONE!"))
    for _ in range(3):
        print(LINE_SEP)
def main():
    """Merge several dataset directories into one, renumbering tfrecord
    files per mode and recording the provenance in hparams.hjson."""
    colorama.init(autoreset=True)

    parser = argparse.ArgumentParser()
    parser.add_argument("indirs", nargs="*", type=pathlib.Path)
    parser.add_argument("outdir", type=pathlib.Path)
    parser.add_argument("--dry-run", action='store_true')
    args = parser.parse_args()

    args.outdir.mkdir(exist_ok=True)

    if not args.dry_run:
        hparams_path = args.indirs[0] / 'hparams.hjson'
        new_hparams_path = args.outdir / 'hparams.hjson'
        # log this operation in the params!
        hparams = hjson.load(hparams_path.open('r'))
        hparams['created_by_merging'] = [str(indir) for indir in args.indirs]
        hjson.dump(hparams, new_hparams_path.open('w'), indent=2)
        print(hparams_path, '-->', new_hparams_path)

    for mode in ['train', 'test', 'val']:
        # Collect every tfrecord of this mode across all input dirs.
        files = []
        for in_dir in args.indirs:
            files.extend((in_dir / mode).glob("*.tfrecords"))

        traj_idx = 0
        for file in files:
            src = pathlib.Path(file)
            mode_outdir = args.outdir / mode
            mode_outdir.mkdir(parents=True, exist_ok=True)
            dst = mode_outdir / f"example_{traj_idx:08d}.tfrecords"
            traj_idx += 1
            print(src, '-->', dst)
            if not args.dry_run:
                shutil.copyfile(src, dst)
def _save(self, filename: str):
    '''
    Saves off the current dictionary state in Plover to a file.

    :param filename: The file path of the dictionary to save to.
    '''
    # Group by translation: each value maps to the sorted list of its
    # stroke strings (multi-stroke entries joined into one string).
    grouped = {}
    for strokes, translation in self._dict.items():
        grouped.setdefault(translation, []).append(
            STROKE_DELIMITER.join(strokes))
    data = {translation: sorted(stroke_list)
            for translation, stroke_list in grouped.items()}
    # Write out the data
    with open(filename, 'w', encoding='utf-8') as out_file:
        hjson.dump(data, out_file, sort_keys=True, ensure_ascii=False,
                   encoding='utf-8')
def main(argv):
    """Vendoring tool entry point: import (and optionally update, patch and
    commit) code from an upstream repository, as described by a
    *.vendor.hjson description file and pinned by its lock file."""
    parser = argparse.ArgumentParser(prog="vendor", description=__doc__)
    parser.add_argument(
        '--update', '-U',
        dest='update',
        action='store_true',
        help='Update locked version of repository with upstream changes')
    parser.add_argument('--refresh-patches',
                        action='store_true',
                        help='Refresh the patches from the patch repository')
    parser.add_argument('--commit', '-c',
                        action='store_true',
                        help='Commit the changes')
    parser.add_argument('--desc-override', '-D',
                        dest="desc_overrides",
                        action="append",
                        type=define_arg_type,
                        default=[],
                        help='Override a setting in the description file. '
                        'Format: -Dsome.key=value. '
                        'Can be used multiple times.')
    parser.add_argument('desc_file',
                        metavar='file',
                        type=argparse.FileType('r', encoding='UTF-8'),
                        help='vendoring description file (*.vendor.hjson)')
    parser.add_argument('--verbose', '-v', action='store_true',
                        help='Verbose')
    args = parser.parse_args()

    global verbose
    verbose = args.verbose
    if (verbose):
        log.basicConfig(format="%(levelname)s: %(message)s", level=log.DEBUG)
    else:
        log.basicConfig(format="%(levelname)s: %(message)s")

    # Load input files (desc file; lock file) and check syntax etc.
    try:
        # Load description file
        desc = Desc(args.desc_file, args.desc_overrides)
        lock_file_path = desc.lock_file_path()

        # Try to load lock file (which might not exist)
        try:
            with open(str(lock_file_path), 'r') as lock_file:
                lock = LockDesc(lock_file)
        except FileNotFoundError:
            lock = None
    except (JsonError, ValueError) as err:
        log.fatal(str(err))
        raise SystemExit(1)

    # Check for a clean working directory when commit is requested
    if args.commit:
        if not git_is_clean_workdir(desc.path.parent):
            log.fatal("A clean git working directory is required for "
                      "--commit/-c. git stash your changes and try again.")
            raise SystemExit(1)

    # Without a lock file there is nothing to pin to, so force update mode.
    if lock is None and not args.update:
        log.warning("No lock file at {}, so will update upstream repo."
                    .format(str(desc.lock_file_path())))
        args.update = True

    # If we have a lock file and we're not in update mode, override desc's
    # upstream field with the one from the lock file. Keep track of whether the
    # URL differs (in which case, we can't get a shortlog)
    changed_url = False
    if lock is not None:
        changed_url = desc.upstream.url != lock.upstream.url
        if not args.update:
            desc.upstream = lock.upstream

    if args.refresh_patches:
        refresh_patches(desc)

    with tempfile.TemporaryDirectory() as clone_dir:
        # clone upstream repository
        upstream_new_rev = clone_git_repo(desc.upstream.url, clone_dir,
                                          rev=desc.upstream.rev)

        # In pinned (non-update) mode the clone must resolve to the locked rev.
        if not args.update:
            if upstream_new_rev != lock.upstream.rev:
                log.fatal(
                    "Revision mismatch. Unable to re-clone locked version of repository."
                )
                log.fatal("Attempted revision: %s", desc.upstream.rev)
                log.fatal("Re-cloned revision: %s", upstream_new_rev)
                raise SystemExit(1)

        clone_subdir = Path(clone_dir)
        if desc.upstream.only_subdir is not None:
            clone_subdir = clone_subdir / desc.upstream.only_subdir
            if not clone_subdir.is_dir():
                log.fatal("subdir '{}' does not exist in repo"
                          .format(desc.upstream.only_subdir))
                raise SystemExit(1)

        # copy selected files from upstream repo and apply patches as necessary
        desc.import_from_upstream(clone_subdir)

        # get shortlog
        get_shortlog = args.update
        if args.update:
            if lock is None:
                get_shortlog = False
                log.warning("No lock file %s: unable to summarize changes.",
                            str(lock_file_path))
            elif changed_url:
                get_shortlog = False
                log.warning("The repository URL changed since the last run. "
                            "Unable to get log of changes.")

        shortlog = None
        if get_shortlog:
            shortlog = produce_shortlog(clone_subdir, desc.mapping,
                                        lock.upstream.rev, upstream_new_rev)

            # Ensure fully-qualified issue/PR references for GitHub repos
            gh_repo_info = github_parse_url(desc.upstream.url)
            if gh_repo_info:
                shortlog = github_qualify_references(shortlog,
                                                     gh_repo_info[0],
                                                     gh_repo_info[1])

            log.info("Changes since the last import:\n" +
                     format_list_to_str(shortlog))

        # write lock file
        if args.update:
            lock_data = {}
            lock_data['upstream'] = desc.upstream.as_dict()
            lock_data['upstream']['rev'] = upstream_new_rev
            with open(str(lock_file_path), 'w', encoding='UTF-8') as f:
                f.write(LOCK_FILE_HEADER)
                hjson.dump(lock_data, f)
                f.write("\n")
            log.info("Wrote lock file %s", str(lock_file_path))

        # Commit changes
        if args.commit:
            sha_short = git_get_short_rev(clone_subdir, upstream_new_rev)

            # Prefer the org/repo@sha form for GitHub upstreams.
            repo_info = github_parse_url(desc.upstream.url)
            if repo_info is not None:
                sha_short = "%s/%s@%s" % (repo_info[0], repo_info[1],
                                          sha_short)

            commit_msg_subject = 'Update %s to %s' % (desc.name, sha_short)
            intro = ('Update code from {}upstream repository {} to revision {}'
                     .format(('' if desc.upstream.only_subdir is None else
                              'subdir {} in '.format(
                                  desc.upstream.only_subdir)),
                             desc.upstream.url, upstream_new_rev))
            commit_msg_body = textwrap.fill(intro, width=70)

            if shortlog:
                commit_msg_body += "\n\n"
                commit_msg_body += format_list_to_str(shortlog, width=70)

            commit_msg = commit_msg_subject + "\n\n" + commit_msg_body

            commit_paths = []
            commit_paths.append(desc.target_dir)
            if args.refresh_patches:
                commit_paths.append(desc.patch_dir)
            commit_paths.append(lock_file_path)
            git_add_commit(commit_paths, commit_msg)

    log.info('Import finished')
def write_to_file(lists_path, lists):
    """Serialize *lists* to *lists_path* as two-space-indented Hjson."""
    with open(lists_path, 'w', encoding="utf8") as lists_file:
        hjson.dump(lists, lists_file, indent='  ')
def save(self, fp, indent=None):
    '''Serialize this schema (via saveJSON) and write it to *fp*.'''
    payload = self.saveJSON()
    hjson.dump(payload, fp, indent=indent)
def save(self, fp, indent=None):
    """Write the schema produced by saveJSON() out to *fp* as Hjson."""
    serialized = self.saveJSON()
    hjson.dump(serialized, fp, indent=indent)
DEFAULT_CONFIG = { "version": 0.4, "peptide_modifications": {}, "glycan_modifications": {}, "substituent_rules": {}, "environment": { "log_file_name": "glycresoft-log", "log_file_mode": "a" }, "xml_huge_tree": True } _CURRENT_CONFIG = None if not HAS_CONFIG: hjson.dump(DEFAULT_CONFIG, open(USER_CONFIG_PATH, 'w')) def process(config): for key, value in config['peptide_modifications'].items(): load_modification_rule(value) for key, value in config["substituent_rules"].items(): load_substituent_rule(value) def recursive_merge(a, b): for k, v in b.items(): if isinstance(b[k], dict) and isinstance(a.get(k), dict): recursive_merge(a[k], v) else:
def set_configuration(obj):
    """Replace the persisted user configuration with *obj* and return the
    freshly re-loaded configuration."""
    global _CURRENT_CONFIG
    # Invalidate the cache so get_configuration() re-reads from disk.
    _CURRENT_CONFIG = None
    # FIX: use a context manager so the config file is flushed and closed
    # deterministically (the original passed an open() result to hjson.dump
    # and leaked the handle).
    with open(USER_CONFIG_PATH, 'w') as config_file:
        hjson.dump(obj, config_file)
    return get_configuration()