def __init__(self, available_targets, file_name):
    """Write the discovery-manager configuration file.

    The file is JSON and lists every FPGA target the discovery manager
    provides, under a fixed title/description header.
    """
    config = {
        "title": "Discovery manager configure file",
        "description": "List of all fpga targets provided by discovery manager",
        "targets": available_targets,
    }
    with open(file_name, 'w') as out:
        jsondump(config, out, sort_keys=True, indent=4)
def __init__(self, available_targets, file_name):
    """Dump the discovery-manager configuration to *file_name* (JSON).

    Records the fixed header fields plus the given FPGA target list.
    """
    configuration = dict(
        title="Discovery manager configure file",
        description="List of all fpga targets provided by discovery manager",
        targets=available_targets,
    )
    with open(file_name, 'w') as config_file:
        jsondump(configuration, config_file, indent=4, sort_keys=True)
def save_settings(settings_file, settings, values):
    """Fold window *values* into *settings*, then persist them as JSON.

    Keys that cannot be copied are reported on stdout but do not abort
    the save.
    """
    if values:
        for key in SETTINGS_KEYS_TO_ELEMENT_KEYS:
            try:
                settings[key] = values[SETTINGS_KEYS_TO_ELEMENT_KEYS[key]]
            except Exception:
                print(f'Problem updating settings from window values. Key = {key}')
    with open(settings_file, 'w') as f:
        jsondump(settings, f)
def save_settings(settings_file, settings, values):
    """Update *settings* from window *values*, write the file, notify user.

    Pops a PySimpleGUI confirmation dialog after the JSON file is written.
    """
    if values:
        # Values supplied by another window override the stored settings.
        for key in SETTINGS_KEYS_TO_ELEMENT_KEYS:
            try:
                settings[key] = values[SETTINGS_KEYS_TO_ELEMENT_KEYS[key]]
            except Exception:
                print(f'Problem updating settings from window values. Key = {key}')
    with open(settings_file, 'w') as f:
        jsondump(settings, f)
    sg.popup('Settings saved')
def save_settings(settings_file, settings, values):
    """Try to save values as settings_file.cfg (json).

    Window *values* (when present) are copied into *settings* first; the
    result is written pretty-printed with sorted keys.
    """
    if values:
        # Fill in anything another window specified for us.
        for key in SETTINGS_KEYS_TO_ELEMENT_KEYS:
            try:
                settings[key] = values[SETTINGS_KEYS_TO_ELEMENT_KEYS[key]]
            except Exception:
                print(f'Problem updating settings from window values. Key = {key}')
    with open(settings_file, 'w') as f:
        jsondump(settings, f, indent=4, sort_keys=True)
def save_parameters(parameters_file, parameters, values,
                    PARAMETER_KEYS_TO_ELEMENT_KEYS):
    """Copy window *values* into *parameters* and write them as JSON.

    *PARAMETER_KEYS_TO_ELEMENT_KEYS* maps parameter keys to the window
    element keys that hold their current values. Missing/bad keys are
    reported on stdout and skipped.
    """
    if values:
        for key in PARAMETER_KEYS_TO_ELEMENT_KEYS:
            try:
                parameters[key] = values[PARAMETER_KEYS_TO_ELEMENT_KEYS[key]]
            except Exception:
                print(f'Problem updating parameters from window values. Key = {key}')
    with open(parameters_file, 'w') as f:
        jsondump(parameters, f)
def save_settings(notify, settings_file, settings, values):
    """Merge window *values* into *settings*, save as JSON, optionally notify.

    When *notify* is truthy a PySimpleGUI popup confirms the save.
    """
    if values:
        for key in SETTINGS_KEYS_TO_ELEMENT_KEYS:
            try:
                settings[key] = values[SETTINGS_KEYS_TO_ELEMENT_KEYS[key]]
            except Exception:
                print(f"Problem updating settings from window values. Key = {key}")
    with open(settings_file, "w") as f:
        jsondump(settings, f)
    if notify:
        sg.popup("Settings saved", icon=icon_image)
def save_settings(settings_file, settingsList, values):
    """Build one settings record from window *values*, append it, and save.

    A fresh record seeded with default process name/code is filled from
    *values*, appended to *settingsList*, and the whole list is written as
    JSON; a popup confirms the save.
    """
    settingsDict = {'nom_proc': 'Base_Unica', 'cod_proc': 1}
    if values:
        # Copy anything another window specified into the new record.
        for key in SETTINGS_KEYS_TO_ELEMENT_KEYS:
            try:
                settingsDict[key] = values[SETTINGS_KEYS_TO_ELEMENT_KEYS[key]]
            except Exception:
                print(f'Problem updating settings from window values. Key = {key}')
    settingsList.append(settingsDict)
    with open(settings_file, 'w') as f:
        jsondump(settingsList, f)
    sg.popup('Settings saved')
def dump(self, filename):
    """Store a Cube object in a JSON text file.

    A dictionary with every side of the cube is written to *filename*.
    The json module has no support for numpy arrays, so each side's rows
    are converted to plain Python lists first.

    :param filename: path of the file to (over)write
    """
    # One entry per side; same conversion the original applied six times.
    sides = ("front", "back", "left", "right", "up", "down")
    dictionary = {side: [list(row) for row in getattr(self, side)]
                  for side in sides}
    # 'w' truncates any previous content. The old 'r+' mode raised
    # FileNotFoundError when the file did not exist and left stale
    # trailing bytes when the new dump was shorter than the old one;
    # the context manager also guarantees the handle is closed on error.
    with open(filename, 'w') as file_handle:
        jsondump(dictionary, file_handle)
def save(self, values=None):
    """Persist self.settings to the settings file as sorted, indented JSON.

    When *values* (a window-values mapping) is given, each key listed in
    ct.SETTING_KEYS is copied from *values* into self.settings first.
    The in-memory board size is restored afterwards so a size change only
    takes effect after a restart.
    """
    old_board_size = None
    if values is not None:
        for key in ct.SETTING_KEYS:
            # update self.settings with values read from settings file
            try:
                if key[1] == ct.K_BOARD_SIZE[1]:
                    # keep current board_size until restart
                    old_board_size = self.settings[key[1]]
                self.settings[key[1]] = values[key[1]]
                if len(key) == 5:
                    # player 2 — assumes key[2] addresses the second
                    # player's element; confirm against ct.SETTING_KEYS
                    self.settings[key[2]] = values[key[2]]
            except Exception as e:
                # Best-effort: a missing key is logged but does not
                # abort the save of the remaining settings.
                self.logger.error(ct.MSG_ERROR_UPDATING_KEY, str(key),
                                  str(e))
    with open(ct.SETTINGS_FILE, 'w') as f:
        jsondump(self.settings, f, indent=4, sort_keys=True)
    # The file already holds the NEW board size; in memory we keep the
    # old one (when captured) so the running game is not resized.
    self.settings[ct.K_BOARD_SIZE[1]] = old_board_size or self.settings[
        ct.K_BOARD_SIZE[1]]
def save_settings(settings, chosen_locations=None):
    """Write *settings* to SETTINGS_FILE, optionally replacing 'locations'.

    A non-empty *chosen_locations* overwrites the stored location list
    before saving.
    """
    if chosen_locations:
        settings['locations'] = chosen_locations
    with open(SETTINGS_FILE, 'w') as config_out:
        jsondump(settings, config_out)
def dump(obj, fp):
    """Serialize *obj* to the open file object *fp* as 4-space-indented JSON."""
    jsondump(obj, fp, indent=4)
def save_settings(settings):
    """Write *settings* to SETTINGS_FILE as JSON."""
    with open(SETTINGS_FILE, 'w') as out:
        jsondump(settings, out)
def save_file(settings_file, settings, pop, window):
    """Write *settings* to *settings_file* as JSON.

    When *pop* is truthy and a *window* is supplied, flash a quick
    'Settings Saved' message at the window's current location.
    """
    with open(settings_file, 'w') as out:
        jsondump(settings, out)
    if pop and window is not None:
        sg.popup_quick_message('Settings Saved',
                               location=window.current_location())
def __init__(self, release_old: str, release_new: str, output_path: str,
             set_name: Union[str, None] = None,
             suppress_missing_versions: bool = False):
    """
    Take two release files, and compare them for differences, then \
    generate patch file to given output path. Inorganic and for robots.

    :param release_old: web address or path to old release file
    :type release_old: str
    :param release_new: web address or path to new release file
    :type release_new: str
    :param output_path: path to output archive, if archive already
        exists, deletes archive and "overwrites" it with the new archive
        file
    :type output_path: str
    :param set_name: new patch NAME file, if not None, NAME check is
        ignored, default None
    :type set_name: Union[str, None]
    :param suppress_missing_versions: if True missing versions error is
        ignored, Supply class cannot detect the release automatically,
        Patcher must be directed to the patch archive manually, default
        False
    :type suppress_missing_versions: bool
    """
    # Scratch tree under the system temp dir; deleted at the very end.
    self.WORK_DIR = Weave.create_work_directory()
    self.release_old = release_old
    self.release_new = release_new
    if path.isdir(output_path) is False:
        raise Exceptions.PatchError("Specified output directory " +
                                    output_path +
                                    " is not a directory.")
    # Old release: download over HTTP(S) into the work dir, or verify the
    # local file exists.
    if "https://" in self.release_old[:8] or "http://" in \
            self.release_old[:8]:
        release_old_grab = Backend.fetch(self.release_old)
        # NOTE(review): the local file name is built from
        # path.splitext(...)[1] (the EXTENSION only), and the archive is
        # written in text mode "w" — confirm both are intended.
        with open(gettempdir() + self.WORK_DIR + "/old/" +
                  path.splitext(self.release_old)[1], "w") as \
                release_old_data_dump:
            release_old_data_dump.write(release_old_grab.data)
        self.release_old = gettempdir() + self.WORK_DIR + "/old/" + \
            path.splitext(self.release_old)[1]
    else:
        if path.isfile(self.release_old) is False:
            raise Exceptions.ReleaseError("Old release file " +
                                          self.release_old +
                                          " does not exist.")
    # New release: same treatment.
    if "https://" in self.release_new[:8] or "http://" in \
            self.release_new[:8]:
        release_new_grab = Backend.fetch(self.release_new)
        with open(gettempdir() + self.WORK_DIR + "/new/" +
                  path.splitext(self.release_new)[1], "w") as \
                release_new_data_dump:
            release_new_data_dump.write(release_new_grab.data)
        self.release_new = gettempdir() + self.WORK_DIR + "/new/" + \
            path.splitext(self.release_new)[1]
    else:
        if path.isfile(self.release_new) is False:
            raise Exceptions.ReleaseError("New release file " +
                                          self.release_new +
                                          " does not exist.")
    # Unpack both releases so their trees can be compared.
    unpack_archive(self.release_old, gettempdir() + self.WORK_DIR +
                   "/old/")
    unpack_archive(self.release_new, gettempdir() + self.WORK_DIR +
                   "/new/")
    # Cross-check the NAME files of the two releases.
    try:
        with open(gettempdir() + self.WORK_DIR + "/old/NAME") as \
                release_name_handle:
            self.release_name_old = release_name_handle.read()
        with open(gettempdir() + self.WORK_DIR + "/new/NAME") as \
                release_name_handle:
            self.release_name_new = release_name_handle.read()
        if self.release_name_new != self.release_name_old and \
                set_name is None:
            raise Exceptions.ReleaseError(
                "NAME files of old and new releases do not match." +
                " Old is " + self.release_name_old + " and new " +
                self.release_name_new + ".")
    except FileNotFoundError as ParentException:
        # NOTE(review): this raises only when set_name IS provided,
        # although set_name is documented as the override for a missing
        # NAME check — condition looks inverted; also, when set_name is
        # None here, self.release_name_new stays unset and the NAME
        # write below would fail. Confirm intent.
        if set_name is not None:
            raise Exceptions.ReleaseError(
                "NAME files of old and new releases are missing.") from \
                ParentException
    # Read the VERSION files; substitute "NaN" when suppression is on.
    try:
        with open(gettempdir() + self.WORK_DIR + "/old/VERSION") as \
                release_version_handle:
            self.release_version_old = release_version_handle.read()
        with open(gettempdir() + self.WORK_DIR + "/new/VERSION") as \
                release_version_handle:
            self.release_version_new = release_version_handle.read()
    except FileNotFoundError as ParentException:
        if suppress_missing_versions is False:
            raise Exceptions.VersionError(
                "VERSION files of old and new releases are missing.") \
                from ParentException
        else:
            self.release_version_old = "NaN"
            self.release_version_new = "NaN"
    # A version string containing " -> " would corrupt the VERSIONS
    # header written below.
    # NOTE(review): `and` binds tighter than `or`, so the second
    # len(...) != 1 test fires even when suppress_missing_versions is
    # True — confirm whether parentheses were intended.
    if suppress_missing_versions is False and \
            len(self.release_version_old.split(" -> ")) != 1 or \
            len(self.release_version_new.split(" -> ")) != 1:
        raise Exceptions.UnableToParseError(
            'Release versions contain " -> " which will disrupt Patcher ' +
            'when trying to read the VERSIONS header.')
    # Compute the remove/add/keep/replace index and record it in the
    # patch as CHANGE.json.
    self.index = Weave.comparison(self)
    with open(gettempdir() + self.WORK_DIR + "/patch/CHANGE.json", "w") \
            as changelog_dump_handle:
        jsondump(
            {
                "remove": str(self.index[0]),
                "add": str(self.index[1]),
                "keep": str(self.index[2]),
                "replace": str(self.index[3])
            }, changelog_dump_handle)
    # Copy every ADDED entry into patch/add/, creating each missing
    # parent directory on the way down.
    for x in range(0, len(self.index[1])):
        component = Backend.directory_split_recursive(self.index[1][x])
        for a in component:
            if path.isdir(gettempdir() + self.WORK_DIR + "/patch/add/" +
                          a) is False:
                mkdir(gettempdir() + self.WORK_DIR + "/patch/add/" + a)
        if path.isfile(gettempdir() + self.WORK_DIR + "/new/" +
                       self.index[1][x]) is True:
            copyfile(
                gettempdir() + self.WORK_DIR + "/new/" +
                self.index[1][x],
                gettempdir() + self.WORK_DIR + "/patch/add/" +
                self.index[1][x])
        if path.isdir(gettempdir() + self.WORK_DIR + "/new/" +
                      self.index[1][x]) is True:
            copytree(
                gettempdir() + self.WORK_DIR + "/new/" +
                self.index[1][x],
                gettempdir() + self.WORK_DIR + "/patch/add/" +
                self.index[1][x])
    # Copy every REPLACED entry into patch/replace/ the same way.
    for y in range(0, len(self.index[3])):
        component = Backend.directory_split_recursive(self.index[3][y])
        for b in component:
            if path.isdir(gettempdir() + self.WORK_DIR +
                          "/patch/replace/" + b) is False:
                mkdir(gettempdir() + self.WORK_DIR + "/patch/replace/" +
                      b)
        if path.isfile(gettempdir() + self.WORK_DIR + "/new/" +
                       self.index[3][y]) is True:
            copyfile(
                gettempdir() + self.WORK_DIR + "/new/" +
                self.index[3][y],
                gettempdir() + self.WORK_DIR + "/patch/replace/" +
                self.index[3][y])
        if path.isdir(gettempdir() + self.WORK_DIR + "/new/" +
                      self.index[3][y]) is True:
            copytree(
                gettempdir() + self.WORK_DIR + "/new/" +
                self.index[3][y],
                gettempdir() + self.WORK_DIR + "/patch/replace/" +
                self.index[3][y])
    # Record the "old -> new" VERSIONS header for the Patcher.
    with open(gettempdir() + self.WORK_DIR + "/patch/VERSIONS", "w") as \
            release_version_handle:
        release_version_handle.write(self.release_version_old + " -> " +
                                     self.release_version_new)
    # Write the NAME file and assemble the final zip archive; the archive
    # name embeds NAME plus the old and new versions.
    if set_name is None:
        with open(gettempdir() + self.WORK_DIR + "/patch/NAME", "w") as \
                release_name_handle:
            release_name_handle.write(self.release_name_new)
        base_name = output_path + self.release_name_new + "_" + \
            self.release_version_old + "_to_" + \
            self.release_version_new + "_bandage_patch"
        make_archive(root_dir=gettempdir() + self.WORK_DIR + "/patch/",
                     base_name=base_name, format="zip")
    else:
        with open(gettempdir() + self.WORK_DIR + "/patch/NAME", "w") as \
                release_name_handle:
            release_name_handle.write(set_name)
        base_name = (output_path + set_name + "_" +
                     self.release_version_old + "_to_" +
                     self.release_version_new + "_bandage_patch")
        make_archive(root_dir=gettempdir() + self.WORK_DIR + "/patch/",
                     base_name=base_name, format="zip")
    # TODO archive checksum generation
    # Clean up the whole scratch tree.
    rmtree(gettempdir() + self.WORK_DIR)
def retrieve_message(message_id):
    '''Retrieve message'''
    payload = models.message.retrieve_message(message_id)
    # DecimalEncoder handles the Decimal values the model layer returns.
    return jsondump(payload, cls=models.DecimalEncoder), 200
def save_usersetting(self, usersetting):
    """Serialize *usersetting* (via its to_json()) to SETTING_PATH."""
    with open(SETTING_PATH, "w") as setting_file:
        jsondump(usersetting.to_json(), setting_file)
" time to cross (seconds): ", 'Time to Cross', icon=bf_icon) try: int(ttc_in) except ValueError: sG.PopupError( 'That was not a valid number. Please try again.', icon=bf_icon) except TypeError: cancelled = True else: valid = True time_to_cross.append(int(ttc_in)) if cancelled is False: cont_processing(noise_threshold, SCAN_TIME, int(scan_error_in)) match_movements(total_start_time, total_end_time, out_folder, out_file_name, SCAN_TIME) window.Element('_CONSOLE_').Update( window.Element('_CONSOLE_').Get() + "----PROCESSING COMPLETE----") rerun = True with open(out_folder + os_slash + out_file_name + '.cfg', 'w') as f: jsondump(config_dict, f) else: time_to_cross = []
def post(self, url, **payload):
    """POST *payload* (the collected keyword arguments) to *url* as JSON.

    :param url: target URL
    :param payload: arbitrary keyword arguments serialized into the body
    :return: whatever self.requests.post returns
    """
    # Renamed the kwargs collector from `json` to `payload`: the old name
    # shadowed the json module. Callers are unaffected — they pass
    # arbitrary keywords, never this name.
    return self.requests.post(url, data=jsondump(payload))
def default_settings(settings_file):
    """Reset *settings_file* to DEFAULT_SETTINGS and confirm via popup."""
    with open(settings_file, 'w') as out:
        jsondump(DEFAULT_SETTINGS, out)
    sg.popup('Settings saved')
def save_to_json(self, dst: Path) -> None:
    """Serialize this (dataclass) settings object to *dst* as JSON."""
    print(f"save setting {dst}")
    with open(dst, "w") as out:
        jsondump(asdict(self), out)
#set longitude values - Y values #minx = -180 #maxx = 180 minx = 40 maxx = 65 #set latitude values - X values #miny = -23.5 #maxy = 23.5 miny = 11.5 maxy = 15.5 print "Start Time:", strftime("%a, %d %b %Y %H:%M:%S") #Print the column headers #print >>f, "ID",",","X",",","Y" for x in range(0,numpoints): #print >>f, x,",", random.uniform(minx,maxx),",", random.uniform(miny,maxy) rec = {'lat': random.uniform(minx,maxx), 'lng': random.uniform(miny,maxy), 'media': 'http://placehold.it/200x200', 'text': get_lipsum(random.randint(12,140), 'bytes', 'no')[0], 'category': random.randint(0,9) } print jsondump(rec) r = requests.post("http://emea-fr-01.services.massforstroel.se/ads", data=jsondump(rec)) print r.text #f.close() print "Script Complete, Hooray!", numpoints, "random points generated" print "End Time:", strftime("%a, %d %b %Y %H:%M:%S")
def returnJson(self, object):
    """Write *object* to the HTTP response as a JSON body.

    Sets the Content-Type header before writing. (Parameter name `object`
    shadows the builtin but is kept for interface compatibility.)
    """
    self.response.headers['Content-Type'] = "application/json"
    body = jsondump(object)
    self.response.out.write(body)
def make_settings_save_file(dict_, settings_file_name):
    """Persist *dict_* to *settings_file_name* as 4-space-indented JSON."""
    with open(settings_file_name, 'w') as out_file:
        jsondump(dict_, out_file, indent=4)
#if not unzipto(absolute, temppath): # print u'错误:解压 %s 失败!'%args.src # sys.exit(1) bigdict, msg_anim = parsefla(d) if not bigdict: continue # 使用flash自带功能搞定 # packimages(d, outputroot) # dump protocol msg msgdestpath = os.path.join(outputroot, current_filename+'.anim') dumpmsg(msg_anim, msgdestpath) print 'write', msgdestpath # dump animxml destpath = os.path.join(outputroot, current_filename+'.animxml') dumpplist(bigdict, destpath) print 'write', destpath if False: destpath = os.path.join(outputroot, os.path.splitext(f)[0]+'.animxml.json') result = jsondump(bigdict, file(destpath, 'w'), indent=4) print 'write', destpath # 生成索引 #print u'生成索引' #import genindex #genindex.main(outputroot)