def log_iteration(self, iteration: int, solutions: Iterable) -> None:
    """
    Logging method that will be called at each iteration of main optimization algorithm.

    :param iteration: Number of iteration of main optimization algorithm.
    :param solutions: Solutions found in this iteration.
    """
    if self.verbosity >= LoggingVerbosity.AllSolutions:
        if iteration > 0:
            mode = "a"
        else:
            mode = "w"
        data_to_log = {
            f"Iteration {iteration}": [solution.get_log_data() for solution in solutions]
        }
        if self.log_format == LoggingFormat.YAML:
            file_path = path.join(self.optimization_process_dir, "solutions.yaml")  # type: ignore
            with open(file_path, mode) as yaml_file:
                yaml_dump(data_to_log, yaml_file, YamlDumper)
        elif self.log_format == LoggingFormat.JSON:
            file_path = path.join(self.optimization_process_dir, "solutions.json")  # type: ignore
            with open(file_path, mode) as json_file:
                json_dump(data_to_log, json_file)
def log_lower_level_at_end(self, upper_iteration: int, lower_algorithm_index: int, best_solution,
                           optimization_time: timedelta) -> None:
    """
    Logging method that will be called at the end of optimization process.

    :param upper_iteration: Upper algorithm iteration.
    :param lower_algorithm_index: Lower algorithm index.
    :param best_solution: The best solution found by the optimization algorithm.
    :param optimization_time: Optimization process duration time.
    """
    # assess data to log
    log_data = {}
    if self.verbosity >= LoggingVerbosity.OptimizationTime:
        log_data["optimization_duration"] = str(optimization_time)
    if self.verbosity >= LoggingVerbosity.BestSolution:
        log_data["best_solution"] = best_solution.get_log_data()
    # log to file
    if log_data:
        if self.log_format == LoggingFormat.YAML:
            file_path = path.join(
                self.optimization_process_dir,  # type: ignore
                f"iter_{upper_iteration}_alg_{lower_algorithm_index}_best_solution.yaml"
            )
            with open(file_path, "w") as yaml_file:
                yaml_dump(log_data, yaml_file, YamlDumper)
        elif self.log_format == LoggingFormat.JSON:
            file_path = path.join(
                self.optimization_process_dir,  # type: ignore
                f"iter_{upper_iteration}_alg_{lower_algorithm_index}_best_solution.json"
            )
            with open(file_path, "w") as json_file:
                json_dump(log_data, json_file)
def _dump_algorithm_data(self, algorithm_data: dict, stop_conditions_data: dict) -> None:
    """
    Dumps optimization algorithm data to proper file.

    :param algorithm_data: Configuration data of optimization algorithm used.
    :param stop_conditions_data: Configuration data of optimization process stop conditions.
    """
    if self.log_format == LoggingFormat.YAML:
        algorithm_file_path = path.join(self.optimization_process_dir, "algorithm.yaml")  # type: ignore
        stop_conditions_file_path = path.join(
            self.optimization_process_dir, "stop_conditions.yaml")  # type: ignore
        with open(algorithm_file_path, "w") as yaml_file:
            yaml_dump(algorithm_data, yaml_file, YamlDumper)
        with open(stop_conditions_file_path, "w") as yaml_file:
            yaml_dump(stop_conditions_data, yaml_file, YamlDumper)
    elif self.log_format == LoggingFormat.JSON:
        algorithm_file_path = path.join(self.optimization_process_dir, "algorithm.json")  # type: ignore
        stop_conditions_file_path = path.join(
            self.optimization_process_dir, "stop_conditions.json")  # type: ignore
        with open(algorithm_file_path, "w") as json_file:
            json_dump(algorithm_data, json_file)
        with open(stop_conditions_file_path, "w") as json_file:
            json_dump(stop_conditions_data, json_file)
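# A minimal usage sketch for the three logger methods above, not part of the
# original module. It assumes a logger object exposing `verbosity`, `log_format`
# and `optimization_process_dir`, and solution objects with a `get_log_data()`
# method; the `OptimizationLogger` constructor and `DummySolution` class are
# hypothetical names used only for illustration.
from datetime import timedelta

class DummySolution:
    def get_log_data(self):
        # whatever per-solution data should end up in solutions.yaml/.json
        return {"objective": 42.0, "decision_variables": [1, 2, 3]}

logger = OptimizationLogger(logs_dir="logs",                      # assumed constructor
                            verbosity=LoggingVerbosity.AllSolutions,
                            log_format=LoggingFormat.JSON)
logger.log_iteration(iteration=0, solutions=[DummySolution(), DummySolution()])
logger.log_lower_level_at_end(upper_iteration=0, lower_algorithm_index=1,
                              best_solution=DummySolution(),
                              optimization_time=timedelta(seconds=12))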
def unique_project(self): """iterate all nodes in pbxproj file: PBXProject XCConfigurationList PBXNativeTarget PBXTargetDependency PBXContainerItemProxy XCBuildConfiguration PBX*BuildPhase PBXBuildFile PBXReferenceProxy PBXFileReference PBXGroup PBXVariantGroup """ self.__unique_project(self.root_hex) if self.verbose: debug_result_file_path = path.join(self.xcodeproj_path, 'debug_result.json') with open(debug_result_file_path, 'w') as debug_result_file: json_dump(self.__result, debug_result_file) warning_print("Debug result json file has been written to '", debug_result_file_path, sep='') self.substitute_old_keys()
def generate_all_files(output=OUTPUT, client_secrets=CLIENT_SECRETS, credentials=CREDENTIALS):
    response = None
    if os.path.exists(output):
        try:
            with open(output, 'r') as f:
                response = json_load(f)
        except ValueError:
            response = None
            os.remove(output)
    if not response:
        storage = Storage(credentials)
        credentials = storage.get()
        if not credentials:
            flow = flow_from_clientsecrets(
                client_secrets, scope='https://www.googleapis.com/auth/drive')
            credentials = run_flow(flow, storage)
        http = Http()
        http = credentials.authorize(http)
        service = build('drive', 'v2', http=http)
        try:
            response = {
                'all_files': retrieve_all_files(service),
                'about': service.about().get().execute()
            }
        except errors.HttpError as e:
            error('An error occurred: %s' % e)
            response = None
        else:
            with open(output, 'w') as f:
                json_dump(response, f, indent=2)
def handle_status(self, data, **kwargs):
    if self.fp is None:
        return
    fp = self.fp
    minimum_severity = self.minimum_severity
    host_identifier = kwargs.get('host_identifier')
    created = dt.datetime.utcnow().isoformat()
    for item in data.get('data', []):
        if int(item['severity']) < minimum_severity:
            continue
        if 'created' in item:
            item['created'] = item['created'].isoformat()
        json_dump({
            '@version': 1,
            '@host_identifier': host_identifier,
            '@timestamp': item.get('created', created),
            '@message': item.get('message', ''),
            'log_type': 'status',
            'line': item.get('line', ''),
            'message': item.get('message', ''),
            'severity': item.get('severity', ''),
            'filename': item.get('filename', ''),
            'osquery_version': item.get('version'),  # may be null
            'created': created,
        }, fp)
        fp.write('\r\n')
    else:
        fp.flush()
        os.fsync(fp.fileno())
def test_users_deletion(self):
    """Test API can delete an existing user (DELETE request)."""
    user = {'name': 'Test User 2', "password": "******"}
    response = self.client().post(
        '/users', data=json_dump(user), headers={"Content-Type": "application/json"})
    self.assertEqual(response.status_code, 201)
    users = json_load(response.data)["users"]
    user_id = users["_id"]
    auth = self.get_token(json_dump({"username": user["name"], "password": user["password"]}))
    headers = {**auth, **{"Content-Type": "application/json"}}
    res = self.client().delete(f'/users/{user_id}', headers=headers)
    self.assertEqual(res.status_code, 204)
    # Test to see if it exists, should return a 401
    result = self.client().get(f'/users/{user_id}', headers=headers)
    self.assertEqual(result.status_code, 401)
def save(self):
    # Export config
    tmp = {}
    try:
        for key in self.__dict__:
            if key[:1].isupper():
                tmp[key] = self.__dict__[key]
    except:
        print("Failed to export configuration")
        exit(1)
    # Save config
    try:
        with open(self.configFile, encoding=self.encoding, mode="w+") as f:
            json_dump(tmp, f, indent=4, ensure_ascii=False)
    except:
        print("Failed to save settings.json")
        exit(1)
    # Save config copy
    try:
        with open(self.pathRoot + "settings.bak", encoding=self.encoding, mode="w+") as f:
            json_dump(tmp, f, indent=4, ensure_ascii=False)
    except:
        print("Failed to save settings.bak")
        exit(1)
def scrape_website_for_stats(aws_config_file_path):
    page = get(url)
    soup = BeautifulSoup(page.text, 'html.parser')
    list_of_classes = ['bg-blue', 'bg-green', 'bg-red', 'bg-orange']
    dashboard_data = soup.find(id='site-dashboard').find(class_='site-stats-count')
    dashboard_dict = {}
    for class_type in list_of_classes:
        text = soup.find(class_=class_type).find('span').get_text()
        number = int(soup.find(class_=class_type).find('strong').get_text())
        if 'active' in text.lower():
            dashboard_dict['active'] = number
        if 'death' in text.lower():
            dashboard_dict['deaths'] = number
        if 'cured' in text.lower():
            dashboard_dict['recovered'] = number
        if 'migrated' in text.lower():
            dashboard_dict['migrated'] = number
    update_time = dashboard_data.find(class_='status-update').find('span').get_text()
    dashboard_dict['update_time'] = update_time
    covid_india_stats_filename = 'covid-india-stats.json'
    with open(covid_india_stats_filename, 'w') as json_file:
        json_dump(dashboard_dict, json_file)
    upload_to_aws("covid-india-stats.json", "covid19-india-datasets",
                  "covid-india-stats.json", aws_config_file_path)
def scrape_website_for_state_data(aws_config_file_path):
    page = get(url)
    soup = BeautifulSoup(page.text, 'html.parser')
    soup = soup.find(id='state-data')
    table_data = soup.find('tbody').find_all('tr')
    state_data = {}
    # order of the columns in each row: serial number, state, total cases, recovered and deaths
    for row in table_data:
        row = row.find_all('td')
        if len(row) == 5:
            state_name = row[1].get_text()
            active_cases = row[2].get_text()
            recovered = row[3].get_text()
            deaths = row[4].get_text()
            state_latitude, state_longitude = get_lat_lang(None, state_name, 'previous_data.json')
            state_data[state_name] = {
                'active_cases': active_cases,
                'recovered': recovered,
                'deaths': deaths,
                'latitude': state_latitude,
                'longitude': state_longitude
            }
    covid_india_states_data_filename = 'covid-india-states-data.json'
    with open(covid_india_states_data_filename, 'w') as json_file:
        json_dump(state_data, json_file)
    upload_to_aws("covid-india-states-data.json", "covid19-india-datasets",
                  "covid-india-states-data.json", aws_config_file_path)
def dump(self):
    """Write the contents of this dict to file."""
    s = dict([key, self[key]] for key in sorted(self))
    with open(SERVERLIST_JSON, 'w') as f:
        json_dump(s, f, indent=4)
        f.write('\n')
def generate_all_files(output=OUTPUT, client_secrets=CLIENT_SECRETS, credentials=CREDENTIALS): response = None if os.path.exists(output): try: with open(OUTPUT, "r") as f: response = json_load(f) except ValueError: response = None os.remove(output) if not response: storage = Storage(credentials) credentials = storage.get() if not credentials: flow = flow_from_clientsecrets(client_secrets, scope="https://www.googleapis.com/auth/drive") credentials = run_flow(flow, storage) http = Http() http = credentials.authorize(http) service = build("drive", "v2", http=http) try: response = {"all_files": retrieve_all_files(service), "about": service.about().get().execute()} except errors.HttpError, e: error("An error occurred: %s" % e) response = None else: with open(output, "w") as f: json_dump(response, f, indent=2)
def run(template_fd, resource_fds, output_fd, local_tags, format="yaml"):
    assemblies = {}
    for fd in resource_fds:
        try:
            record_assemblies(fd, assemblies, local_tags)
        except YAMLError as e:
            log.error("While processing resource document %s:", getattr(fd, "filename", "<input>"))
            log.error("%s", str(e))
            return 1
    try:
        docs = transclude_template(template_fd, assemblies, local_tags)
    except YAMLError as e:
        log.error("While processing template document %s:", getattr(template_fd, "filename", "<input>"))
        log.error("%s", str(e))
        return 1
    if format == "json":
        if len(docs) > 1:
            log.warning("Multiple documents are not supported with JSON "
                        "output; only the first document will be written.")
        constructor = SafeConstructor()
        pyobjs = constructor.construct_document(docs[0])
        json_dump(pyobjs, output_fd)
    else:
        yaml_serialize_all(docs, stream=output_fd, Dumper=SafeDumper)
    return 0
def create_view(self, new_view, view_db, index_db):
    """Changes the content of database/view_db/current_view.json

    ``new_view`` -- Path to the open tab in sublime.

    ``view_db`` -- Path to folder where current_view.json is.

    ``index_db`` -- Path in index database folder.

    When user changes between different robot framework data tabs,
    this function changes the context of the database/view_db/current_view.json.
    The current_view.json is used to provide the completions for the
    Sublime on_query_completions API call.
    """
    view_path = path.join(view_db, VIEW_FILE_NAME)
    new_view = normalise_path(new_view)
    index_table = 'index-{0}'.format(rf_table_name(new_view))
    index_table = path.join(index_db, index_table)
    index_data = self.get_data(index_table)
    data = {}
    data[DBJsonSetting.variable] = index_data[DBJsonSetting.variable]
    data[VIEW_NAME] = new_view
    data[VIEW_MD5] = hashlib.md5(new_view.encode('utf-8')).hexdigest()
    data[KW_COMPLETION] = self.get_keyword_completions(index_data)
    if not path.exists(path.dirname(view_path)):
        mkdir(path.dirname(view_path))
    f = open(view_path, 'w')
    json_dump(data, f, indent=4)
    f.close()
def modificar_objeto(CONFIGURACION, objeto, indices):
    objeto = json_loads(objeto)
    if not CONFIGURACION['ATRIBUTO_PRIMARIO'] in objeto.keys():
        raise Exception('OBJETO_SIN_ATRIBUTO_PRIMARIO')
    json = cargar_json(CONFIGURACION)
    encontrado, objeto_encontrado, indice_objeto_almacenado = buscar_objeto(
        CONFIGURACION, indices, json, objeto[CONFIGURACION["ATRIBUTO_PRIMARIO"]])
    objeto_encontrado = None
    if not encontrado:
        raise Exception('NO_EXISTE_EL_DESTINO')
    if indice_objeto_almacenado == None:
        json[indices[0]] = objeto
    else:
        json[indices[0]][indice_objeto_almacenado] = objeto
    with open(CONFIGURACION['FICHERO_JSON'], 'w') as archivo:
        json_dump(json, archivo)
    return str(objeto)
def _change_theme_to(self, name='Dark'):
    self.settings['current_theme'] = name
    with open('./app/settings/settings.json', 'w') as outfile:
        json_dump(self.settings, outfile, indent=4)
    self._realod_app()
def _get_or_create_config(self, module, path, path_es):
    if (CFG_PATH / f'{module}.json').isfile():
        with open(CFG_PATH / f'{module}.json') as inputfile:
            config = json_load(inputfile)
        if 'categories' not in config:
            config['categories'] = {}
        if module not in config:
            config[module] = []
    else:
        config = {'categories': {}, module: []}
        if module == 'items':
            config['maxitems'] = {}
        for name in set(x.name for x in (
                path.listdir() + (path_es.listdir() if IS_ESC_SUPPORT_ENABLED else []))):
            if self._is_valid_config_files(name, path):
                config[module].append(name)
        with open(CFG_PATH / f'{module}.json', 'w') as outputfile:
            json_dump(config, outputfile, indent=4)
    return config
def install_kernel(user=True, prefix=None):
    """ Install the kernel for use by Jupyter.
    """
    td = mkdtemp()
    try:
        chmod(td, 0o755)  # Starts off as 700, not user readable
        with open(path.join(td, "kernel.json"), "w") as f:
            json_dump(
                {
                    "argv": [
                        "python",
                        "-m",
                        "py2neo.cypher.kernel",
                        "launch",
                        "-f",
                        "{connection_file}"
                    ],
                    "display_name": "Cypher",
                    "language": "cypher",
                    "pygments_lexer": "py2neo.cypher",
                },
                f,
                sort_keys=True)
        return KernelSpecManager().install_kernel_spec(td, KERNEL_NAME, user=user, prefix=prefix)
    finally:
        rmtree(td)
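# A sketch of how install_kernel() above might be invoked (assumed usage, not
# part of the original module): install the Cypher kernel spec for the current
# user, then confirm Jupyter can see it.
from jupyter_client.kernelspec import KernelSpecManager

dest = install_kernel(user=True)   # returns the directory the kernel spec was installed into
print("kernel spec installed in", dest)
print("known kernels:", list(KernelSpecManager().find_kernel_specs()))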
def unique_project(self):
    # Iterate over nodes of the following types:
    # PBXProject
    # XCConfigurationList
    # PBXNativeTarget
    # PBXTargetDependency
    # PBXContainerItemProxy
    # XCBuildConfiguration
    # PBX*BuildPhase
    # PBXBuildFile
    # PBXReferenceProxy
    # PBXFileReference
    # PBXGroup
    # PBXVariantGroup
    # Start from the root node.
    self.__unique_project(self.root_hex)
    if self.verbose:
        debug_result_file_path = path.join(self.xcodeproj_path, 'debug_result.json')
        with open(debug_result_file_path, 'w') as debug_result_file:
            json_dump(self.__result, debug_result_file)
        warning_print("Debug result json file has been written to '", debug_result_file_path, sep='')
    self.substitute_old_keys()
def handle_result(self, data, **kwargs):
    if self.fp is None:
        return
    fp = self.fp
    host_identifier = kwargs.get('host_identifier')
    created = dt.datetime.utcnow().isoformat()
    for item in extract_results(data):
        json_dump(
            {
                '@version': 1,
                '@host_identifier': host_identifier,
                '@timestamp': item.timestamp.isoformat(),
                'log_type': 'result',
                'action': item.action,
                'columns': item.columns,
                'name': item.name,
                'created': created,
            }, fp)
        fp.write('\r\n')
    else:
        fp.flush()
        os.fsync(fp.fileno())
def create(self, file_name: str) -> Generator[None, None, None]:
    assert file_name.endswith(".tf.json") or file_name.endswith(".tfvars.json")
    assert "/" not in file_name
    if not hasattr(self, "_blocks"):
        self._blocks: Dict[str, list] = collections.defaultdict(list)
    if not hasattr(self, "_create"):
        self._create: List[str] = []
    self._create.append(file_name)
    yield
    self._create.pop()
    contents = self._blocks.pop(file_name)
    with open(file_name, "w") as open_file:
        json_dump(contents, open_file, indent=2, default=render.json_default)
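# A usage sketch for the generator-based create() above. Assumption: in the
# original project it is consumed as a context manager (e.g. wrapped with
# contextlib.contextmanager), which is not visible in this snippet; `writer`
# is a hypothetical instance of the containing class, and the _blocks mapping
# is populated directly here only to keep the sketch self-contained.
import contextlib

with contextlib.contextmanager(writer.create)("main.tf.json"):
    writer._blocks["main.tf.json"].append(
        {"resource": {"null_resource": {"example": {}}}})
# on exit, main.tf.json is written with the accumulated blocks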
def handle_result(self, data, **kwargs):
    if self.fp is None:
        return
    fp = self.fp
    host_identifier = kwargs.get('host_identifier')
    created = dt.datetime.utcnow().isoformat()
    try:
        for item in extract_results(data):
            json_dump({
                '@version': 1,
                '@host_identifier': host_identifier,
                '@timestamp': item.timestamp.isoformat(),
                'log_type': 'result',
                'action': item.action,
                'columns': item.columns,
                'name': item.name,
                'created': created,
            }, fp)
            fp.write('\r\n')
    finally:
        fp.flush()
        os.fsync(fp.fileno())
def __init__(self, config: Dict, logging_folder: str, checkpoints_folder: str):
    super().__init__(checkpoints_folder)
    self.file_path = join_path(logging_folder, f'{self.timestamp}.log')
    with open(self.file_path, 'w') as logging_file:
        json_dump(config, logging_file)
        logging_file.write('\n' + self._dividing_line)
def as_json(self):
    json = {}
    for place in self.places:
        json[place.__name__] = place.as_json()
    return json_dump(json)
def collect():
    files = os.listdir(reduce_directory)
    collected = {}
    for file in files:
        with open(reduce_directory + file, 'rb') as f:
            reduced = load(f)
            collected.update(reduced)
    words = len(collected.keys())
    top = max(collected, key=collected.get)
    bot = min(collected, key=collected.get)
    print(f'The collected processed data has {words} different words.')
    print(f'The most common word is "{top}" with {collected[top]} occurrences.')
    print(f'One of the least common words is "{bot}" with {collected[bot]} occurrences.')
    with open('collected', 'w+') as f:
        json_dump([(key, collected[key]) for key in sorted(collected)], f, indent=2)
def fetch_yandex(dump_folder: Optional[str] = COVID19RU_PENDING) -> PendingData:
    """ Fetch COVID19 data from Yandex

    Based on https://github.com/AlexxIT/YandexCOVID/blob/master/custom_components/yandex_covid/sensor.py
    """
    text = fetch_yandex_text()
    m = RE_HTML.search(text)
    assert m is not None, "Yandex page doesn't contain 'covid-view' tag"
    data = json.loads(m[1])
    attrs = {
        p['name']: {
            'cases': p['cases'],
            'cured': p['cured'],
            'deaths': p['deaths'],
            'coordinates': list(p['coordinates']),  # [Lon,Lat] !!!
            'histogram': p.get('histogram', [])
        }
        for p in data['covidData']['items']
        if ('ru' in p) and (p['ru'] is True)
    }
    data = PendingData(datetime.utcnow(), attrs)
    if dump_folder is not None:
        assert isdir(dump_folder)
        filepath = join(dump_folder, timestring(data.utcnow) + '.json')
        with open(filepath, 'w') as f:
            json_dump(data.val, f, indent=4, ensure_ascii=False)
        print(f'Saved {filepath}')
    return data
def unique_project(self): """iterate all nodes in pbxproj file: PBXProject XCConfigurationList PBXNativeTarget PBXTargetDependency PBXContainerItemProxy XCBuildConfiguration PBXSourcesBuildPhase PBXFrameworksBuildPhase PBXResourcesBuildPhase PBXBuildFile PBXReferenceProxy PBXFileReference PBXGroup PBXVariantGroup """ self.__unique_project(self.root_hex) if self.verbose: debug_result_file_path = path.join(self.xcodeproj_path, 'debug_result.json') with open(debug_result_file_path, 'w') as debug_result_file: json_dump(self.__result, debug_result_file) self.vprint("result json file has been written to '{}'".format(debug_result_file_path)) self.replace_uuids_with_file()
def main():
    project_id = cmdline_argv[1]
    out_file_path = cmdline_argv[2]
    schema = do_query(project_id, CLINICAL_TABLE_ID)
    with open(out_file_path, 'w') as outfile:
        json_dump(schema, outfile, sort_keys=True, indent=2)
def exportalarmsJSON(self, filename):
    try:
        self.savealarms()
        alarms = {}
        alarms["alarmflags"] = self.aw.qmc.alarmflag
        alarms["alarmguards"] = self.aw.qmc.alarmguard
        alarms["alarmnegguards"] = self.aw.qmc.alarmnegguard
        alarms["alarmtimes"] = self.aw.qmc.alarmtime
        alarms["alarmoffsets"] = self.aw.qmc.alarmoffset
        alarms["alarmconds"] = self.aw.qmc.alarmcond
        alarms["alarmsources"] = self.aw.qmc.alarmsource
        alarms["alarmtemperatures"] = self.aw.qmc.alarmtemperature
        alarms["alarmactions"] = self.aw.qmc.alarmaction
        alarms["alarmbeep"] = self.aw.qmc.alarmbeep
        alarms["alarmstrings"] = list(self.aw.qmc.alarmstrings)
        outfile = open(filename, 'w')
        from json import dump as json_dump
        json_dump(alarms, outfile, ensure_ascii=True)
        outfile.write('\n')
        outfile.close()
        return True
    except Exception as ex:
        _, _, exc_tb = sys.exc_info()
        self.aw.qmc.adderror((QApplication.translate("Error Message", "Exception:", None) +
                              " exportalarmsJSON(): {0}").format(str(ex)), exc_tb.tb_lineno)
        return False
def __display_json(self, records, default):
    json_dump(records, sys.stdout, ensure_ascii=False, default=default, sort_keys=True, indent=2)
def slideshow():
    form = SlideshowSettingsForm()
    if request.method == 'POST':
        if form.validate_on_submit():
            flash(_l('Config saved'))
            if form.background_image.data != None:
                f = form.background_image.data
                f.save(os.path.join('/var/lib/lliurex-news', 'slideshow_background.png'))
            with open('/etc/lliurex-news/slideshow.conf', 'w') as fd:
                config = form.data
                config.pop('background_image')
                config.pop('submit')
                config.pop('csrf_token')
                json_dump(config, fd, indent=4)
    else:
        if os.path.exists('/etc/lliurex-news/slideshow.conf'):
            try:
                with open('/etc/lliurex-news/slideshow.conf', 'r') as fd:
                    obj = json_load(fd)
                    form = SlideshowSettingsForm(**obj)
            except:
                pass
    return render_template(
        'admin/slideshow/slideshow_admin.html',
        title='admin slideshow',
        list_menu=get_menu_list('admin_slideshowmodule.slideshow'),
        form=form)
def merge_abi_files(files: Iterable[Path], result_path: Path):
    files = [open(str(path), 'r') for path in files if path.exists()]
    json_dump(obj=merge_abi(map(json_load, files)),
              fp=open(str(result_path), 'w'),
              indent=True)
    for file in files:
        file.close()
def decorator(old_function):
    stdin = TemporaryFile("w+t")
    json_dump(stdin_payload, stdin)
    stdin.seek(0)
    patcher = patch("insights.client.client.sys.stdin",
                    new_callable=PropertyMock(return_value=stdin))
    return patcher(old_function)
def temp_conf_file():
    """
    Creates a valid temporary config file.
    """
    collection_rules_file = NamedTemporaryFile()
    json_dump({"version": "1.2.3"}, collection_rules_file)
    collection_rules_file.seek(0)
    return collection_rules_file
def save_settings(self, _=None):
    settings['projects_dir'] = self.projects_dir
    settings['photo_an']['preview_size'] = (int(self.scale_preview_size.get() / 10) * 10)
    settings['photo_an']['geo_an'] = self.ch_btn_geo_an_value.get()
    settings['photo_an']['obj_detect_an'] = self.ch_btn_obj_detect_an_value.get()
    settings['photo_an']['project_an'] = self.ch_btn_project_an_value.get()
    with open(settings_json, 'w', encoding='utf-8') as f:
        json_dump(settings, f, sort_keys=True, indent=2)
    self.destroy()
def remove(cls, name):
    with open(cls.FILE, "r+") as f:
        json_obj = json_load(f)
    if name not in json_obj:
        logger.debug("No such name %s" % (name,))
        return False
    del json_obj[name]
    with open(cls.FILE, "w") as f:
        json_dump(json_obj, f, indent=2, sort_keys=True)
    return True
def spamNewPhys(table, id, user, status="declined"):
    """also accepts post arguments that let you remove a preliminary decay/particle
    or add it to the live table"""
    try:
        ret = new_physics.remove({"_id": ObjectId(id), "type": table})
        if ret['n'] == 1:
            return Response(json_dump({'result': True, "err": ""}), mimetype='application/json')
        else:
            return Response(json_dump({'result': False, "err": "%i rows removed" % ret['n']}),
                            mimetype='application/json')
    except Exception as err:
        return Response(json_dump({'result': False, "err": str(err)}), mimetype='application/json')
def safe_json(obj: object, filepath: str, **kwargs):
    temp = StringIO()
    kwargs['indent'] = kwargs.get('indent', 4)
    json_dump(obj, temp, **kwargs)
    tail, _ = path_split(filepath)
    if not path_exists(tail):
        makedirs(tail)
    with open(filepath, 'w') as f:
        f.write(temp.getvalue())
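# Usage sketch for safe_json() above (file names are illustrative). Because the
# object is serialized into a StringIO first, a failed json_dump (e.g. on a
# non-serializable value) never opens or truncates the target file.
config = {"name": "demo", "thresholds": [0.1, 0.5, 0.9]}
safe_json(config, "output/config.json")                                  # creates output/ if missing
safe_json(config, "output/compact.json", indent=None, separators=(",", ":"))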
def register(cls, name, address, port, old_name=None, **options):
    if exists(cls.FILE):
        with open(cls.FILE, "r+") as f:
            json_obj = json_load(f)
    else:
        json_obj = {}
    if name in json_obj:
        raise ValueError("Name already taken")
    if old_name in json_obj:
        logger.debug("Removing %s" % (old_name,))
        del json_obj[old_name]
    json_obj[name] = address, port, options
    with open(cls.FILE, "w") as f:
        json_dump(json_obj, f, indent=2, sort_keys=True)
def round_json(query1, query2):
    try:
        interval = smart_interval(float(query1), float(query2), 2)
        # print [str(interval[0]), str(interval[1])]
        return Response(json_dump([str(interval[0]), str(interval[1])]), mimetype='application/json')
    except:
        return False
def getAccessControlList(resource):
    """Given a Resource, return what tags/users can/cannot access a resource and why,
    as a JSON structure"""
    users = _getResourceUsers(resource)
    jsonarr = []
    c = {'board': "Contact [email protected] with any questions.",
         'orientation': 'Orientation is every Thursday at 7pm, or contact board@makeitlabs to schedule a convenient time',
         'resource': "See the Wiki for training information and resource manager contact info."}
    # TODO: Resource-specific contacts?
    # Now that we know the explicit allowed/denied state per resource, provide a message
    for u in users:
        warning = ""
        allowed = u['allowed']
        if u['past_due'] == 'true':
            warning = "Your membership expired (%s) and the grace period for access has ended. %s" % (u['expires_date'], c['board'])
            allowed = 'false'
        elif u['enabled'] == 0:
            if u['reason'] is not None:
                # This indicates an authorized admin has a specific reason for denying access to ALL resources
                warning = "This account has been disabled for a specific reason: %s. %s" % (u['reason'], c['board'])
            else:
                warning = "This account is not enabled. It may be newly added and not have a waiver on file. %s" % c['board']
            allowed = 'false'
        elif u['allowed'] == 'denied':
            # Special 'are you oriented' check
            if resource == 'frontdoor':
                warning = "You have a valid membership, but you must complete orientation for access. %s" % c['orientation']
            else:
                warning = "You do not have access to this resource. %s" % c['resource']
        elif u['grace_period'] == 'true':
            warning = """Your membership expired (%s) and you are in the temporary grace period.
            Correct this as soon as possible or you will lose all access! %s""" % (u['expires_date'], c['board'])
        jsonarr.append({'tagid': u['tagid'], 'allowed': allowed, 'warning': warning,
                        'member': u['member'], 'nickname': u['nickname'], 'plan': u['plan'],
                        'last_accessed': u['last_accessed']})
    return json_dump(jsonarr)
def p_json(query):
    if query == "null":
        p_names = []
        for p in Particle.objects():
            p_names.append(p.name)
        return Response(json_dump({'p_names': p_names}), mimetype='application/json')
    p_names = []
    last = query.split(" ")[-1]
    n_tot = 10
    for p in Particle.objects():
        if last == p.name[0:len(last)]:
            p_names.append(query[:-len(last)] + p.name)
            n_tot -= 1
            if n_tot < 1:
                break
    return Response(json_dump({'p_names': p_names}), mimetype='application/json')
def async_render_GET(self, request):
    response_body = yield self.run_query_GET(request.args)
    if response_body is None:
        request.setResponseCode(404)
        returnValue(b'')
    request.setHeader('Content-Type', 'application/json')
    returnValue(json_dump(response_body, cls=ComplexEncoder).encode())
def s_json(query1, query2):
    try:
        existing_runs = find_existing_runs(int(query1), int(query2))
        print("collecting info_lite")
        info_lite = get_info_lite(existing_runs)
        print("info collected")
        return Response(json_dump(info_lite), mimetype='application/json')
    except:
        return False
def async_render_PUT(self, request):
    submission = json_load(request.content.read().decode())
    response_body = yield self.run_query_PUT(submission, request.args)
    if response_body is None:
        request.setResponseCode(404)
        returnValue(b'')
    request.setHeader('Content-Type', 'application/json')
    returnValue(json_dump(response_body, cls=ComplexEncoder).encode())
def Capture(this_program, to_json=None, logfile=None, timeout=None, python=DEFAULT_PYTHON):
    """Run `this_program` by ProcessCapsule and Capture I/O `to_json`."""
    captured = []
    with Capsule(this_program, logfile=logfile, python=python) as capsule:
        capsule.run(with_check=False)
        while not capsule.is_dead():
            _TerminalCapsuleUtils.endpoints(
                _TerminalCapsuleUtils.hook(capsule, timeout=timeout),
                records=captured,
            )
    if to_json:
        with open(to_json, 'w') as json_fp:
            json_dump(captured, json_fp)
    return captured
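# Hypothetical call to Capture() above (the target script name is illustrative):
# run a program under the capsule, record its terminal I/O, and also dump the
# records to a JSON file.
records = Capture("examples/guess_number.py",
                  to_json="guess_number.capture.json",
                  timeout=5)
print("captured", len(records), "I/O events")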
def exportResults(self, kind='normal'):
    i = 0
    name, ext = os.path.splitext(self.args.export)
    while os.path.exists(self.args.export):
        i += 1
        self.args.export = '%s_%d%s' % (name, i, ext)
    if kind == 'full':
        ret = self.rated_results
    else:
        # ret = {k: [{kk: dd[kk] for kk in ['basename', 'rated']} for dd in self.rated_results[k]] for k in self.rated_results}
        ret = {rt: {} for rt in self.rated_results}
        for r, l in _iteritems(self.rated_results):
            for m in l:
                ret[r][m['basename']] = m['rated']
    with open(self.args.export, 'w') as outFile:
        json_dump(ret, outFile)
def render_json(stream):
    components = map(lambda comp: {
        "processId": comp.processId(),
        "name": comp.name(),
        "metrics": map(lambda value: {"name": value.name(),
                                      "value": value.value(),
                                      "type": value.type_name(),
                                      "mtime": value.mtime()},
                       comp.list())
    }, cmx.Registry.list())
    return json_dump(components, stream)
def main(args=None):
    if args is None:
        args = sys.argv[1:]
    parser = ArgumentParser(description='cluster sequences using DBSCAN')
    parser.add_argument('input', nargs='?', type=FileType('r'), default=sys.stdin)
    parser.add_argument('output', nargs='?', type=FileType('w'), default=sys.stdout)
    parser.add_argument('-k', type=int, default=16, help='K-mer size for hashing')
    parser.add_argument('-m', type=int, default=8,
                        help='min size of K-mer after homopolymer reduction')
    ns = parser.parse_args(args)
    seqs = [r for r in SeqIO.parse(ns.input, 'fasta')]
    m = cluster(seqs, ns.k, ns.m)
    json_dump(m, ns.output, indent=1)
    return 0
def prepare(self):
    cfg = self.conf
    args = self.args
    c = cfg.get('amd')
    if c:
        args.append('--amd')
    c = cfg.get('commonjs')
    if c:
        args.append('--commonjs')
    c = cfg.get('handlebarpath')
    if c:
        args.append("--handlebarPath='%s'" % c)
    for handler in make_list(cfg.get('known')):
        args.append("--known='%s'" % handler)
    c = cfg.get('known_only')
    if c:
        args.append('--knownOnly')
    c = cfg.get('minimize')
    if c:
        args.append('--min')
    c = cfg.get('namespace')
    if c:
        args.append("--namespace='%s'" % c)
    c = cfg.get('simple')
    if c:
        args.append('--simple')
    c = cfg.get('root')
    if c:
        args.append("--root='%s'" % c)
    c = cfg.get('partial')
    if c:
        args.append('--partial')
    c = cfg.get('data')
    if c:
        args.append("--data='%s'" % json_dump(c))
    c = cfg.get('extension')
    if c:
        args.append("--extension='%s'" % c)
    c = cfg.get('bom')
    if c:
        args.append('--bom')
def json(query):
    if not query:
        return redirect('/')
    query = [x for x in query.split(' ') if x != '']
    start = datetime.now()
    result = do_search(query)
    end = datetime.now()
    return Response(json_dump({'result': result, 'time': str(end - start)}), mimetype='application/json')
def browse():
    response.menu = default_menu()
    respdict = {}
    attribs = get_attribs()
    respdict['attribs'] = attribs
    attrib_values = {}
    for attrib in attribs:
        vals = get_attrib_values(attrib.id)
        vallist = [(v.id, v.name) for v in vals]
        attrib_values[attrib.id] = vallist
    respdict['attrib_values'] = attrib_values
    from json import dumps as json_dump
    respdict['attrib_values_json'] = json_dump(attrib_values).replace("'", "\\'")
    reels = []
    respdict['query_att'] = ''
    respdict['query_val'] = ''
    if len(request.args) == 2:
        att_id = request.args[0]
        val_id = request.args[1]
        respdict['query_att'] = att_id
        respdict['query_val'] = val_id
        q = db((db.reel_metadata.attrib == att_id) & (db.reel_metadata.value == val_id))
        res = q.select(db.reel_metadata.reel, orderby=db.reel_metadata.reel.name)
        reels = [x.reel for x in res]
    elif len(request.args) == 1 and request.args[0] == 'all':
        res = db().select(db.reel.ALL, orderby=db.reel.name)
        reels = [x for x in res]
    else:
        redirect(URL('stations', 'browse', args=['all']))
        return
    reels = filter((lambda r: not r.hidden), reels)
    respdict['reels'] = reels
    # from random import shuffle
    prizes = {}
    for reel in reels:
        prizelist = []
        res = db(db.prize.reel == reel).select(db.prize.ALL, orderby=db.prize.repeat)
        for prize in res:
            prizelist.append((prize.id, prize.typeid, prize_imgurl(prize.id, size=64),
                              prize.name, prize.iskprize < 0.01 and not prize.pack))
        # shuffle(imglist)
        prizes[reel.id] = prizelist
    respdict['prizes'] = prizes
    return respdict
def api_v1_showmember(id):
    """(API) Return details about a member, currently JSON only"""
    mid = safestr(id)
    outformat = request.args.get('output', 'json')
    sqlstr = """select m.member, m.plan, m.alt_email, m.firstname, m.lastname, m.phone, p.expires_date
                from members m inner join payments p on m.member=p.member where m.member='%s'""" % mid
    m = query_db(sqlstr, "", True)
    if outformat == 'json':
        output = {'member': m['member'], 'plan': m['plan'], 'alt_email': m['alt_email'],
                  'firstname': m['firstname'], 'lastname': m['lastname'],
                  'phone': m['phone'], 'expires_date': m['expires_date']}
        return json_dump(output), 200, {'Content-type': 'application/json'}
def generateSignature(self, relativeFilePath):
    ContractsFixture.ensureCacheDirectoryExists()
    filename = path.basename(relativeFilePath)
    name = path.splitext(filename)[0]
    outputPath = path.join(COMPILATION_CACHE, name + 'Signature')
    lastCompilationTime = path.getmtime(outputPath) if path.isfile(outputPath) else 0
    if path.getmtime(relativeFilePath) > lastCompilationTime:
        print('generating signature for ' + name)
        extension = path.splitext(filename)[1]
        signature = None
        if extension == '.sol':
            signature = self.compileSolidity(relativeFilePath)['abi']
        else:
            raise
        with open(outputPath, mode='w') as file:
            json_dump(signature, file)
    else:
        pass  # print('using cached signature for ' + name)
    with open(outputPath, 'r') as file:
        signature = json_load(file)
    return signature
def main(args):
    config = load_config(args[0])
    check_config(config, args[0])

    # gen output directory
    if path.isfile(config["out_dir"]):
        raise Exception("%s is a file" % config["out_dir"])
    if not path.isdir(config["out_dir"]):
        makedirs(config["out_dir"])
    elif not config["force_cleanup"] and config["cleanup_lvl"] > 0:
        raise Exception("cleanup_lvl > 0 on an existing directory: %s" % config["out_dir"])

    write_config_files(config)
    eq_map = gen_equal_output_map(config)
    output_dict = gen_output_dict(config, eq_map)

    # write output as txt file
    output_dict_path = path.join(config["out_dir"], "out.txt")
    write_output_dict_pretty(output_dict, output_dict_path)

    # read output txt file to print it
    with open(output_dict_path, 'r') as f:
        print()
        print(f.read())
    keep_files = [output_dict_path]

    # write output as json file
    if config["json_output"]:
        output_dict_json_path = path.join(config["out_dir"], "out.json")
        with open(output_dict_json_path, 'w') as f:
            json_dump(output_dict, f)
        keep_files.append(output_dict_json_path)

    # clean output directory
    cleanup(config["cleanup_lvl"], eq_map, config["out_dir"], keep_files)
def build_json_from_mapcycle_txt():
    mapcycle_txt = GAME_PATH / 'cfg' / cvar_mapcyclefile.get_string()
    if not mapcycle_txt.isfile():
        mapcycle_txt = GAME_PATH / 'cfg' / DEFAULT_MAPCYCLEFILE
        if not mapcycle_txt.isfile():
            raise FileNotFoundError("Missing {}".format(DEFAULT_MAPCYCLEFILE))
    rs = []
    with open(mapcycle_txt) as f:
        for line in f:
            line = line.strip()
            if not line:
                continue
            if line.startswith('//'):
                continue
            rs.append({
                'filename': line,
            })
    with open(MAPCYCLE_JSON_FILE, 'w') as f:
        json_dump(rs, f, indent=4)