def load(self, jf):
    """jf: JSON file object."""
    jdata = _json_load(jf)
    try:
        first_money = jdata[0]
        if not isinstance(first_money, int):
            raise FirstMoneyNotFoundException('First Money Not Found')
        del jdata[0]
    except IndexError:
        raise FirstMoneyNotFoundException('First Money Not Found')
    kakebo = Kakebo(first_money)
    while jdata:
        try:
            date = _parse_date(jdata[0])
        except IllegalDateException as e:
            # stderr.write() needs a str, and "date" would be unbound below,
            # so report the error and re-raise instead of falling through.
            stderr.write(str(e))
            raise
        daily = Daily(date)
        del jdata[0]
        for a_content in jdata[0]:
            s_content, income = a_content
            # A leading '#' marks an entry that statistics should ignore.
            ignore_statics = s_content.startswith('#')
            content = Content(s_content, income, ignore_statics=ignore_statics)
            daily.append(content)
        del jdata[0]
        kakebo.append(daily)
    return kakebo
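# A sketch of the JSON layout the loader above appears to expect, inferred
# from its parsing logic rather than from any documentation: the first
# element is the initial amount of money as an int, followed by alternating
# date strings (whatever _parse_date accepts) and lists of [content, income]
# pairs. All values below are illustrative.
#
# [
#     10000,
#     "2024-01-05",
#     [["groceries", -1200], ["#carried over", 500]],
#     "2024-01-06",
#     [["salary", 250000]]
# ]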
def json_read(path, **kwargs):
    """
    Read a JSON file.

    Args:
        path (path-like object): Path to file to load.
        kwargs: "json.load" kwargs.

    Returns:
        dict or list: Deserialized content.
    """
    with open(_fsdecode(path), 'rt') as file:
        return _json_load(file, **kwargs)
def _load():
    if _path.isfile(_file_path):
        loaded_configuration = {}
        try:
            with open(_file_path, 'r') as config_file:
                loaded_configuration = _json_load(config_file)
        except Exception:
            _log.error('failed to load from %s', _file_path)
        # loaded_configuration.update(_configuration)
        _configuration.clear()
        _configuration.update(loaded_configuration)
    if _log.isEnabledFor(_DEBUG):
        _log.debug('load => %s', _configuration)
    _cleanup(_configuration)
    _configuration[_KEY_VERSION] = __version__
    return _configuration
def json_read(path, **kwargs):
    """
    Read a JSON file.

    Args:
        path (path-like object): Path to file to load.
        kwargs: "json.load" kwargs.

    Returns:
        dict or list: Deserialized content.
    """
    path = _realpath(_fsdecode(path))
    with open(path, 'rt') as file:
        try:
            return _json_load(file, **kwargs)
        except _JSONDecodeError as exception:
            raise _ConfigurationException(
                f'Unable to read "{path}": {exception}')
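# A minimal usage sketch for the json_read variant above; the file name is
# hypothetical. On malformed JSON it raises _ConfigurationException carrying
# the resolved path, so callers can report the error directly:
#
# try:
#     settings = json_read('settings.json')
# except _ConfigurationException as error:
#     print(error)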
def read_json(file_path: str) -> _tmc_dict:
    d = {}
    valid_keys = ['probability', 'state_from', 'state_to']
    with open(file_path, mode='r') as file:
        file.seek(0)
        data = _json_load(file)
    if not isinstance(data, list):  # pragma: no cover
        raise ValueError('The file format is not compliant.')
    for obj in data:
        if not isinstance(obj, dict):  # pragma: no cover
            raise ValueError('The file format is not compliant.')
        if sorted(obj.keys()) != valid_keys:  # pragma: no cover
            raise ValueError('The file contains invalid elements.')
        state_from = obj['state_from']
        state_to = obj['state_to']
        probability = obj['probability']
        if not isinstance(state_from, str) or len(state_from) == 0:  # pragma: no cover
            raise ValueError('The file contains invalid elements.')
        if not isinstance(state_to, str) or len(state_to) == 0:  # pragma: no cover
            raise ValueError('The file contains invalid elements.')
        if not isinstance(probability, (float, int, _np.floating, _np.integer)):  # pragma: no cover
            raise ValueError('The file contains invalid elements.')
        d[(state_from, state_to)] = float(probability)
    return d
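# A usage sketch for read_json above. The JSON layout, a list of objects with
# exactly the keys "probability", "state_from" and "state_to", is inferred
# from the validation logic; the file name and state labels are invented.
#
# transitions.json:
#     [
#         {"state_from": "A", "state_to": "B", "probability": 0.3},
#         {"state_from": "A", "state_to": "A", "probability": 0.7}
#     ]
#
# d = read_json('transitions.json')
# assert d[('A', 'B')] == 0.3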
def pytest_generate_tests(metafunc):
    module = metafunc.module.__name__
    func = metafunc.definition.name
    mark = metafunc.definition.get_closest_marker('parametrize')
    names = metafunc.fixturenames
    # The fixture file is named after the part of the module name that
    # follows the first underscore (e.g. "test_foo" -> "foo").
    test_index = module.find('_') + 1
    test_name = module[test_index:]
    if test_name not in _fixtures:
        base_directory = _os_abspath(_os_dirname(__file__))
        fixtures_file = _os_join(base_directory, f'fixtures/fixtures_{test_name}.json')
        if not _os_isfile(fixtures_file):
            _fixtures[test_name] = None
        else:
            with open(fixtures_file, 'r') as file:
                fixture = _json_load(file)
            fixture = _sanitize_fixture_recursive(fixture, _replacements)
            _fixtures[test_name] = fixture
    fixture = _fixtures[test_name]
    values = []
    ids = []
    # Only parametrize tests that request fixtures, carry no explicit
    # @pytest.mark.parametrize marker and have a non-empty fixture to draw from.
    if len(names) > 0 and mark is None and fixture is not None and len(fixture) > 0:
        if isinstance(fixture, dict):
            values, ids = _parse_fixture_dictionary(fixture, names, func)
        elif isinstance(fixture, list):
            values, ids = _parse_fixture_list(fixture, names, func)
    metafunc.parametrize(names, values, False, ids)
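# Purely hypothetical illustration of how the hook above is exercised: a
# module test_matrices.py would be paired with fixtures/fixtures_matrices.json,
# and a dict fixture keyed by case id could look like the sketch below. The
# real schema is fixed by _parse_fixture_dictionary/_parse_fixture_list,
# which are not shown here, so treat this layout as an assumption.
#
# {
#     "case_identity": {"size": 2, "expected": [[1, 0], [0, 1]]}
# }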
# </editor-fold>

# <editor-fold desc="Loads and declarations">
_this_module = _sys_modules[__name__]
_classmap = _get_classmap()
_command_manager = stdout_redirected
_DISKFULL_RE = _re_compile('Disk Full')
_ID = 'OpendssdirectEnhancer'
setattr(_this_module, 'utils', _odr.utils)

# Loads chains of functions through which to pass the rough outputs of opendss.
with open(TREATMENTS_PATH, 'r') as _tfile:
    _rtrt = _json_load(_tfile)
_trt = dict()
for _subdic_name, _subdic in _rtrt.items():
    _nsd = {k: tuple([globals()[_t] for _t in _v]) for k, _v in _subdic.items()}
    _trt[_subdic_name] = _nsd

# Loads measurement units for the interface of components without self-referencing.
# The components with self-referencing, like lines and loadshapes, are taken care of at runtime.
with open(UNIT_MEASUREMENT_PATH, 'r') as _ufile:
    _rumr = _json_load(_ufile)
_umr = dict()
for _subdic_name, _subdic in _rumr.items():
    _nsd = {_k: _UM.parse_units(_v) for _k, _v in _subdic.items()}
    _umr[_subdic_name] = _nsd
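# A hypothetical sketch of the shape TREATMENTS_PATH appears to hold, inferred
# from the loop above rather than from project documentation: each top-level
# key maps member names to lists of function names that must resolve through
# globals(), and the resolved functions are chained over the raw opendss
# output. The names "_pairwise" and "_to_array" below are invented.
#
# {
#     "some_interface": {
#         "SomeMember": ["_pairwise", "_to_array"]
#     }
# }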
def json_load(filename):
    can_read_file(filename)
    with open(filename, 'r') as f:
        res = _json_load(f)
    return res
def main():
    parser = get_option_parser()
    (options, filenames) = parser.parse_args()

    if len(filenames) >= 3:
        error('Too many filenames.')

    if options._in:
        filename = filenames[0]
        with open(filename) as jf:
            sudokus = _json_load(jf)
        sudokus.append(get_entered_sudoku())
        with open(filename, 'w') as jf:
            _json_dump(sudokus, jf, indent=4)
        print('Done')
        return

    show_page_number = not options.hidden_page_number
    show_title = not options.hidden_title

    if options.out_filename is None:
        out_filename = os.path.splitext(filenames[0])[0] + '.pdf'
    else:
        out_filename = options.out_filename

    if options.problem_name is None:
        problem_name = 'Problem'
    else:
        problem_name = options.problem_name

    if options.answer_name is None:
        answer_name = 'Answer'
    else:
        answer_name = options.answer_name

    c = canvas.Canvas(out_filename, bottomup=False)

    if len(filenames) == 1:
        # case: only problem files
        json_filename = filenames[0]
        try:
            with open(json_filename) as f:
                sudokus = Sudoku.load(f)
        except IOError:
            error('File {} not found.'.format(json_filename))
        except ValueError:
            error('Illegal format')

        page_infos = []
        for j, sudoku in enumerate(sudokus):
            page_infos.append(SudokuPageInfo(
                sudoku, '{} {}'.format(problem_name, j + 1),
                show_page_number=show_page_number,
                show_title=show_title,
                page_number=j + 1))
    elif len(filenames) == 2:
        prob_filename, ques_filename = filenames
        try:
            with open(prob_filename) as f:
                prob_sudokus = Sudoku.load(f)
        except IOError:
            error('File {} not found.'.format(prob_filename))
        except ValueError:
            error('Illegal format')

        try:
            with open(ques_filename) as f:
                ques_sudokus = Sudoku.load(f)
        except IOError:
            error('File {} not found.'.format(ques_filename))
        except ValueError:
            error('Illegal format')

        page_infos = []
        page_number = 1
        for j, sudoku in enumerate(prob_sudokus):
            page_infos.append(SudokuPageInfo(
                sudoku, '{} {}'.format(problem_name, j + 1),
                show_page_number=show_page_number,
                show_title=show_title,
                page_number=page_number))
            page_number += 1
        for j, sudoku in enumerate(ques_sudokus):
            page_infos.append(SudokuPageInfo(
                sudoku, '{} {}'.format(answer_name, j + 1),
                show_page_number=show_page_number,
                show_title=show_title,
                page_number=page_number))
            page_number += 1

    for i, page_info in enumerate(page_infos):
        page_info.write_pdf(c)
        if i != len(page_infos) - 1:
            c.showPage()
    c.save()
from typing import Any


def load_json_from_file(file_path: str) -> Any:
    # json.load returns the deserialized document (dict, list, etc.),
    # so the return annotation must not be str.
    with open(file_path) as fp:
        result = _json_load(fp)
    return result
from json import load as _json_load
import os
from typing import Dict

import utils

PWD = os.getcwd()
if (os.path.sep + 'src') in PWD:
    PWD = os.path.join(os.getcwd(), "pss_data")
else:
    PWD = os.path.join(os.getcwd(), "src", "pss_data")

ID_NAMES_FILEPATH = os.path.join(PWD, "id_names.json")

ID_NAMES_INFO: Dict[str, str]
with open(ID_NAMES_FILEPATH) as fp:
    ID_NAMES_INFO = _json_load(fp)
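# id_names.json is expected to deserialize into the Dict[str, str] declared
# above; a purely hypothetical couple of entries:
#
# {
#     "42": "Example Name",
#     "1337": "Another Name"
# }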