Example no. 1
 def _load_json(path: Path) -> Union[dict, None]:
     if not path.exists():
         return None
     with path.open('r', encoding='utf-8') as f:
         # The stream is already decoded as UTF-8, so rapidjson.load and
         # json.load can be called identically; json.load dropped its
         # `encoding` keyword in Python 3.9.
         data = load(f)
     return data
Example no. 2
async def settings_parser(guild_id):
    # Parse settings per guild, falling back to the defaults
    settings_path = (
        f'{os.path.dirname(os.path.realpath(__file__))}'
        '/cogs/core_multi_guild/cache/guild_settings.json'
    )
    with open(settings_path) as settings_json:
        settings = rapidjson.load(settings_json)
    # dict.get never raises on a missing key, so a try/except around it
    # cannot trigger; fall back to the 'default' entry explicitly
    guild_settings = settings.get(guild_id)
    if guild_settings is None:
        guild_settings = settings.get('default')
    return guild_settings
Example no. 3
    def __init__(self, n_log_names: list, s_log_names: list, c_log_name: str):
        potent = []
        fired_neuron = []
        weight = []
        fired_synapse = []
        for nl in n_log_names:
            with open(os.path.join(LOG_path, nl), 'r') as logfile:
                tmp = rapidjson.load(logfile, number_mode=rapidjson.NM_NATIVE)
                potent.append(tmp[str(MULTI_potent_log)])
                fired_neuron.append(tmp[str(MULTI_fired_neuron_log)])
        for sl in s_log_names:
            with open(os.path.join(LOG_path, sl), 'r') as logfile:
                tmp = rapidjson.load(logfile, number_mode=rapidjson.NM_NATIVE)
                weight.append(tmp[str(MULTI_weight_log)])
                fired_synapse.append(tmp[str(MULTI_fired_synapse_log)])

        with open(os.path.join(LOG_path, c_log_name)) as logfile:
            self.connections = rapidjson.load(logfile,
                                              number_mode=rapidjson.NM_NATIVE)

        def merge(threads):
            # Concatenate the per-thread records tick by tick
            return [
                [item for thread in threads for item in thread[t]]
                for t in range(len(threads[0]))
            ]

        self.merged_potent = merge(potent)
        self.merged_f_n = merge(fired_neuron)
        self.merged_weight = merge(weight)
        self.merged_f_s = merge(fired_synapse)

        self.n_num = len(self.merged_potent[0])
        self.s_num = len(self.connections)
        self.max_tick = len(self.merged_potent)
Example no. 4
def test_collection_on_post(patch, magic, collection, siren):
    request = magic()
    response = magic()
    user = magic()
    patch.object(rapidjson, 'load')
    patch.object(Collections, 'apply_owner')
    collection.on_post(request, response, user=user)
    rapidjson.load.assert_called_with(request.bounded_stream)
    collection.apply_owner.assert_called_with(user, rapidjson.load())
    collection.model.write.assert_called_with(**rapidjson.load())
    siren.__init__.assert_called_with(collection.model,
                                      collection.model.write(),
                                      request.path)
    assert response.body == siren.encode()
Example no. 5
def load_qubit_operator(file: LoadSource) -> QubitOperator:
    """Load an operator object from a file.
    Args:
        file (str or file-like object): the name of the file, or a file-like object.
    Returns:
        op (openfermion.ops.QubitOperator): the operator.
    """

    if isinstance(file, str):
        with open(file, "r") as f:
            data = json.load(f)
    else:
        data = json.load(file)

    return convert_dict_to_qubitop(data)
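
The helper accepts either a path string or an already-open stream. A minimal usage sketch, with a hypothetical file name:

# Both call styles load the same operator ("qubit_op.json" is hypothetical)
op = load_qubit_operator("qubit_op.json")
with open("qubit_op.json") as f:
    op = load_qubit_operator(f)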
Example no. 6
def load_args_from_config_file(path=None):
    quiet = False
    if path is None:
        quiet = True
        path = os.path.join(XDG_CONFIG_HOME, 'satori', 'rtm-cli.config')
    result = {}
    try:
        try:
            with open(path) as f:
                fileconfig = toml.load(f)
                for k, v in fileconfig.items():
                    print("From config file: {0} = {1}".format(k, v),
                          file=sys.stderr)
                    result[u'--' + k] = v
        except toml.TomlDecodeError:
            try:
                # Just in case the config file has the format credentials.json
                with open(path) as f:
                    fileconfig = json.load(f)
                    for k, v in fileconfig.items():
                        if k == 'auth_role_name':
                            k = 'role_name'
                        if k == 'auth_role_secret_key':
                            k = 'role_secret'
                        print("From config file: {0} = {1}".format(k, v),
                              file=sys.stderr)
                        result[u'--' + k] = v
            except ValueError:
                print("Invalid config file at {0}".format(path),
                      file=sys.stderr)
    except (IOError, OSError):
        if not quiet:
            print("Couldn't read the config file at {0}".format(path),
                  file=sys.stderr)
    return result
Example no. 7
    def __init__(self,
                 url="ws://127.0.0.1",
                 port="8964",
                 bulkSize=20,
                 userDefinedDictFILE=None):
        self.port = port
        if url.startswith("ws"):
            self.ws_url = "{}:{}/Articut/WebSocket".format(url, port)
            self.url = "{}:{}".format(url.replace("ws", "http"), port)
        elif url.startswith("http"):
            self.ws_url = "{}:{}/Articut/WebSocket".format(
                url.replace("http", "ws"), port)
            self.url = "{}:{}".format(url, port)
        else:
            self.ws_url = "ws://{}:{}/Articut/WebSocket".format(url, port)
            self.url = "http://{}:{}".format(url, port)
        #enableTrace(True)
        self.ws = create_connection("{}/API/".format(self.ws_url))
        self.ws_bulk = create_connection("{}/BulkAPI/".format(self.ws_url))

        self.bulkSize = bulkSize

        self.userDefinedDictFILE = None
        self.openDataPlaceAccessBOOL = False
        self.fileSizeLimit = 1024 * 1024 * 10  # 10 MB
        self.userDefinedDICT = {}

        if userDefinedDictFILE:
            try:
                if os.path.getsize(userDefinedDictFILE) <= self.fileSizeLimit:
                    with open(userDefinedDictFILE, "r", encoding="utf8") as f:
                        userDefinedFile = json.load(f)
                    if isinstance(userDefinedFile, dict):
                        self.userDefinedDICT = userDefinedFile
                    else:
                        # __init__ must return None, so report and bail out
                        print("User Defined File must be dict type.")
                        print("UserDefinedDICT Parsing ERROR. "
                              "Please check the format and encoding.")
                        return
                else:
                    print("Maximum file size limit is 10 MB.")
            except Exception as e:
                print("User Defined File Loading Error.")
                print(str(e))
                # __init__ must return None, so report and bail out
                return

        # Toolkit
        self.analyse = AnalyseManager()
        self.localRE = TaiwanAddressAnalizer(locale="TW")
        self.LawsToolkit = LawsToolkit()
        self.NER = GenericNER()
        self.POS = ArticutPOS()
Example no. 8
def remove_images_from_reconstruction(sfm_data, images_to_remove):
    with open(sfm_data, "r") as infile:
        data = json.load(infile)

    # Get all valid views (not in image to remove) and remove poses for images to remove.
    invalid_pose_ids = [
        view["value"]["ptr_wrapper"]["data"]["id_pose"]
        for view in data["views"]
        if view["value"]["ptr_wrapper"]["data"]["filename"] in images_to_remove
    ]
    data["extrinsics"] = [
        pose for pose in data["extrinsics"]
        if pose["key"] not in invalid_pose_ids
    ]
    valid_view_ids = [
        view["value"]["ptr_wrapper"]["data"]["id_view"]
        for view in data["views"]
    ]

    # Drop observations that reference removed views. Build a new list rather
    # than calling remove() while iterating, which skips elements.
    for structure in data["structure"]:
        structure["value"]["observations"] = [
            obs for obs in structure["value"]["observations"]
            if obs["key"] in valid_view_ids
        ]

    # Drop points observed by fewer than two views
    data["structure"][:] = [
        s for s in data["structure"]
        if len(s["value"]["observations"]) >= 2
    ]

    with open(sfm_data, "w+") as outfile:
        json.dump(data, outfile, indent=4)
Example no. 9
 def test_load_simple(self):
     jsonstr = """1"""
     stream = StringIO()
     stream.write(jsonstr)
     stream.seek(0)
     retobj = rapidjson.load(stream)
     self.assertEqual(retobj, 1)
Example no. 10
def analyze_graphs(path):

    inpath, outpath = path

    if not os.path.exists(outpath):
        try:
            with open(inpath, 'r') as f:
                all_graphs = json.load(f)

            times = list(all_graphs.keys())
            data = init_empyty_df(sorted(times))

            for t, g in all_graphs.items():
                G = json_graph.node_link_graph(g)
                #nx.write_gexf(nxG,'2018_10.gexf')
                G = nk.nxadapter.nx2nk(G)
                G.removeSelfLoops()
                #o = nk.overview(G)
                measures = graph_measures(G)
                data.loc[t] = measures

            data.to_csv(outpath)

            print('Save to %s' % outpath)
        except Exception as e:
            print('Skip %s (%s)' % (inpath, e))
Example no. 11
def _load_shard_channeldata(subdir, fn, repodata):
    pth = os.path.join("repodata-shards", get_shard_path(subdir, fn))
    if not os.path.exists(pth):
        assert False, repodata["packages"][fn]
    with open(pth, "r") as fp:
        shard = json.load(fp)
    return shard["channeldata"], shard["channeldata_version"]
Example no. 12
def assign_room(config_file, times, verbose, shuffle_room, **kwargs):
    """Assign room to sensors to optimize the sum of intra-room correlation coefficients
    """
    with open(config_file) as file:
        config_dict = rapidjson.load(file)
    config_dict["verbose"] = verbose >= 2

    for key, val in kwargs.items():
        if val:
            config_dict[key] = val

    config = config_loader.ColocationConfig(**config_dict)
    io.make_dir(config.base_file_name)

    if times == 1:
        tasks.TASKS[config.task].run(config)
        return

    accuracies = np.zeros(times)
    for i in progressbar.progressbar(range(times)):
        if shuffle_room:
            random.shuffle(config.selected_rooms)
        config.seed = i
        _, accu, _ = tasks.TASKS[config.task].run(config)
        accuracies[i] = accu

    io.save_npz(accuracies, config.join_name("accuracies.npz"))
    summary = scipy.stats.describe(accuracies)
    if verbose:
        print("mean accuracy: {}, minmax: {}".format(summary.mean,
                                                     summary.minmax))
Example no. 13
 def __init__(self):
   try:
     with open('last-plot-data.json', 'r') as f:
       self.lastJson = rapidjson.load(f)
   except (OSError, ValueError):
     # Fall back to an empty document when the file is missing or unparsable
     self.lastJson = {}
Example no. 14
    def __init__(self, url="http://127.0.0.1", port="8964", bulkSize=20, userDefinedDictFILE=None):
        self.port = port
        if "http" not in url:
            self.url = "http://{}:{}".format(url, port)
        else:
            self.url = "{}:{}".format(url, port)
        self.bulkSize = bulkSize
        self.processes = cpu_count()
        if self.processes > 8:
            self.processes = 8

        self.userDefinedDictFILE = None
        self.openDataPlaceAccessBOOL = False
        self.fileSizeLimit = 1024 * 1024 * 10    # 10 MB
        self.userDefinedDICT = {}

        if userDefinedDictFILE:
            try:
                if os.path.getsize(userDefinedDictFILE) <= self.fileSizeLimit:
                    with open(userDefinedDictFILE, "r", encoding="utf8") as f:
                        userDefinedFile = json.load(f)
                    if isinstance(userDefinedFile, dict):
                        self.userDefinedDICT = userDefinedFile
                    else:
                        # __init__ must return None, so report and bail out
                        print("User Defined File must be dict type.")
                        return
                else:
                    print("Maximum file size limit is 10 MB.")
            except Exception as e:
                print("User Defined File Loading Error.")
                print(str(e))
                # __init__ must return None, so report and bail out
                return

        # Toolkit
        self.analyse = AnalyseManager()
        self.localRE = TaiwanAddressAnalizer(locale="TW")
Example no. 15
 def test_load_with_io_stringio(self):
     jsonstr = """{"test": [1, "hello"]}"""
     stream = StringIO()
     stream.write(jsonstr)
     stream.seek(0)
     retobj = rapidjson.load(stream)
     self.assertEqual(retobj["test"], [1, "hello"])
Example no. 18
 def __load_metadata(self, file_location):
     if not os.path.exists(file_location):
         # sys.exit with a message prints it to stderr and exits with status 1
         sys.exit(f"No metadata file found at {file_location}")
     with open(file_location) as metadata_file:
         metadata = json.load(metadata_file)
     return metadata
Example no. 19
def update_postgres():
    with open('/Users/joelhoward/repos/NHLFantasyPy/PSQL_CREDENTIALS.json') as f:
        PSQL_CREDENTIALS = rapidjson.load(f)
    # The user and password values were redacted (******) in the source
    conn = psycopg2.connect("dbname=" + PSQL_CREDENTIALS['dbname'] +
                            " user=******" + " password=******")
    cur = conn.cursor()
    cur.execute("SELECT date FROM game_logs;")
    from_date = max([d[0] for d in cur.fetchall()])
    game_logs, player_info, team_info = data_scrape(do_update=True,
                                                    from_date=from_date)
    copy_from_stringio(conn, game_logs, 'game_logs')
    cur.execute("SELECT player_id FROM player_info;")
    ids = [id[0] for id in cur.fetchall()]
    new_ids = []
    for id in list(player_info['player_id']):
        if id not in ids:
            new_ids.append(id)
    if len(new_ids) > 0:
        copy_from_stringio(conn,
                           player_info[player_info['player_id'].isin(new_ids)],
                           'player_info')
    cur.execute("DELETE FROM team_info;")
    copy_from_stringio(conn, team_info, 'team_info')
    conn.commit()
    cur.close()
    conn.close()
Example no. 20
    def load(cls, loc: str):
        """
        Instantiates the DataStorage-inherited class by loading all files saved by `save` of that same class.
        :param str loc: Path to directory from which to load data
        :return: An instance of this class with the content of the files
        """

        fields = dict()
        # List of fields non-loadable using the SERIALIZATIONS functions
        generals = list()

        for field_name in cls.__dict__["__dataclass_fields__"]:
            for _, load, ext in SERIALIZATIONS.values():
                datapath = os.path.join(loc, f"{field_name}.{ext}")
                if os.path.exists(datapath):
                    fields[field_name] = load(datapath)
                    break
            else:
                generals.append(field_name)

        # Check if the field was saved with pickle
        any_json = False
        for key in generals:
            pfile = os.path.join(loc, f"{key}.{cls._pickle_ext}")
            if os.path.isfile(pfile):
                with open(pfile, "rb") as f:
                    fields[key] = pickle.load(f)
            else:
                any_json = True

        if any_json:
            with open(os.path.join(loc, cls._json_name), encoding="utf-8") as f:
                fields.update(rapidjson.load(f))

        return cls(**fields)
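
For context, `SERIALIZATIONS` above is assumed to map a format name to a (save function, load function, file extension) triple; a purely illustrative, hypothetical entry:

import numpy as np

SERIALIZATIONS = {
    # (save, load, extension) -- assumed shape, not the project's real table
    "numpy": (np.save, np.load, "npy"),
}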
Example no. 21
def json_load(datafile: IO) -> Any:
    """
    load data with rapidjson
    Use this to have a consistent experience,
    set number_mode to "NM_NATIVE" for greatest speed
    """
    return rapidjson.load(datafile, number_mode=rapidjson.NM_NATIVE)
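
A minimal usage sketch for the helper above: any readable file-like object works, and `NM_NATIVE` trades arbitrary-precision number handling for raw parsing speed.

from io import StringIO

data = json_load(StringIO('{"price": 1.25, "qty": 3}'))
assert data == {"price": 1.25, "qty": 3}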
Example no. 22
def load(
    fp: IO[str],
    object_hook: "Callable[[dict], Any]" = object_hook,
    **kwargs: Any,
) -> dict:
    """Loads a file object as JSON, with appropriate object hooks."""
    return json.load(fp, object_hook=object_hook, **kwargs)
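
A short usage sketch for the wrapper above; the set-reviving hook is hypothetical, standing in for whatever `object_hook` the module really defines:

from io import StringIO
import json

def object_hook(d):
    # Hypothetical hook: revive {"__set__": [...]} payloads as Python sets
    return set(d["__set__"]) if "__set__" in d else d

doc = StringIO('{"ids": {"__set__": [1, 2, 3]}}')
assert json.load(doc, object_hook=object_hook) == {"ids": {1, 2, 3}}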
Example no. 23
def test_file_object():
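    # Round-trip the datum through both a binary and a text temporary file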
    for stream in tempfile.TemporaryFile(), tempfile.TemporaryFile('w+', encoding='utf-8'):
        with stream:
            datum = ['1234567890', 1234, 3.14, '~𓆙~']
            rj.dump(datum, stream)
            stream.seek(0)
            assert rj.load(stream) == datum
Example no. 24
def getCityForecastDict(city):
    forecastDICT = {}
    for c in CityDICT:
        if city in c:
            # Use a context manager so the file handle is closed promptly
            with open(c, "r", encoding="UTF-8") as f:
                forecastDICT = json.load(f)
            break
    return forecastDICT
Example no. 25
def load_interaction_operator(file: LoadSource) -> InteractionOperator:
    """Load an interaction operator object from a file.
    Args:
        file (str or file-like object): the name of the file, or a file-like object.

    Returns:
        op (openfermion.ops.InteractionOperator): the operator.
    """

    if isinstance(file, str):
        with open(file, "r") as f:
            data = json.load(f)
    else:
        data = json.load(file)

    return convert_dict_to_interaction_op(data)
Example no. 26
def _read_shard_chunk(shard_pths):
    shards = []
    for shard_pth in shard_pths:
        if os.path.exists(shard_pth):
            with open(shard_pth, "r") as fp:
                shards.append(json.load(fp))
    return shards
Example no. 27
def get_last_played_level():
    mc_dir = SETTINGS.value("MinecraftDirectory",
                            utils.get_default_minecraft_dir())
    mc_saves = os.path.join(mc_dir, "saves")

    worlds_recently_modified = sorted(
        [os.path.join(mc_saves, s) for s in os.listdir(mc_saves)],
        key=os.path.getmtime,
        reverse=True)
    for world in worlds_recently_modified:
        try:
            level = NBTFile(os.path.join(world, "level.dat"))
            adv_dir = os.path.join(world, "advancements")
            with open(os.path.join(adv_dir, os.listdir(adv_dir)[0])) as f:
                advancements = dict(json.load(f))
            break
        except Exception:
            continue
    else:
        # No readable world: fail loudly instead of hitting a NameError below
        raise FileNotFoundError(f"No readable world found in {mc_saves}")

    data = {
        "name": str(level["Data"]["LevelName"]),
        "version": str(level["Data"]["Version"]["Name"]),
        "dataversion": int(str(level["Data"]["DataVersion"])),
        "adv": advancements
    }

    return data
Example no. 28
def load_config_file(path: str) -> Dict[str, Any]:
    """
    Loads a config file from the given path
    
    Parameters:
    -----------
    path: 
        path as str
        
    Return: 
    -------
        configuration as dictionary
    """
    try:
        # Read config from stdin if requested in the options
        with open(path) if path != '-' else sys.stdin as file:
            config = rapidjson.load(file, parse_mode=CONFIG_PARSE_MODE)
    except FileNotFoundError:
        raise OperationalException(
            f'Config file "{path}" not found!'
            ' Please create a config file or check whether it exists.')
    except rapidjson.JSONDecodeError as e:
        err_range = log_config_error_range(path, str(e))
        raise OperationalException(
            f'{e}\n'
            f'Please verify the following segment of your configuration:\n{err_range}'
            if err_range else 'Please verify your configuration file for syntax errors.'
        )

    return config
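
`CONFIG_PARSE_MODE` above is assumed to be a rapidjson parse-mode mask that tolerates comments and trailing commas in config files, along the lines of:

# Assumed definition; the real module may combine different flags
CONFIG_PARSE_MODE = rapidjson.PM_COMMENTS | rapidjson.PM_TRAILING_COMMAS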
Example no. 29
def load_file(path: Path) -> Dict[str, Any]:
    try:
        with path.open('r') as file:
            config = rapidjson.load(file, parse_mode=CONFIG_PARSE_MODE)
    except FileNotFoundError:
        raise OperationalException(f'File "{path}" not found!')
    return config
Example no. 30
def getRateData():
    rateDICT = None

    dateSTR = datetime.utcnow().strftime("%Y%m%d")
    filePath = os.path.join(BASE_PATH, "rate_{}.json".format(dateSTR))

    # If today's exchange-rate data already exists locally, read it directly
    if os.path.exists(filePath):
        with open(filePath, encoding="utf-8") as f:
            rateDICT = json.load(f)

    # Otherwise fetch the latest rates from the realtime exchange-rate API
    else:
        result = get("https://tw.rter.info/capi.php")
        if result.status_code == codes.ok:
            rateDICT = result.json()

            # Cache the rates locally so the next run can read them directly
            with open(filePath, "w", encoding="utf-8") as f:
                json.dump(rateDICT, f, ensure_ascii=False)

        else:
            print("[ERROR] getRateData() status_code => {}".format(
                result.status_code))

    return rateDICT
Example no. 31
def load_interaction_rdm(file: LoadSource) -> InteractionRDM:
    """Load an interaction RDM object from a file.
    Args:
        file (str or file-like object): the name of the file, or a file-like object.

    Returns:
        The interaction RDM.
    """

    if isinstance(file, str):
        with open(file, "r") as f:
            data = json.load(f)
    else:
        data = json.load(file)

    return convert_dict_to_interaction_rdm(data)
Example no. 32
def get_config_values(project_path):
    config = get_full_path(project_path)
    print(config)
    with open(config) as file:
        config_values = rapidjson.load(file)

    return config_values
Example no. 33
 def open(self, path, font=None):
     with open(path, 'r') as file:
         d = json.load(file)
     assert self.version >= d.pop(".formatVersion")
     if font is not None:
         self._font = font
     return self.structure(d, Font)
Example no. 34
 def on_post(self, request, response, **params):
     json = rapidjson.load(request.bounded_stream)
     self.apply_owner(params['user'], json)
     item = self.model.write(**json)
     if item is None:
         raise HTTPBadRequest('Bad request', 'bad request')
     body = Siren(self.model, item, request.path)
     response.body = body.encode()
Example no. 35
 def test_load(self):
     jsonstr = b"""{"test": [1, "hello"]}"""
     fp = NamedTemporaryFile(delete=False)
     fp.write(jsonstr)
     fp.close()
     check_fp = open(fp.name)
     retobj = rapidjson.load(check_fp)
     self.assertEqual(retobj["test"], [1, "hello"])
     # teardown
     check_fp.close()
     os.remove(fp.name)
Example no. 36
def test_item_on_patch(patch, magic, item, siren):
    patch.object(rapidjson, 'load')
    request = magic()
    response = magic()
    user = magic()
    item.on_patch(request, response, user=user, id=1)
    item.model.select().where.assert_called_with(False)
    user.do.assert_called_with('edit', item.model.select().where(),
                               item.model)
    user.do().get().edit.assert_called_with(rapidjson.load())
    siren.__init__.assert_called_with(item.model, user.do().get(),
                                      request.path)
    assert response.body == siren().encode()
Example no. 37
 def test_load_with_utf8(self):
     jsonstr = """{"test": [1, "こんにちは"]}"""
      fp = NamedTemporaryFile(mode='w', delete=False, encoding='utf-8')
     fp.write(jsonstr)
     fp.close()
      # Read back with an explicit encoding so the test also passes where
      # the locale default is not UTF-8
      check_fp = open(fp.name, encoding='utf-8')
     retobj = rapidjson.load(check_fp)
     self.assertEqual(retobj["test"], [1, "こんにちは"])
     # teardown
     check_fp.close()
     os.remove(fp.name)
Example no. 38
 def on_patch(self, request, response, **params):
     """
     Executes a patch request on a single item
     """
     user = params['user']
     query = self.model.select().where(self.model.id == params['id'])
     try:
         result = user.do('edit', query, self.model).get()
     except DoesNotExist:
         raise HTTPNotFound()
     json = rapidjson.load(request.bounded_stream)
     if result.edit(json) is None:
         raise HTTPBadRequest('Bad request', 'bad request')
     body = Siren(self.model, result, request.path)
     response.body = body.encode()