Example #1
def encode_task(task):
    if not task.deps:
        result = assoc(task.spec, 'inputs', task.inputs)
        output_sha = sha(bencode.bencode(result))
        return assoc(result, 'output', output_sha)
    first_dep = task.deps[0]
    inputs = assoc(task.inputs, first_dep.name,
                   encode_task(first_dep)['output'])
    return encode_task(Task(task.name, task.spec, task.deps[1:], inputs))
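Most of these examples use `assoc` from `toolz.dicttoolz` (or a drop-in equivalent): it returns a *new* mapping with one key set, leaving the input untouched. A minimal demonstration, assuming only the real toolz API:

from toolz.dicttoolz import assoc

spec = {'cmd': 'build'}
updated = assoc(spec, 'inputs', ['main.c'])   # copy of spec with the key set
assert updated == {'cmd': 'build', 'inputs': ['main.c']}
assert spec == {'cmd': 'build'}               # the original is never mutated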
Example #2
def test_assoc():
    assert assoc({}, "a", 1) == {"a": 1}
    assert assoc({"a": 1}, "a", 3) == {"a": 3}
    assert assoc({"a": 1}, "b", 3) == {"a": 1, "b": 3}

    # Verify immutability:
    d = {'x': 1}
    oldd = d
    assoc(d, 'x', 2)
    assert d is oldd
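For reference, a rough pure-Python sketch of what `assoc` does (the real implementation also takes a `factory` argument for the output type; this is just the core idea):

def assoc_sketch(d, key, value):
    d2 = dict(d)      # shallow copy, so the input stays untouched
    d2[key] = value
    return d2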
Example #3
    def fit(self, X, y=None, sample_weight=None, exposure=None):
        fit_args = self._process_args(X=X,
                                      y=y,
                                      sample_weight=sample_weight,
                                      exposure=exposure)

        # Create internal cross-validating estimators
        self.cross_validating_estimators_ = OrderedDict(
            (k,
             CrossValidatingEstimator(v,
                                      cv=self.cv,
                                      n_jobs=self.n_jobs,
                                      verbose=self.verbose,
                                      pre_dispatch=self.pre_dispatch))
            for k, v in self.ordered_regressors.items())

        # Fit the inner regressors using cross-validation
        for est_name, est in self.cross_validating_estimators_.items():
            if self.verbose > 0:
                print('Super learner is fitting %s...' % est_name)
            est.fit(**fit_args)
            if self.verbose > 0:
                print('Super learner finished fitting %s.' % est_name)

        # Fit the outer meta-regressor.  Cross validation is not used here.  Instead,
        # users of the SuperLearner are free to wrap the SuperLearner in a
        # CrossValidatingEstimator.
        meta_fit_args = assoc(
            fit_args, 'X',
            np.concatenate(
                tuple(map(growd(2), [
                    est.cv_predictions_
                    for est in self.cross_validating_estimators_.values()
                ])),
                axis=1))
        if self.y_transformer is not None:
            self.y_transformer_ = clone(self.y_transformer).fit(**fit_args)
            meta_fit_args = assoc(
                meta_fit_args, 'y',
                self.y_transformer_.transform(
                    **dissoc(fit_args, 'sample_weight', 'y')))

        if self.verbose > 0:
            print('Super learner fitting meta-regressor...')
        self.meta_regressor_ = clone(self.meta_regressor).fit(**meta_fit_args)
        if self.verbose > 0:
            print('Super learner meta-regressor fitting complete.')

        # All scikit-learn compatible estimators must return self from fit
        return self
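The meta-regressor step above leans on the same pattern twice: `assoc` swaps a single entry in the kwargs dict, and `dissoc` drops entries before splatting. A small sketch with hypothetical values:

from toolz.dicttoolz import assoc, dissoc

fit_args = {'X': 'raw_features', 'y': 'targets', 'sample_weight': None}
meta_args = assoc(fit_args, 'X', 'stacked_predictions')   # replace one entry
assert dissoc(fit_args, 'sample_weight', 'y') == {'X': 'raw_features'}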
Example #4
 def test_split_only_var_key(self):
     """
      Only the values for the specified key will be split, no matter how
      long the values for the other keys are.
     """
     message = 'x: {x}, y: {y}'
     event = {'x': '123', 'y': '12345'}
     result = split_cf_messages(message, 'x', max_length=14)(event)
     self.assertEqual(result, [(assoc(event, 'x', '1'), message),
                               (assoc(event, 'x', '2'), message),
                               (assoc(event, 'x', '3'), message)])
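`split_cf_messages` is project-specific, but judging from this test and the ones below, it returns a callable that formats the message and, if the result would exceed max_length, splits the named field's value and emits one (event, message) pair per piece, using `assoc` so the input event is never mutated. A hypothetical reconstruction from the tests alone:

from toolz.dicttoolz import assoc

def split_cf_messages_sketch(message, key, max_length=255):
    def splitter(event):
        value = event[key]
        if isinstance(value, list):
            value = ', '.join(map(str, value))
        if len(message.format(**assoc(event, key, value))) <= max_length:
            return [(assoc(event, key, value), message)]
        # room left for the field once the rest of the message is laid out
        room = max(1, max_length - len(message.format(**assoc(event, key, ''))))
        pieces = [value[i:i + room] for i in range(0, len(value), room)] or ['']
        return [(assoc(event, key, piece), message) for piece in pieces]
    return splitter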
Example #5
    def test_assoc(self):
        D, kw = self.D, self.kw
        assert assoc(D({}), "a", 1, **kw) == D({"a": 1})
        assert assoc(D({"a": 1}), "a", 3, **kw) == D({"a": 3})
        assert assoc(D({"a": 1}), "b", 3, **kw) == D({"a": 1, "b": 3})

        # Verify immutability:
        d = D({'x': 1})
        oldd = d
        assoc(d, 'x', 2, **kw)
        assert d is oldd
Example #6
 def test_no_split_on_empty_field(self):
     """
      Do not split the event if the field is an empty list, even if it is
      too long.
     """
     message = 'Hello {there} human being {punctuation}'
     event = {'there': [], 'punctuation': '!', 'extra': 'unused'}
     result = split_cf_messages(message, 'there', max_length=5)(event)
     self.assertEqual(result, [(assoc(event, 'there', ''), message)])
Example #7
 def test_no_need_to_split_if_below_length(self):
     """
     Do not split the event if the message is sufficiently short.  However,
     do format it so that the list becomes a comma-separated string.
     """
     message = "Hello {there} human being {punctuation}"
     event = {"there": [1, 2, 3, 4], "punctuation": "!", "extra": "unused"}
     result = split_cf_messages(message, "there")(event)
     self.assertEqual(result, [(assoc(event, "there", "1, 2, 3, 4"), message)])
Example #8
 async def create(self, dto: CreateTodoItemDto):
     # NOTE: `last` assumes self.items already has at least one entry and that
     # keys were inserted in increasing order; an empty store needs a guard.
     self.items, new_item = pipe(
         self.items.keys(),
         last,
         lambda key: key + 1,
         lambda new_key: TodoItem(
             id=new_key, msg=dto.msg, is_done=dto.is_done),
         lambda item: (assoc(self.items, item.id, item), item),
     )
     return new_item
Example #9
def get_credentials(client_secret_file: str,
                    token_storage_file: str,
                    user_id: str) -> Credentials:
    tokens = _get_tokens(token_storage_file)
    credentials = option.cata(
        Credentials,
        lambda: _get_new_credentials(client_secret_file)
    )(tokens.get(user_id))
    _save_tokens(token_storage_file, assoc(tokens, user_id, credentials.token))
    return credentials
Example #10
 def on_message(self, headers, message):
     self._logger.info(headers)
     decoded_messages = json.loads(message)
     self._logger.info('Received a total of {} message(s)'
                       .format(len(decoded_messages)))
     for decoded_message in decoded_messages:
         doc = assoc(decoded_message, 'type', headers['subscription'])
         r.table('raw_messages').insert(doc).run(conn)
         tm.process_message(doc, conn)
     self._mq.ack(id=headers['message-id'],
                  subscription=headers['subscription'])
Example #11
def test_dicttoolz():
    d1 = {'foo': 'bar'}
    d2 = {'baz': 'quux'}
    assert_that(merge(d1, d2)).is_equal_to({'foo': 'bar', 'baz': 'quux'})
    assert_that(d1).is_equal_to({'foo': 'bar'})
    assert_that(assoc(d1, 'a', 1)).is_equal_to({'foo': 'bar', 'a': 1})
    assert_that(dissoc(d2, 'baz')).is_equal_to({})
    struct = {'a': [{'c': 'hello'}]}
    assert_that(get_in(['a', 0, 'c'], struct)).is_equal_to(struct['a'][0]['c'])
    assert_that(get_in(['a', 0, 'd'], struct,
                       'not found')).is_equal_to('not found')
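Several of the later examples also use the nested companions from `toolz.dicttoolz`; for quick reference (real toolz API, hypothetical data):

from toolz.dicttoolz import assoc_in, update_in, get_in

state = {'worker': {'pos': (0, 0)}}
moved = assoc_in(state, ['worker', 'pos'], (1, 0))    # set a nested key
bumped = update_in(state, ['worker', 'pos'], lambda p: (p[0], p[1] + 1))
assert get_in(['worker', 'pos'], bumped) == (0, 1)
assert state == {'worker': {'pos': (0, 0)}}           # input untouched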
Example #12
def _update_state(state, action):
    new_worker_pos = _move_projection_tile(state['worker']['pos'], action)
    if new_worker_pos is not None:
        state = tzd.assoc_in(state, ['worker', 'pos'], new_worker_pos)

    # without joining on example 2, 500 iters: 22.36s
    # with joining on example 2, 500 iters: 3.87s
    new_wrapped_shells = state['wrapped_shells'] + [_pt2shell(state['worker']['pos'])]
    wrapped = shapely.ops.unary_union([shapely.geometry.Polygon(sh)
                                       for sh in new_wrapped_shells])
    wrapped_simple = wrapped.simplify(0.01)
    return tzd.assoc(state, 'wrapped_shells', _polygon2shells(wrapped_simple))
Example #13
def _predict_action(state):
    mine = shapely.geometry.Polygon(state['desc']['mine_shell'])
    obstacles = [shapely.geometry.Polygon(sh) for sh in state['desc']['obstacle_shells']]
    obstacle = shapely.ops.unary_union(obstacles)
    situable = mine.difference(obstacle)
    wrappeds = [shapely.geometry.Polygon(sh) for sh in state['wrapped_shells']]
    wrapped = shapely.ops.unary_union(wrappeds)
    not_wrapped = situable.difference(wrapped)

    if not_wrapped.area < 1.0:
        return None, state

    last_move = state.get('last_move', 'W')
    for move in [last_move, 'W', 'S', 'A', 'D']:
        proj = _move_projection_center(state['worker']['pos'], move)
        if not_wrapped.contains(proj):
            return move, tzd.dissoc(state, 'path_pts_to_not_wrapped')

    if not state.get('path_pts_to_not_wrapped'):
        target_tile = tzf.thread_first(not_wrapped.representative_point(),
                                       _shapely_point2pt,
                                       _snap_to_tile)
        print('Finding shortest path from tile {} to {}'.format(state['worker']['pos'], target_tile))

        if tzd.get_in(['cache', 'incidence_m'], state) is None:
            incidence_m = _incidence_matrix(situable)
            state = tzd.assoc_in(state, ['cache', 'incidence_m'], incidence_m)
        else:
            incidence_m = state['cache']['incidence_m']

        target_vertex_ind = _incidence_ind(target_tile[0], target_tile[1], x_size=math.ceil(situable.bounds[2]))
        path_dists, path_predecessors = sp.sparse.csgraph.shortest_path(csgraph=incidence_m,
                                                                        directed=False,
                                                                        return_predecessors=True,
                                                                        unweighted=True,
                                                                        indices=target_vertex_ind)
        start_vertex_ind = _incidence_ind(state['worker']['pos'][0],
                                          state['worker']['pos'][1],
                                          x_size=math.ceil(situable.bounds[2]))

        path_inds = _path_inds(path_predecessors, start_vertex_ind)
        path_pts = [_incidence_pt(ind, x_size=math.ceil(situable.bounds[2]))
                    for ind in path_inds]
        print('Found path: {}'.format(path_pts))
        state = tzd.assoc(state, 'path_pts_to_not_wrapped', path_pts)

    path_move = _projection_pt_move(state['worker']['pos'], state['path_pts_to_not_wrapped'][0])
    if path_move is not None:
        return path_move, tzd.update_in(state, ['path_pts_to_not_wrapped'], lambda p: p[1:])

    return 'Z', state
Example #14
def filterFeatures(match):
    usableKeys = ['players', 'radiant_win', 'hero_id', 'player_slot']
    isUsable = lambda k: k in usableKeys
    toplvlFiltered = keyfilter(isUsable, match)

    filteredPlayers = []
    for player in toplvlFiltered['players']:
        side = decideSide(player['player_slot'])
        playerData = assoc(keyfilter(isUsable, player), 'team', side)
        playerData = dissoc(playerData, 'player_slot')
        filteredPlayers.append(playerData)
    toplvlFiltered['players'] = filteredPlayers

    return toplvlFiltered
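`keyfilter` (also from `toolz.dicttoolz`) keeps only the entries whose key satisfies the predicate, again without mutating the input. With hypothetical data shaped like the matches above:

from toolz.dicttoolz import keyfilter

match = {'players': [], 'radiant_win': True, 'lobby_type': 7}
assert keyfilter(lambda k: k in ('players', 'radiant_win'), match) == \
    {'players': [], 'radiant_win': True}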
Example #15
def test_assoc():
    assert assoc({}, "a", 1) == {"a": 1}
    assert assoc({"a": 1}, "a", 3) == {"a": 3}
    assert assoc({"a": 1}, "b", 3) == {"a": 1, "b": 3}

    # Verify immutability:
    d = {'x': 1}
    oldd = d
    assoc(d, 'x', 2)
    assert d is oldd

    # Test object support:
    c = C()
    assert assoc(c, "a", 1).__dict__ == {"a": 1}
    c.a = 1
    assert assoc(c, "a", 3).__dict__ == {"a": 3}
    assert assoc(c, "b", 3).__dict__ == {"a": 1, "b": 3}

    # Verify immutability:
    o = C()
    o.x = 1
    assoc(o, 'x', 2)
    assert o.x == 1
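Unlike plain `toolz.dicttoolz.assoc`, the variant under test here also accepts arbitrary objects, returning a copy with the attribute set. A minimal sketch of that behaviour (an assumption based only on the assertions above, not on any particular library's implementation):

import copy

def assoc_any(obj, key, value):
    if isinstance(obj, dict):
        new = dict(obj)          # dicts: shallow copy, then set the key
        new[key] = value
    else:
        new = copy.copy(obj)     # objects: shallow copy, then set the attribute
        setattr(new, key, value)
    return new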
Example #16
 async def update(self, dto: UpdateTodoItemDto, id_: int):
     item = get_in([id_], self.items)
     if not item:
         return None
     self.items, new_item = pipe(
         (item, dto),
         lambda items: {
             **items[0].dict(),
             **items[1].dict(exclude_defaults=True)
         },
         lambda data: TodoItem(**data),
         lambda todo: (assoc(self.items, id_, todo), todo),
     )
     return new_item
Example #17
 def test_success(self, mock_gsgr):
     rows = [
         {'created_at': '0', 'desired': 'some', 'status': 'ACTIVE'},
         {'desired': 'some', 'status': 'ACTIVE'},  # no created_at
         {'created_at': '0', 'status': 'ACTIVE'},  # no desired
         {'created_at': '0', 'desired': 'some'},   # no status
         {'created_at': '0', 'desired': 'some', 'status': 'DISABLED'},
         {'created_at': '0', 'desired': 'some', 'deleting': 'True', },
         {'created_at': '0', 'desired': 'some', 'status': 'ERROR'}]
     rows = [assoc(row, "tenantId", "t1") for row in rows]
     mock_gsgr.return_value = succeed(rows)
     results = self.successResultOf(get_scaling_groups("client"))
     self.assertEqual(results, {"t1": [rows[0], rows[3]]})
     mock_gsgr.assert_called_once_with(
         "client", props=["status", "deleting", "created_at"])
Example #18
def set_desired_to_actual_group(dispatcher, cass_client, group):
    """
    Set group's desired to current number of servers in the group
    """
    try:
        res_eff = get_all_launch_server_data(
            group["tenantId"], group["groupId"], datetime.utcnow())
        eff = Effect(TenantScope(res_eff, group["tenantId"]))
        resources = yield perform(dispatcher, eff)
        actual = active_servers_count(resources["servers"])
        print("group", group, "setting desired to ", actual)
        yield cass_client.execute(
            ('UPDATE scaling_group SET desired=:desired WHERE '
             '"tenantId"=:tenantId AND "groupId"=:groupId'),
            assoc(group, "desired", actual), DEFAULT_CONSISTENCY)
    except Exception as e:
        # NOTE: `actual` is unbound here if the failure happened before it was
        # assigned, which would turn this print into a NameError.
        print("Couldn't set group {} to {} due to {}".format(group, actual, e))
Example #19
def mark_deleted_servers(old, new):
    """
    Given dictionaries containing old and new servers, return a list of all
    servers, with the deleted ones annotated with a status of DELETED.

    :param list old: List of old servers
    :param list new: List of latest servers
    :return: List of updated servers
    """
    def sdict(servers):
        return {s['id']: s for s in servers}

    old = sdict(old)
    new = sdict(new)
    deleted_ids = set(old.keys()) - set(new.keys())
    for sid in deleted_ids:
        old[sid] = assoc(old[sid], "status", "DELETED")
    return merge(old, new).values()
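A quick usage sketch for the function above, with made-up server records:

old = [{'id': 'a', 'status': 'ACTIVE'}, {'id': 'b', 'status': 'ACTIVE'}]
new = [{'id': 'a', 'status': 'BUILD'}]
servers = list(mark_deleted_servers(old, new))
# 'a' comes back with its latest data; 'b' is annotated with status 'DELETED'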
Example #20
def test_trezor_sign_transaction(mock_trezor, mock_account):
    trezor_signer = TrezorSigner()
    transaction_dict = assoc(TRANSACTION_DICT,
                             key='from',
                             value=mock_account.address)
    signed_transaction = trezor_signer.sign_transaction(
        transaction_dict=transaction_dict)
    assert isinstance(signed_transaction, HexBytes)

    # assert valid deserializable transaction
    transaction = Transaction.from_bytes(signed_transaction)

    # Confirm the integrity of the recipient address ('to' must not change,
    # and must differ from the sender)
    failure_message = 'WARNING: transaction "to" field was mutated'
    recipient_checksum_address = to_checksum_address(transaction.to)
    assert recipient_checksum_address != mock_account.address, failure_message
    assert recipient_checksum_address == TRANSACTION_DICT['to'], failure_message
    assert recipient_checksum_address == transaction_dict['to']  # positive case
    assert recipient_checksum_address != mock_account.address  # negative case
Example #21
def blast2summary_dict(db, blastpath, ete2_db):  # (Path, Path, Path) -> Iterable[dict]
    """Read a blast output file, then look up all seqids to get taxids with a
    single blastdbcmd. Then look up the taxonomy via ETE2 using the taxid and
    add that info to the blast info."""
    rows = csv.DictReader(open(blastpath),
                          delimiter='\t',
                          fieldnames=blast_columns)
    rows = list(rows)
    seqids = map(get('sseqid'), rows)
    taxids = get_taxid(db, seqids)

    def get_gi(s):
        fields = s.split('|')
        if len(fields) > 1:
            return fields[1]
        else:
            raise ValueError("Seq ID %s is missing GI fields and '|'" % s)

    gis = imap(get_gi, seqids)
    #TODO: change matches to use something unique--not the TAXID! actually, why is it a dict
    # in the first place? it should be a list of dictionaries, and then map over
    # the dictionaries to merge them with the taxonomy info
    # this will replace the lines:
    # matches = . . .
    # items = . . .
    #matches = dict((taxids[gi], row) for gi, row in zip(gis,rows) if gi in taxids)
    ncbi = NCBITaxa(ete2_db)  # downloads and builds the SQLite database if needed
    # items = dictmap(lambda tid,row: merge(row, taxonomy(ncbi, tid)), matches)
    matches = [
        assoc(row, 'taxid', taxids[gi]) for gi, row in zip(gis, rows)
        if gi in taxids
    ]
    items = [merge(row1, taxonomy(ncbi, row1['taxid'])) for row1 in matches]
    res = imap(partial(keyfilter, csv_fields.__contains__), items)
    return res
Example #22
def get_metric(name, k):
    return {name: assoc(METRICS[name], 'k', k)}
Example #23
File: map.py Project: tek/tryp.py
 def cat(self, item: Tuple[A, B]):
     return Map(dicttoolz.assoc(self, *item))
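Assuming `Map` is a dict-like wrapper that can be rebuilt from a plain mapping, `cat` unpacks the (key, value) pair straight into `assoc`:

m = Map({'a': 1})
m2 = m.cat(('b', 2))         # same as Map(assoc(m, 'b', 2))
assert m2 == Map({'a': 1, 'b': 2})
assert m == Map({'a': 1})    # the original Map is left alone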
Example #24
    items = ujson.load(args.input_metadata_file)

    print('loading stats file..', file=sys.stderr)
    stats = ujson.load(args.input_stat_file)

    # create dictionary of stats

    zero_vector = return_zero_vector(stats, args.label)


    # composing from the right will make this code more readable
    compute_labels = compose(partial(compute_label, zero_vector, args.label))

    print('[')
    prev = None
    for x in map(lambda item: assoc(item, 'label', compute_labels(item)), items):
        if not x['label']:
            continue
        if prev is not None:
            print(',')
        print(json.dumps(x, indent=2, separators=(',', ': ')), end='')
        prev = x
    print('\n]')
Example #25
 def run(f, *args, **kwargs):
     result = f(*args, **kwargs)
     result.addCallback(lambda x: (x[0], assoc(x[1], 'locked', True)))
     return result
Example #26
 async def replace(self, dto: CreateTodoItemDto, id_: int):
     if not get_in([id_], self.items):
         return None
     self.items = assoc(self.items, id_,
                        TodoItem(id=id_, msg=dto.msg, is_done=dto.is_done))
     return get_in([id_], self.items)
Example #27
def http_json_rpc(token=None,
                  uri=None,
                  verify_ssl=None,
                  json_data=None,
                  workspace=None,
                  proxies=None,
                  fire_and_forget=False,
                  check_allow_transmit=None):
    """
    Sends a json_rpc request over http.

    Returns:
        dict: The decoded response from the server.
    Args:
        token (str): oauth2 token
        uri (str): the server uri to connect to
        verify_ssl (bool): passed to requests. flag to check the server's certs, or not.
        json_data (json-encodable object): the payload to send
        workspace (int): workspace to connect to. If None, let the server connect to the default workspace for your user or token
        proxies (dict): Dictionary mapping protocol or protocol and hostname to the URL of the proxy.
        fire_and_forget (bool,optional): return from the method after the request is sent (not wait for response)
        check_allow_transmit (callable, optional): For use in retry, callable method to see if retries are still valid to send
    """
    def auth_header():
        if workspace:
            return "Bearer_{}_ws{}".format(token, workspace)
        else:
            return "Bearer_{}".format(token)

    def streamable():
        return bool(json_data and json_data.get('method') in STREAM_ENDPOINTS)

    if token:
        headers = {
            "Authorization": auth_header(),
            "Content-Type": "application/json",
        }
    else:
        headers = {
            "Content-Type": "application/json",
        }
    # NOTE: `json` here exposes orjson-style options (OPT_NAIVE_UTC,
    # OPT_NON_STR_KEYS), so it is presumably `import orjson as json`.
    payload = json.dumps(assoc(json_data, 'id', 0),
                         default=unsupported_object_json_encoder,
                         option=json.OPT_NAIVE_UTC | json.OPT_NON_STR_KEYS)

    def get_session():
        if fire_and_forget:
            return FuturesSession()
        return requests.sessions.Session()

    with get_session() as session:
        if streamable():
            retry = 0
        else:
            retry = RPCRetry(check_allow_transmit=check_allow_transmit)
        adapter = HTTPAdapter(max_retries=retry)
        session.mount('http://', adapter)
        session.mount('https://', adapter)

        if streamable():
            handle, file_name = tempfile.mkstemp(dir=download_folder,
                                                 prefix="download_",
                                                 suffix=".tmp")
            os.close(handle)  # can't control the access mode, so close this one and open another
            with open(file_name, 'wb') as tmp_file:
                with session.post(uri,
                                  headers=headers,
                                  data=payload,
                                  verify=verify_ssl,
                                  proxies=proxies,
                                  allow_redirects=False,
                                  stream=True) as response:
                    response.raise_for_status()
                    try:
                        result = response.json()
                        return result
                    except Exception:
                        # ideally JSONDecodeError, but which library raises it
                        # (json or simplejson) depends on what is installed
                        pass
                    for chunk in response.iter_content(chunk_size=None):
                        tmp_file.write(chunk)
            return file_name
        elif fire_and_forget:
            r_future = session.post(uri,
                                    headers=headers,
                                    data=payload,
                                    verify=verify_ssl,
                                    proxies=proxies,
                                    allow_redirects=False)

            # Adding a callback that will raise an exception if there was a problem with the request
            def on_request_complete(request_future):
                try:
                    response = request_future.result()
                    response.raise_for_status()
                except Exception:
                    print(f'Exception for method {json_data.get("method")}')
                    raise

            r_future.add_done_callback(on_request_complete)
        else:
            try:
                response = session.post(uri,
                                        headers=headers,
                                        data=payload,
                                        verify=verify_ssl,
                                        proxies=proxies,
                                        allow_redirects=False)
                response.raise_for_status()
                result = response.json()
                return result
            except Exception:
                print(f'Exception for method {json_data.get("method")}')
                raise
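A hypothetical call, with placeholder values throughout (the endpoint, token, and payload shape are all assumptions; the function only requires that json_data be a JSON-encodable mapping, since it assoc's an 'id' onto it):

result = http_json_rpc(
    token='my-oauth2-token',                                   # placeholder
    uri='https://rpc.example.com/',                            # placeholder
    verify_ssl=True,
    json_data={'jsonrpc': '2.0', 'method': 'ping', 'params': []},
)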