Example #1
def id_for_memo_list(denormalized_list, output_ref=False):
    if type(denormalized_list) is not list:
        raise ValueError("id_for_memo_list cannot work on subclasses of list")

    normalized_list = []

    # Normalize each element in order, so equal lists produce the same identity.
    for e in denormalized_list:
        normalized_list.append(id_for_memo(e, output_ref=output_ref))

    return serialize_object(normalized_list)[0]
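A minimal, self-contained sketch of the guard behaviour above, with pickle standing in for serialize_object and a trivial id_for_memo handling the elements; all of these stand-ins are assumptions for illustration, not the source library's API:

import pickle

def id_for_memo(obj, output_ref=False):
    # Stand-in: identify a plain value by its pickled form.
    return pickle.dumps(obj)

def serialize_object(obj):
    # Stand-in: mimic a serializer that returns a sequence of buffers.
    return (pickle.dumps(obj),)

class TaggedList(list):
    pass

id_for_memo_list([1, "two", 3.0])       # a plain list normalizes fine
try:
    id_for_memo_list(TaggedList([1]))   # subclasses of list are rejected
except ValueError as exc:
    print(exc)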
Example #2
    def make_hash(self, task):
        ''' Create a hash of the task inputs. This uses a serialization library borrowed from
        ipyparallel. If serialization fails here, calls through ipyparallel are likely to fail
        for the same reason.

        Args:
            - task (dict) : Task dictionary from dfk.tasks

        Returns:
            - hash (str) : A unique hash string
        '''

        # Serialize each input that identifies the task: the function name
        # (TODO: add fn body later), its fn_hash, args, kwargs and env.
        t = [serialize_object(task['func_name'])[0],
             serialize_object(task['fn_hash'])[0],
             serialize_object(task['args'])[0],
             serialize_object(task['kwargs'])[0],
             serialize_object(task['env'])[0]]
        # Concatenate the serialized buffers and digest them into one key.
        x = b''.join(t)
        hashedsum = hashlib.md5(x).hexdigest()
        return hashedsum
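Because the digest is a plain hex string, it can be used directly as a dictionary key. A hypothetical sketch of companion methods built on make_hash follows; the memo_lookup_table attribute and the method names are assumptions, not the source's confirmed structure.

    def update_memo(self, task, result):
        # Hypothetical sketch: cache a result under the task's hash.
        # self.memo_lookup_table is an assumed dict attribute on this class.
        self.memo_lookup_table[self.make_hash(task)] = result

    def check_memo(self, task):
        # Hypothetical sketch: return a cached result for an identical task, or None.
        return self.memo_lookup_table.get(self.make_hash(task))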
Example #3
def id_for_memo_dict(denormalized_dict, output_ref=False):
    """This normalises the keys and values of the supplied dictionary.

    When output_ref=True, the values are normalised as output refs, but
    the keys are not.
    """
    if type(denormalized_dict) is not dict:
        raise ValueError("id_for_memo_dict cannot work on subclasses of dict")

    # Sort the keys so that equal dicts normalize identically regardless of insertion order.
    keys = sorted(denormalized_dict)

    normalized_list = []
    for k in keys:
        normalized_list.append(id_for_memo(k))
        normalized_list.append(
            id_for_memo(denormalized_dict[k], output_ref=output_ref))
    return serialize_object(normalized_list)[0]
Example #4
def id_for_memo_serialize(obj, output_ref=False):
    """Normalize an object by serializing it directly (output_ref is unused here)."""
    return serialize_object(obj)[0]
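Taken together, Examples #1, #3 and #4 look like type-specific normalizers hanging off a single dispatching id_for_memo function. One way to wire that up is with functools.singledispatch; the sketch below is an assumption for illustration (including the pickle-based serialize_object stand-in), not necessarily how the source library registers them.

import pickle
from functools import singledispatch

def serialize_object(obj):
    # Stand-in serializer returning a sequence of buffers, as the examples expect.
    return (pickle.dumps(obj),)

@singledispatch
def id_for_memo(obj, output_ref=False):
    # Default case, mirroring Example #4: identify an object by its serialized form.
    return serialize_object(obj)[0]

@id_for_memo.register(list)
def _(denormalized_list, output_ref=False):
    # Mirrors Example #1: normalize each element, then serialize the whole list.
    if type(denormalized_list) is not list:
        raise ValueError("id_for_memo_list cannot work on subclasses of list")
    normalized = [id_for_memo(e, output_ref=output_ref) for e in denormalized_list]
    return serialize_object(normalized)[0]

@id_for_memo.register(dict)
def _(denormalized_dict, output_ref=False):
    # Mirrors Example #3: sort keys, normalize keys and values, then serialize.
    if type(denormalized_dict) is not dict:
        raise ValueError("id_for_memo_dict cannot work on subclasses of dict")
    normalized = []
    for k in sorted(denormalized_dict):
        normalized.append(id_for_memo(k))
        normalized.append(id_for_memo(denormalized_dict[k], output_ref=output_ref))
    return serialize_object(normalized)[0]

# Nested structures then normalize recursively through the one dispatcher:
identity = id_for_memo({"args": [1, 2, {"x": 3}], "kwargs": {}})
print(len(identity))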