# Example 1 — File: pgp.py, Project: grocid/Skier
def _discovery(search_str: str) -> None:
    """
    Begin a discovery for new keys in a search.

    Re-lists keys matching ``search_str`` from the keyring and refreshes the
    cached search result, honouring a per-search timeout so discoveries are
    not repeated too frequently.

    :param search_str: The string to search for.
    :return: None.
    """
    print("Beginning discovery of {}".format(search_str))

    base = "search-" + search_str
    # NOTE(review): "timeout-override" lacks the separating hyphen that the
    # other keys use ("-timeout", "-discovering"). It is spelled the same way
    # everywhere in this function, so it is kept as-is; confirm against
    # whatever code sets the override flag.
    override_key = base + "timeout-override"

    if cache.exists(base + "-timeout") and not cache.exists(override_key):
        # A recent discovery already ran for this search; don't repeat it
        # until the timeout is up (unless explicitly overridden).
        return

    # Inter-discovery timeout, in seconds, from the config
    # (cfg.config.discovery.timeout). Previously this was fetched but the
    # expire calls hardcoded 300, making the setting a no-op.
    timeout = cfg.config.discovery.timeout

    # Mark this search as in-flight so concurrent callers don't launch a
    # second discovery. NOTE(review): this flag has no expiry, so a crashed
    # worker would block future discoveries for this search — confirm.
    cache.set(base + "-discovering", "abc")

    keys = gpg.list_keys(keys=[search_str], sigs=True)
    js = json.dumps({"k": keys})

    if not cache.exists(base) or cache.get(base).decode() != js:
        # New or changed result set: plop it on the cache.
        cache.set(base, js)

    # (Re)arm the inter-discovery timeout.
    cache.set(base + "-timeout", "abc")
    cache.expire(base + "-timeout", timeout)

    print("Discovery finished of {}".format(search_str))
    cache.delete(override_key)
    cache.delete(base + "-discovering")
# Example 2 — File: pgp.py, Project: grocid/Skier
def get_pgp_keyinfo_old(keyid: str) -> keyinfo.KeyInfo:
    """
    Gets a :skier.keyinfo.KeyInfo: object for the specified key.

    The raw key listing is served from the cache when present; otherwise the
    keyring is queried and the listing is cached with a 5-minute expiry.

    :param keyid: The ID of the key to lookup.
    :return: A new :skier.keyinfo.KeyInfo: object for the key, or None when
             no matching key exists in the keyring.
    """
    cache_key = keyid + "-keyinfo"

    # Fast path: a listing for this key is already on the cache.
    if cache.exists(cache_key):
        listing = json.loads(cache.get(cache_key).decode())
        return keyinfo.KeyInfo.from_key_listing(listing)

    matches = gpg.list_keys(keys=[keyid], sigs=True)
    if not matches:
        return None

    # Store the raw listing on the cache with a 5-minute expiration,
    # then build the KeyInfo from it.
    cache.set(cache_key, json.dumps(matches[0]))
    cache.expire(cache_key, 300)
    return keyinfo.KeyInfo.from_key_listing(matches[0])
# Example 3 — File: pgp.py, Project: grocid/Skier
def search_through_keys_old(search_str: str) -> tuple:
    """
    Searches through the keys via ID or UID name.

    Results are served from the cache when available, with a background
    "discovery" (see :_discovery:) launched to refresh them. On a cache miss,
    the keyring is searched inline — unless the search string is at or below
    cfg.config.discovery.length_limit, in which case only a background
    discovery is started and the caller is told to retry; this prevents very
    short searches (e.g. 'a') from being used to DoS the server. The timeout
    between discoveries is tuned with cfg.config.discovery.timeout.

    :param search_str: The string to search for.
        Examples: '0xBF864998CDEEC2D390162087EB4084E3BF0192D9' for a fingerprint search
                  '0x45407604' for a key ID search
                  'Smith' for a name search
    :return: A two-tuple of (list of :skier.keyinfo.KeyInfo: objects, status),
             where status is 0 on success and -1 when the search was too
             short and the caller must wait for the discovery to finish.
    """
    cache_key = "search-" + search_str

    # 1) Serve from the cache, kicking off a background refresh if one is
    #    not already running for this search.
    if cache.exists(cache_key):
        data = json.loads(cache.get(cache_key).decode())
        keyinfos = [keyinfo.KeyInfo.from_key_listing(key) for key in data['k']]

        if not cache.exists(cache_key + "-discovering"):
            discovery_pool.apply_async(_discovery, args=(search_str,))

        return keyinfos, 0

    # 2) Cache miss on a too-short search: only start a background discovery
    #    (unless one is running) and make the caller wait for its results.
    if len(search_str) <= cfg.config.discovery.length_limit:
        if not cache.exists(cache_key + "-discovering"):
            discovery_pool.apply_async(_discovery, args=(search_str,))
        return [], -1

    # 3) Cache miss on a long-enough search: do the keyring lookup inline and
    #    save the raw listing on the cache with a 5-minute expiry.
    keys = gpg.list_keys(keys=[search_str], sigs=True)
    cache.set(cache_key, json.dumps({"k": keys}))
    cache.expire(cache_key, 300)
    return [keyinfo.KeyInfo.from_key_listing(key) for key in keys], 0