    '''
    Given the set of searches the scraper has produced, post each one
    individually to the /createSearch endpoint. spaces_interface.write_search_results
    should already have attached a list of the Digital Ocean (datalake) URLs to
    each result object. Returns a dict mapping each combined search term to the
    search_id returned by the endpoint.
    '''
    search_term_to_id = {}
    print(f"saving {results.length} search terms")
    for term,result in results.iterterm():
        post_result = post_search(result, '192.168.0.1')
        if not post_result:
            raise Exception("failed to post result for term " + term)
        # Attach the image URLs for each engine to the newly created search.
        # The original source URLs are only included when they can be paired
        # one-to-one with the datalake URLs; otherwise only the datalake URLs
        # are posted.
        for engine in (GOOGLE, BAIDU):
            datalake_urls = result.get_datalake_urls(engine)
            if len(result.urls[engine]) != len(datalake_urls):
                post_images(post_result["search_id"], engine, datalake_urls)
            else:
                post_images(post_result["search_id"], engine, datalake_urls, result.urls[engine])
        search_term_to_id[result.combined_term()] = post_result["search_id"]
    return search_term_to_id
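
# Illustrative sketch only (assumption): post_search and post_images are defined
# elsewhere in this module and are not shown here. The helper below is
# hypothetical. It sketches one plausible shape for the /createSearch POST using
# the requests library, with a made-up payload; the real endpoint schema and
# helper signatures may differ.
def _example_post_search(result, host):
    import requests  # local import so the sketch adds no module-level dependency

    # Hypothetical payload; only the combined term is known from this file.
    payload = {"term": result.combined_term()}
    resp = requests.post(f"http://{host}/createSearch", json=payload, timeout=10)
    if not resp.ok:
        # save_search_results treats a falsy return as a failed post.
        return None
    # Expected to be a dict containing at least "search_id".
    return resp.json()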

if __name__ == "__main__":
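    # Minimal manual smoke test: build one fake result for the term 'bunny',
    # wrap it in a ResultSetList, and post it via save_search_results.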
    from results import ResultSet, ResultSetList
    result = ResultSet('bunny', '')
    result.add(['google.com', 'bunnies.io'], GOOGLE)
    result.set_datalake_urls(['datalake.com/google.com', 'datalake.com/bunnies.io'], GOOGLE)
    results = ResultSetList()
    results.add(result)
    save_search_results(results)