"""Start a crawl for one batch via the Content Analytics REST API; if a crawl
is already queued for the batch, log the conflict and report it to Zabbix."""
import sys
import logging
from datetime import datetime

from ContentAnalyticsAPIWrapper import ContentAnalyticsAPI
import HelperMethods as h


# Expected arguments: <batch_id> <secret_key> <access_id>
if len(sys.argv) < 4:
	sys.exit("Usage: %s <batch_id> <secret_key> <access_id>" % sys.argv[0])

batch_id = int(sys.argv[1])
secret_key = sys.argv[2]
access_id = sys.argv[3]

api_address = "http://127.0.0.1/rest_api/"
ca = ContentAnalyticsAPI(access_id, secret_key, api_address)

# Log to a dedicated file so each crawl run can be reviewed afterwards.
log_file = "/tmp/crawl_generate_results.log"
logging.basicConfig(filename=log_file, level=logging.DEBUG, format="%(levelname)-5s [%(asctime)s] %(message)s")

print(str(datetime.now()) + ": Start batch " + str(batch_id))

# Ask the API whether a crawl is already queued for this batch.
response = ca.get_batch_crawl_status_from_id(batch_id)

if int(response["queued"]) == 0:
	# Nothing queued yet: kick off the crawl for this batch.
	h.crawl_batch(batch_id, ca)
	#h.process_batch(batch_id, ca)
	#h.call_zabbix_sender(batch_id, 0)
else:
	# A crawl is already queued/running; report the conflict to Zabbix and exit.
	logging.error("Already crawling/generating results for batch %d. Exiting.", batch_id)
	h.call_zabbix_sender(batch_id, 1)