def saveevent(request):
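    '''
    Save an event posted by the client. This service requires POST requests.
    '''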
    try:
        if request.method != 'POST':
            raise Exception('This service requires POST requests.')
        __save_event_helper(request)
        # Return a success response as JSON.
        return HttpResponse(ApiResponse(success=True, message='Event saved successfully!').to_json(), mimetype="application/json")
    except Exception as ex:
        # Respond with error as JSON.
        return HttpResponse(ApiResponse.from_exception(ex).to_json(), mimetype="application/json")
def viewevents(request):
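    '''
    Return heartbeat events as JSON, optionally filtered by a timestamp range and/or an event tag,
    along with the list of allowed event keys and the min/max timestamps seen.
    '''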
    try:
        events = []
        allowedKeys = []
        tsMax = 0
        tsMin = sys.maxint
        hasFilter = False
        queryFilter = {}
        # If datetime range is supplied, use that, else return all events.
        timestampStart = timestampEnd = None
        if 'timestamp_start' in request.GET and request.GET['timestamp_start']:
            timestampStart = datetime.datetime.fromtimestamp(float(request.GET['timestamp_start']))
        if 'timestamp_end' in request.GET and request.GET['timestamp_end']:
            timestampEnd = datetime.datetime.fromtimestamp(float(request.GET['timestamp_end']))
        # If tag is supplied, use that, else return all events.
        eventTag = None
        if 'event_tag' in request.GET and request.GET['event_tag']:
            eventTag = __sanitize_input(request.GET['event_tag'])
            queryFilter['event_tags'] = eventTag
            hasFilter = True
        # Get heartbeat events.
        if timestampStart is not None and timestampEnd is not None:
            queryFilter['event_timestamp'] = {"$gte": timestampStart, "$lt": timestampEnd}
            hasFilter = True
        # If we have a filter, use a raw query.
        if hasFilter:
            eventQuery = Event.objects(__raw__=queryFilter)
        else:
            eventQuery = Event.objects()
        for event in eventQuery.order_by('+event_timestamp'):
            eventDict = event.to_dict()
            if eventDict['event_timestamp'] > tsMax:
                tsMax = eventDict['event_timestamp']
            if eventDict['event_timestamp'] < tsMin:
                tsMin = eventDict['event_timestamp']
            events.append(eventDict)
        # Get allowed keys.
        for allowedKey in Allowed_event_key.objects.order_by('+key'):
            allowedKeys.append(allowedKey.key)
        # Return the events and metadata as JSON.
        response = {
            'heartbeat_events': events,
            'timestamp_max': tsMax,
            'timestamp_min': tsMin,
            'allowed_event_keys': allowedKeys
        }
        ser = json.dumps(response, default=json_util.default)
        return HttpResponse(ser, mimetype="application/json")
    except Exception as ex:
        # Respond with error as JSON.
        return HttpResponse(ApiResponse.from_exception(ex).to_json(), mimetype="application/json")
def viewtwitterevents(request):
    '''
    Query the Twitter Search API for a given hashtag and date range, using the TwitterSearch Python bindings:
    https://github.com/ckoepp/TwitterSearch
    '''
    try:
        # Hashtag is required.
        hashtag = None
        query = None
        if 'hashtag' in request.GET and len(request.GET['hashtag']) > 0:
            hashtag = __sanitize_input(request.GET['hashtag'])
        else:
            raise Exception('Hashtag is required!')
        # If datetime range is supplied, use that, else return all events.
        dtStart = dtEnd = None
        strStart = strEnd = None
        if 'date_start' in request.GET and request.GET['date_start']:
            strStart = request.GET['date_start']
            dtStart = datetime.datetime.strptime(strStart, "%Y-%m-%d").date()
        if 'date_end' in request.GET and request.GET['date_end']:
            strEnd = request.GET['date_end']
            dtEnd = datetime.datetime.strptime(strEnd, "%Y-%m-%d").date()
        # Check for the "no tweet cap" param.
        noTweetCap = False
        if 'no_tweet_cap' in request.GET and request.GET['no_tweet_cap'] == 'true':
            noTweetCap = True
        # First, check the cache for the Twitter API result.
        # Build a cache key from the hashtag and the (optional) date range.
        cacheKey = hashtag + '_' + (strStart or '') + '_' + (strEnd or '')
        secondaryCacheFilePath = settings.SECONDARY_CACHE_DIRECTORY + cacheKey + '.json'
        response = cache.get(cacheKey)
        if response is None:
            # In-memory cache missed; fall back to the secondary file cache on disk.
            if os.path.isfile(secondaryCacheFilePath):
                with open(secondaryCacheFilePath, "r") as textFile:
                    response = json.load(textFile)
        if response is None or len(response) == 0:
            totalEventCnt = 0
            totalEventCntThresh = 0
            events = []
            tsMax = 0
            tsMin = sys.maxint
            # Set up the Twitter search order (language, page size, result type).
            tso = TwitterSearchOrder()
            tso.setLanguage('en')
            tso.setCount(100)
            tso.setIncludeEntities(False)
            tso.setResultType('recent')
            # Create a TwitterSearch object with our secret tokens
            twitterSearch = TwitterSearch(
                consumer_key=settings.TWITTER_CONSUMER_KEY,
                consumer_secret=settings.TWITTER_CONSUMER_SECRET,
                access_token=settings.TWITTER_ACCESS_TOKEN,
                access_token_secret=settings.TWITTER_ACCESS_TOKEN_SECRET
             )
            twitterSearch.authenticate()
            # Construct and run the twitter search query.
            query = hashtag
            if dtStart is not None and dtEnd is not None:
                tso.setUntil(dtEnd)
            tso.setKeywords([query])
            maxId = 0
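            # Prime tweetCnt with a sentinel so the first page request does not trip the zero-result exit check.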
            tweetCnt = MAX_INT32
            doLoop = True
            # Page through the Twitter search API results until we either get no results or we arrive at the start date.
            while doLoop:
                # Exit condition: the previous page returned no tweets.
                if tweetCnt == 0:
                    break
                # On subsequent pages, constrain results to tweets older than the last one seen.
                if maxId > 0:
                    tso.setMaxID(maxId)
                    tso.setKeywords([hashtag])
                # Reset counter.
                tweetCnt = 0
                # Reset last tweet.
                lastTweet = None
                # Create an additional retry loop for when Twitter refuses the next page.
                try:
                    for tweet in twitterSearch.searchTweetsIterable(tso):
                        dt = __getDateFromTweetCreatedAt(tweet['created_at'])
                        if dtStart is not None and dt.date() < dtStart:
                            doLoop = False
                            break
                        ts = time.mktime(dt.timetuple())
                        if ts > tsMax:
                            tsMax = ts
                        if ts < tsMin:
                            tsMin = ts
                        lastTweet = tweet
                        # Copy search results to the Event list.
                        events.append({
                            'event_key': hashtag,
                            'event_datetime': str(tweet['created_at']),
                            'event_timestamp': ts,
                            'event_value': tweet['text'],
                            'event_tags': [hashtag],
                            'raw_data': tweet
                        })
                        # Increment counter.
                        tweetCnt += 1
                        totalEventCnt += 1
                        totalEventCntThresh += 1
                        if totalEventCntThresh >= 1000:
                            print('Processed ' + str(totalEventCnt) + ' tweets.')
                            totalEventCntThresh = 0
                        # Exit conditions:
                        if not noTweetCap and totalEventCnt >= settings.TWITTER_SEARCH_API_TWEET_CAP:
                            doLoop = False
                            break
                except Exception as ex:
                    # Wait and then try last request again.
                    sleepDurationSeconds = 900  # 15 minutes.
                    print("Got exception when querying Twitter search API: " + ex.message)
                    # Save the partial events list gathered so far to the secondary cache.
                    with open(settings.SECONDARY_CACHE_DIRECTORY + cacheKey + '-part-' + str(totalEventCnt) + '.json', "w") as textFile:
                        textFile.write(json.dumps(events, default=json_util.default))
                    print("Sleeping for " + str(sleepDurationSeconds) + " seconds.")
                    time.sleep(sleepDurationSeconds)
                    # Reset the tweet counter to make sure we don't artificially trigger the loop exit condition.
                    tweetCnt = -1
                    print("Time to wake up and try again from maxId = " + str(maxId))
                if lastTweet is not None:
                    maxId = long(lastTweet['id_str'])
            # Build the response containing the tweet events and metadata.
            response = {
                'heartbeat_events': events,
                'timestamp_max': tsMax,
                'timestamp_min': tsMin,
                'allowed_event_keys': [hashtag]
            }
            # Now cache response.
            cache.set(cacheKey, response, 43200)  # 12 hours
            # Finally, store the events in a text file (TODO: I may remove this later).
            with open(secondaryCacheFilePath, "w") as textFile:
                textFile.write(json.dumps(response, default=json_util.default))
        ser = json.dumps(response, default=json_util.default)
        return HttpResponse(ser, mimetype="application/json")
    except Exception as ex:
        # Respond with error as JSON.
        return HttpResponse(ApiResponse.from_exception(ex).to_json(), mimetype="application/json")