def consumeItem(item):
    try:        
        if item.finished or item.status != SubmissionQueue.NOT_STARTED:
            raise "Will not process started or finished item."
        
        flagStarted(item)
        print("Consuming %s" % item)
        
        # reanalyzeTimeout = timezone.localtime(timezone.now()) - REANALYZE_EXISTING_FILE_TIMEOUT

        def callback(statusCode):
            item.status = statusCode
            item.save()

        analysisFactory = process.processURL(item.location, callback)
        
        if analysisFactory is not None and analysisFactory.fileModel is not None:
            flagFinished(item)
        else:
            flagFailed(item)
        
    except Exception as err:
        flagFailed(item)
        template = "{0} Arguments:\n{1!r}"
        message = template.format(type(err).__name__, err.args)         
        print("Failed to consume %s: %s" % (item, message))


def queueURL(request, url):
    # NOTE: hypothetical view signature; the original fragment begins
    # mid-function, so the name and parameters are assumed.
    #
    # If the URL is already queued and unfinished, just forward to the queue
    # page and don't do anything, since it's already on our list to get to.
    #
    # @TODO: this doesn't yet support re-analysis of old URLs that haven't been
    # analyzed in a long time.
    qItems = SubmissionQueue.objects.all().filter(location=url).filter(finished=False)
    if qItems.exists():
        return JSONResponse({"message": "Already in processing queue"})
    else:
        sqi = SubmissionQueue(location=url)
        sqi.save()
        return JSONResponse({"message": "Added to queue"})

    # Dead code for now: the old direct-processing path, kept for reference.
    try:
        analysisFactory = process.processURL(url)
        serializer = FileModelSerializer(analysisFactory.fileModel)
        return JSONResponse(serializer.data)
    except Exception as e:
        return JSONResponse({"error": str(e)})


def stats(request):
    counts = {}

    counts["files"] = File.objects.count()
    counts["names"] = FileName.objects.count()
    counts["metadata"] = FileMetadata.objects.count()

    return JSONResponse(counts)
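

# One way to expose the stats view from a urls.py (the route pattern, import
# path, and URL name here are assumptions, not taken from this listing):
#
#     from django.conf.urls import url
#     from . import views
#
#     urlpatterns = [
#         url(r'^stats/$', views.stats, name='stats'),
#     ]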