def test_valid_query_cases_response_unavailable_doc_currently_uploading(self):
    self.available_doc.available = 0
    self.available_doc.save()

    # Simulate an in-progress upload: a PickledPut entry for this docket.
    ppquery = PickledPut(filename=IACommon.get_docketxml_name('nysd', '1234'))
    ppquery.save()

    response = self.client.post('/recap/query_cases/',
                                {'json': simplejson.dumps(self.valid_params)})

    # While the only copy is still uploading, the query returns an empty object.
    self.assertEquals("{}", response.content)
    PickledPut.objects.all().delete()
def test_valid_query_cases_response_old_doc_currently_uploading(self):
    self.available_doc.available = 0
    two_days_ago = datetime.datetime.now() - datetime.timedelta(2)
    self.available_doc.modified = two_days_ago
    self.available_doc.save()

    # Simulate an in-progress upload of a newer copy of the docket.
    ppquery = PickledPut(filename=IACommon.get_docketxml_name('nysd', '1234'))
    ppquery.save()

    response = self.client.post('/recap/query_cases/',
                                {'json': simplejson.dumps(self.valid_params)})
    output = simplejson.loads(response.content)

    # The stale-but-available copy should still be served.
    self.assertEquals(2, len(output))
    self.assertEquals(self.available_doc.lastdate.strftime("%m/%d/%y"),
                      output['timestamp'])
    self.assertEquals(IACommon.get_dockethtml_url('nysd', '1234'),
                      output['docket_url'])
    PickledPut.objects.all().delete()
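
# The two tests above pin down the /recap/query_cases/ contract: an empty JSON
# object while the only copy of a docket is still uploading, and a two-key
# object (timestamp, docket_url) when a stale copy can be served. A hypothetical
# client-side sketch of handling that response (the function name is
# illustrative, not part of the project):
def handle_query_cases_response(response_content):
    result = simplejson.loads(response_content)
    if not result:
        # Docket exists but is still uploading; nothing to show yet.
        return None
    # 'timestamp' is the docket's last date, formatted "%m/%d/%y".
    return result['timestamp'], result['docket_url']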
def put_file(filebits, court, casenum, docnum, subdocnum, metadict=None):
    """ PUT the file into a new Internet Archive bucket. """
    # Avoid a mutable default argument; callers may omit the metadata dict.
    if metadict is None:
        metadict = {}

    request = IACommon.make_pdf_request(filebits, court, casenum, docnum,
                                        subdocnum, metadict)

    # If this file is already scheduled for upload, drop the new request.
    filename = IACommon.get_pdfname(court, casenum, docnum, subdocnum)

    query = PickledPut.objects.filter(filename=filename)
    if query.exists():
        logging.info("put_file: same file already pickled. %s" % filename)
        return "IA PUT failed: the same file is already in the pickle bucket."

    # Add a PickledPut DB entry to schedule the PUT, not yet ready
    ppentry = PickledPut(filename=filename)

    # Guard against a race: another process may have created the same
    # entry between the existence check above and this save.
    try:
        ppentry.save()
    except IntegrityError:
        logging.info("put_file: same file already pickled. %s" % filename)
        return "IA PUT failed: the same file is already in the pickle bucket."

    # Pickle the request object into the jar
    pickle_success, message = pickle_object(request, filename)

    if pickle_success:
        # PickledPut now ready for processing.
        ppentry.ready = 1
        ppentry.save()
        logging.info("put_file: ready. %s" % filename)
    else:
        # Could not pickle object, so remove from DB
        logging.warning("put_file: could not pickle PDF. %s" % filename)
        ppentry.delete()

    return message
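
# put_file delegates serialization to a pickle_object helper defined elsewhere
# in the module. A minimal sketch of the contract inferred from the call site
# above -- pickle_object(obj, filename) returning (success, message) -- where
# the storage path and message text are assumptions, not the real
# implementation:
import os
import pickle

PICKLE_JAR = "/tmp/recap_pickle_jar"  # assumed location; configured elsewhere

def pickle_object(obj, filename):
    """Serialize obj into the pickle jar; return (success, message)."""
    path = os.path.join(PICKLE_JAR, filename + ".pickle")
    try:
        with open(path, "wb") as f:
            pickle.dump(obj, f)
        return True, "pickle succeeded."
    except (IOError, OSError, pickle.PicklingError) as err:
        return False, "pickle failed: %s" % err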
def do_me_up(docket):
    ''' Download, merge and update the docket with IA. '''
    # Pickle the docket so the cron process can upload it later.

    court = docket.get_court()
    casenum = docket.get_casenum()

    docketname = IACommon.get_docketxml_name(court, casenum)

    # Check if this docket is already scheduled to be processed.
    query = PickledPut.objects.filter(filename=docketname)

    try:
        ppentry = query[0]
    except IndexError:
        # Not already scheduled, so schedule it now.
        ppentry = PickledPut(filename=docketname, docket=1)

        try:
            ppentry.save()
        except IntegrityError:
            # Another process created the entry first; retry, which will
            # now take the already-scheduled branch below.
            do_me_up(docket)
        else:
            # Pickle this object.
            pickle_success, msg = IA.pickle_object(docket, docketname)

            if pickle_success:
                # Ready for processing.
                ppentry.ready = 1
                ppentry.save()

                logging.info("do_me_up: ready. %s" % (docketname))
            else:
                # Pickle failed, remove from DB.
                ppentry.delete()
                logging.error("do_me_up: %s %s" % (msg, docketname))

    else:
        # Already scheduled.
        # If there is a lock for this case, it's being uploaded; don't merge now.
        locked = BucketLockManager.lock_exists(court, casenum)
        if ppentry.ready and not locked:
            # Docket is waiting to be processed by cron job.

            # Revert state back to 'not ready' so we can do local merge.
            ppentry.ready = 0
            ppentry.save()

            # Fetch and unpickle the waiting docket.
            prev_docket, unpickle_msg = IA.unpickle_object(docketname)

            if prev_docket:

                # Do the local merge.
                prev_docket.merge_docket(docket)

                # Pickle it back
                pickle_success, pickle_msg = \
                    IA.pickle_object(prev_docket, docketname)

                if pickle_success:
                    # Merged and ready.
                    ppentry.ready = 1
                    ppentry.save()
                    logging.info("do_me_up: merged and ready. %s" %(docketname))
                else:
                    # Re-pickle failed, delete.
                    ppentry.delete()
                    logging.error("do_me_up: re-%s %s" % (pickle_msg,
                                                          docketname))

            else:
                # Unpickle failed
                ppentry.delete()
                IA.delete_pickle(docketname)
                logging.error("do_me_up: %s %s" % (unpickle_msg, docketname))


        # Ignore the other three possible states, because another cron job
        # is already doing work on this entry.
        # Don't delete the DB entry or pickle file.
        elif ppentry.ready and locked:
            pass
            #logging.debug("do_me_up: %s discarded, processing conflict." %
            #              (docketname))
        elif not ppentry.ready and not locked:
            pass
            #logging.debug("do_me_up: %s discarded, preparation conflict." %
            #              (docketname))
        else:
            logging.error("do_me_up: %s discarded, inconsistent state." %
                          (docketname))
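
# do_me_up dispatches on the (ready, locked) pair of an already-scheduled
# entry. A hypothetical helper (not in the original module) that names the
# four branches the code above distinguishes:
def classify_pickled_put(ready, locked):
    if ready and not locked:
        return "merge"         # waiting for cron; safe to merge locally
    elif ready and locked:
        return "uploading"     # cron is PUTting it to IA; leave it alone
    elif not ready and not locked:
        return "preparing"     # another process is still pickling it
    else:
        return "inconsistent"  # locked but not ready; shouldn't happen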