def lock(request):
    """Acquire the bucket lock for a court/case pair on behalf of an uploader.

    Responds with "1<br><nonce>" on success, or "0<br><error>" on any
    failure (missing args, bad auth key, or lock contention).
    """
    params = request.GET

    # Pull the required query parameters; any missing one raises KeyError.
    try:
        key = params["key"].strip()
        court = params["court"].strip()
        casenum = params["casenum"].strip()
    except KeyError:
        # Fail. Missing required arguments.
        return HttpResponse("0<br>Missing arguments.")

    # Optional flag: any truthy presence means "one lock per uploader".
    one_per_uploader = 1 if params.get('one_per_uploader') else 0

    # Authenticate the uploader by its API key.
    matches = Uploader.objects.filter(key=key)
    if not matches:
        # Fail. No auth key match.
        return HttpResponse("0<br>Authentication failed.")
    uploader_id = matches[0].id

    # Try to grab the lock.
    nonce, errmsg = BucketLockManager.get_lock(court, casenum,
                                               uploader_id,
                                               one_per_uploader)
    if nonce:
        return HttpResponse("1<br>%s" % nonce)
    return HttpResponse("0<br>%s" % errmsg)
def lock(request):
    """Acquire the bucket lock for a court/case pair on behalf of an uploader.

    A returned nonce of 'bigdoc' is treated as a failure, so the response is
    "1<br><nonce>" only for a real lock; otherwise "0<br><error>".
    """
    params = request.GET

    # Pull the required query parameters; any missing one raises KeyError.
    try:
        key = params["key"].strip()
        court = params["court"].strip()
        casenum = params["casenum"].strip()
    except KeyError:
        # Fail. Missing required arguments.
        return HttpResponse("0<br>Missing arguments.")

    # Optional flag: any truthy presence means "one lock per uploader".
    one_per_uploader = 1 if params.get('one_per_uploader') else 0

    # Authenticate the uploader by its API key.
    matches = Uploader.objects.filter(key=key)
    if not matches:
        # Fail. No auth key match.
        return HttpResponse("0<br>Authentication failed.")
    uploader_id = matches[0].id

    # Try to grab the lock.
    nonce, errmsg = BucketLockManager.get_lock(court, casenum,
                                               uploader_id,
                                               one_per_uploader)
    # Success only when we got a nonce and it isn't the 'bigdoc' marker.
    if nonce and nonce != 'bigdoc':
        return HttpResponse("1<br>%s" % nonce)
    return HttpResponse("0<br>%s" % errmsg)
def _cron_put_pickles():
    """Cron worker: process every ready PickledPut, one case's lock at a time.

    Walks all ready, unprocessed pickles ordered by filename (descending),
    acquiring the bucket lock once per (court, casenum) group, dispatching
    each unpickled object to the docket-XML or PDF handler, and dropping
    the lock when the case changes and again after the loop.
    """
    # Get uploader credentials.
    uploader_query = Uploader.objects.filter(key=AUTH_KEY)
    try:
        RECAP_UPLOADER_ID = uploader_query[0].id
    except IndexError:
        # No uploader configured with this key -- nothing we can do.
        print " could not find uploader with key=%s" % AUTH_KEY
        return

    # Get all ready pickles.  Descending filename order groups entries for
    # the same case together, which the lock-reuse logic below relies on.
    query = PickledPut.objects.filter(ready=1, processing=0) \
        .order_by('-filename')

    # Set all ready pickles to the processing state
    #for ppentry in query:
    #    ppentry.processing = 1
    #    ppentry.save()

    # Keep track of court, casenum. Only lock and unlock once for each case.
    curr_court = None
    curr_casenum = None
    lock_nonce = None

    # Process pickles one at a time.
    for ppentry in query:
        filename = ppentry.filename

        # Court and case number are encoded in the pickle's filename.
        ppmeta = IACommon.get_meta_from_filename(filename)

        court = ppmeta["court"]
        casenum = ppmeta["casenum"]

        # Make sure we have the lock for this case.
        if curr_court == court and curr_casenum == casenum:
            # Same case as the previous ppentry.
            if not lock_nonce:
                # Skip if we don't have the lock already.
                # ppentry.processing = 0
                # ppentry.save()
                continue
            # Otherwise, we already have the lock, so continue.
        else:
            # Switching to a new case.

            # Drop the current lock (from previous case), if necessary.
            if curr_court and curr_casenum:
                dropped, errmsg = BucketLockManager.drop_lock(curr_court,
                                                              curr_casenum,
                                                              RECAP_UPLOADER_ID,
                                                              nolocaldb=1)
                if not dropped:
                    # Best-effort warning; processing continues regardless.
                    print " %s.%s someone stole my lock?" % \
                        (court, unicode(casenum))

            # Grab new lock
            curr_court = court
            curr_casenum = casenum

            lock_nonce, errmsg = BucketLockManager.get_lock(court, casenum,
                                                            RECAP_UPLOADER_ID,
                                                            one_per_uploader=1)
            if not lock_nonce:
                print " Passing on %s.%s: %s" % (court, casenum, errmsg)
                # We don't have a lock, so don't drop the lock in the next loop
                curr_court = None
                curr_casenum = None
                continue

            # We'll always have the lock here.

        # Unpickle the object
        obj, unpickle_msg = unpickle_object(filename)

        # Two cases for the unpickled object: Request or DocketXML
        if obj and ppentry.docket:
            _cron_process_docketXML(obj, ppentry)
        elif obj:
            # Dispatch the PUT request
            _cron_process_PDF(obj, ppentry)
        else:
            # Unpickling failed
            # If unpickling fails, it could mean that another cron job
            # has already finished this PP - not sure how to distinguish this
            print " %s %s (Another cron job completed?)" % (filename,
                                                            unpickle_msg)

            # Delete the entry from the DB
            ppentry.delete()
            # Delete the pickle file
            delete_pickle(filename)

    # Drop last lock
    if curr_court and curr_casenum:
        dropped, errmsg = BucketLockManager.drop_lock(curr_court,
                                                      curr_casenum,
                                                      RECAP_UPLOADER_ID,
                                                      nolocaldb=1)
        if not dropped:
            # NOTE(review): court/casenum here are the loop's last values,
            # which equal curr_court/curr_casenum at this point.
            print " %s.%s someone stole my lock??" % (court, unicode(casenum))
def _cron_put_pickles():
    """Cron worker: process every ready PickledPut, one case's lock at a time.

    Walks all ready, unprocessed pickles ordered by filename (descending),
    acquiring the bucket lock once per (court, casenum) group, dispatching
    each unpickled object to the docket-XML or PDF handler, and dropping
    the lock when the case changes and again after the loop.  A 'bigdoc'
    lock nonce is treated the same as no lock: the case is skipped.
    """
    # Get uploader credentials.
    uploader_query = Uploader.objects.filter(key=AUTH_KEY)
    try:
        RECAP_UPLOADER_ID = uploader_query[0].id
    except IndexError:
        # No uploader configured with this key -- nothing we can do.
        print " could not find uploader with key=%s" % AUTH_KEY
        return

    # Get all ready pickles.  Descending filename order groups entries for
    # the same case together, which the lock-reuse logic below relies on.
    query = PickledPut.objects.filter(ready=1, processing=0) \
        .order_by('-filename')

    # Set all ready pickles to the processing state
    # for ppentry in query:
    #     ppentry.processing = 1
    #     ppentry.save()

    # Keep track of court, casenum. Only lock and unlock once for each case.
    curr_court = None
    curr_casenum = None
    lock_nonce = None

    # Process pickles one at a time.
    for ppentry in query:
        filename = ppentry.filename

        # Court and case number are encoded in the pickle's filename.
        ppmeta = IACommon.get_meta_from_filename(filename)

        court = ppmeta["court"]
        casenum = ppmeta["casenum"]

        # Make sure we have the lock for this case.
        if curr_court == court and curr_casenum == casenum:
            # Same case as the previous ppentry.
            if not lock_nonce:
                # Skip if we don't have the lock already.
                # ppentry.processing = 0
                # ppentry.save()
                continue
            # Otherwise, we already have the lock, so continue.
        else:
            # Switching to a new case.

            # Drop the current lock (from previous case), if necessary.
            if curr_court and curr_casenum:
                dropped, errmsg = BucketLockManager.drop_lock(
                    curr_court, curr_casenum, RECAP_UPLOADER_ID, nolocaldb=1)
                if not dropped:
                    # Best-effort warning; processing continues regardless.
                    print " %s.%s someone stole my lock?" % \
                        (court, unicode(casenum))

            # Grab new lock
            curr_court = court
            curr_casenum = casenum

            lock_nonce, errmsg = BucketLockManager.get_lock(court, casenum,
                                                            RECAP_UPLOADER_ID,
                                                            one_per_uploader=1)
            if not lock_nonce:
                print " Passing on %s.%s: %s" % (court, casenum, errmsg)
            if not lock_nonce or lock_nonce == 'bigdoc':
                # We don't have a lock, so don't drop the lock in the next loop
                curr_court = None
                curr_casenum = None
                continue

            # We'll always have the lock here.

        # Unpickle the object
        obj, unpickle_msg = unpickle_object(filename)

        # Two cases for the unpickled object: Request or DocketXML
        if obj and ppentry.docket:
            print "Processing docket: %s" % filename
            _cron_process_docketXML(obj, ppentry)
        elif obj:
            # Dispatch the PUT request
            _cron_process_PDF(obj, ppentry)
        else:
            # Unpickling failed
            # If unpickling fails, it could mean that another cron job
            # has already finished this PP - not sure how to distinguish this
            print " %s %s (Another cron job completed?)" % (filename,
                                                            unpickle_msg)

            # Delete the entry from the DB
            ppentry.delete()
            # Delete the pickle file
            delete_pickle(filename)

    # Drop last lock
    if curr_court and curr_casenum:
        dropped, errmsg = BucketLockManager.drop_lock(curr_court,
                                                      curr_casenum,
                                                      RECAP_UPLOADER_ID,
                                                      nolocaldb=1)
        if not dropped:
            # NOTE(review): court/casenum here are the loop's last values,
            # which equal curr_court/curr_casenum at this point.
            print " %s.%s someone stole my lock??" % (court, unicode(casenum))