def associate_meta_data_to_objects(site, i, court, sha1_hash):
    """Build the Docket and Audio objects for item *i* scraped by *site*.

    Nothing is persisted here; the caller is responsible for saving both
    objects and wiring the Audio to its Docket.

    NOTE(review): a second ``associate_meta_data_to_objects`` is defined
    later in this file and shadows this one at import time — confirm which
    version is intended to survive.

    :param site: a scraper site object exposing parallel lists
        (case_names, case_dates, download_urls, judges, docket_numbers).
    :param i: index of the item within those parallel lists.
    :param court: the Court instance the docket belongs to.
    :param sha1_hash: hex digest of the downloaded binary.
    :returns: a ``(docket, audio_file)`` tuple of unsaved objects.
    """
    docket = Docket(
        date_argued=site.case_dates[i],
        case_name=site.case_names[i],
        court=court,
    )

    audio = Audio(
        source='C',
        sha1=sha1_hash,
        case_name=site.case_names[i],
        download_url=site.download_urls[i],
        processing_complete=False,
    )
    # These attributes are optional on the scraped site; only set them when
    # the site provides the corresponding list at all.
    if site.judges:
        audio.judges = site.judges[i]
    if site.docket_numbers:
        audio.docket_number = site.docket_numbers[i]

    return docket, audio
def associate_meta_data_to_objects(site, i, court, sha1_hash):
    """Build the Docket and Audio objects for item *i* scraped by *site*.

    Unlike the earlier definition of the same name (which this one shadows
    at import time), this version stores ``date_argued`` on the Audio
    record rather than on the Docket. Nothing is persisted here; saving is
    left to the caller.

    :param site: a scraper site object exposing parallel lists
        (case_names, case_dates, download_urls, judges, docket_numbers).
    :param i: index of the item within those parallel lists.
    :param court: the Court instance the docket belongs to.
    :param sha1_hash: hex digest of the downloaded binary.
    :returns: a ``(docket, audio_file)`` tuple of unsaved objects.
    """
    docket = Docket(
        case_name=site.case_names[i],
        court=court,
    )

    audio = Audio(
        source='C',
        sha1=sha1_hash,
        case_name=site.case_names[i],
        date_argued=site.case_dates[i],
        download_url=site.download_urls[i],
        processing_complete=False,
    )
    # Optional per-site metadata; guard on the list itself because some
    # scraped sites don't provide these fields at all.
    if site.judges:
        audio.judges = site.judges[i]
    if site.docket_numbers:
        audio.docket_number = site.docket_numbers[i]

    return docket, audio
def download_and_save():
    """Queue-consumer worker, run in many threads simultaneously.

    Each thread loops forever, pulling items off the shared module-level
    ``queue``. For every item it downloads the binary, deduplicates on its
    SHA-1, creates the Docket/Audio rows, saves the file to disk, and
    schedules async post-processing. Concurrency is bounded by the number
    of threads running this function.

    Fixes applied in review:
      - Added ``continue`` after each failure path; previously the loop
        fell through and used ``msg``/``r`` that were undefined (or stale
        from the prior iteration), and a failed disk save still created
        the database rows.
      - Repaired the garbled multi-line string literal (syntax error) in
        the disk-save error message.
      - Narrowed bare ``except:`` to ``except Exception:`` so
        SystemExit/KeyboardInterrupt still propagate.
      - ``logger.warn`` -> ``logger.warning`` (the former is deprecated).
      - Added the missing ``queue.task_done()`` on the success path so
        ``queue.join()`` can complete.
    """
    while True:
        item = queue.get()
        logger.info("Attempting to add item at: %s" % item['url'])
        try:
            msg, r = get_binary_content(
                item['url'],
                {},
            )
        except Exception:
            logger.info("Unable to get item at: %s" % item['url'])
            queue.task_done()
            continue
        if msg:
            # get_binary_content succeeded but reported a problem.
            logger.warning(msg)
            queue.task_done()
            continue

        sha1_hash = hashlib.sha1(r.content).hexdigest()
        if Audio.objects.filter(sha1=sha1_hash).exists():
            # Simpsons did it! Try the next one.
            logger.info("Item already exists, moving to next item.")
            queue.task_done()
        else:
            # New item, onwards!
            logger.info('Adding new document found at: %s' % item['url'])
            audio_file = Audio(
                source='H',
                sha1=sha1_hash,
                case_name=item['case_name'],
                date_argued=item['date_argued'],
                download_url=item['url'],
                processing_complete=False,
            )
            if item['judges']:
                audio_file.judges = item['judges']
            if item['docket_number']:
                audio_file.docket_number = item['docket_number']

            court = Court.objects.get(pk=item['court_code'])
            docket = Docket(
                case_name=item['case_name'],
                court=court,
            )

            # Make and associate the file object
            try:
                cf = ContentFile(r.content)
                extension = get_extension(r.content)
                if extension not in ['.mp3', '.wma']:
                    # Fall back to the extension in the URL itself.
                    extension = '.' + item['url'].rsplit('.', 1)[1]
                # See bitbucket issue #215 for why this must be
                # lower-cased.
                file_name = trunc(item['case_name'].lower(), 75) + extension
                audio_file.local_path_original_file.save(file_name, cf,
                                                         save=False)
            except Exception:
                msg = 'Unable to save binary to disk. ' \
                      'Deleted document: %s.\n%s' % \
                      (item['case_name'], traceback.format_exc())
                logger.critical(msg)
                queue.task_done()
                continue

            docket.save()
            audio_file.docket = docket
            audio_file.save(index=False)

            # Spread post-processing over the next hour to avoid a
            # thundering herd on the workers.
            random_delay = random.randint(0, 3600)
            process_audio_file.apply_async((audio_file.pk, ),
                                           countdown=random_delay)
            logger.info("Successfully added audio file %s: %s" %
                        (audio_file.pk, audio_file.case_name))
            # Mark the item finished so queue.join() can return.
            queue.task_done()
def download_and_save():
    """Queue-consumer worker, run in many threads simultaneously.

    Each thread loops forever, pulling items off the shared module-level
    ``queue``. For every item it downloads the binary, deduplicates on its
    SHA-1, creates the Docket/Audio rows, saves the file to disk, and
    schedules async post-processing. Concurrency is bounded by the number
    of threads running this function.

    NOTE(review): this is a near-duplicate of an earlier definition of
    the same name and shadows it at import time — consider deleting one.

    Fixes applied in review:
      - Added ``continue`` after each failure path; previously the loop
        fell through and used ``msg``/``r`` that were undefined (or stale
        from the prior iteration), and a failed disk save still created
        the database rows.
      - Repaired the garbled multi-line string literal (syntax error) in
        the disk-save error message.
      - Narrowed bare ``except:`` to ``except Exception:`` so
        SystemExit/KeyboardInterrupt still propagate.
      - ``logger.warn`` -> ``logger.warning`` (the former is deprecated).
      - Added the missing ``queue.task_done()`` on the success path so
        ``queue.join()`` can complete.
    """
    while True:
        item = queue.get()
        logger.info("Attempting to add item at: %s" % item['url'])
        try:
            msg, r = get_binary_content(
                item['url'],
                {},
            )
        except Exception:
            logger.info("Unable to get item at: %s" % item['url'])
            queue.task_done()
            continue
        if msg:
            # get_binary_content succeeded but reported a problem.
            logger.warning(msg)
            queue.task_done()
            continue

        sha1_hash = hashlib.sha1(r.content).hexdigest()
        if Audio.objects.filter(sha1=sha1_hash).exists():
            # Simpsons did it! Try the next one.
            logger.info("Item already exists, moving to next item.")
            queue.task_done()
        else:
            # New item, onwards!
            logger.info('Adding new document found at: %s' % item['url'])
            audio_file = Audio(
                source='H',
                sha1=sha1_hash,
                case_name=item['case_name'],
                date_argued=item['date_argued'],
                download_url=item['url'],
                processing_complete=False,
            )
            if item['judges']:
                audio_file.judges = item['judges']
            if item['docket_number']:
                audio_file.docket_number = item['docket_number']

            court = Court.objects.get(pk=item['court_code'])
            docket = Docket(
                case_name=item['case_name'],
                court=court,
            )

            # Make and associate the file object
            try:
                cf = ContentFile(r.content)
                extension = get_extension(r.content)
                if extension not in ['.mp3', '.wma']:
                    # Fall back to the extension in the URL itself.
                    extension = '.' + item['url'].rsplit('.', 1)[1]
                # See bitbucket issue #215 for why this must be
                # lower-cased.
                file_name = trunc(item['case_name'].lower(), 75) + extension
                audio_file.local_path_original_file.save(file_name, cf,
                                                         save=False)
            except Exception:
                msg = 'Unable to save binary to disk. ' \
                      'Deleted document: %s.\n%s' % \
                      (item['case_name'], traceback.format_exc())
                logger.critical(msg)
                queue.task_done()
                continue

            docket.save()
            audio_file.docket = docket
            audio_file.save(index=False)

            # Spread post-processing over the next hour to avoid a
            # thundering herd on the workers.
            random_delay = random.randint(0, 3600)
            process_audio_file.apply_async(
                (audio_file.pk,),
                countdown=random_delay
            )
            logger.info("Successfully added audio file %s: %s" % (
                audio_file.pk, audio_file.case_name))
            # Mark the item finished so queue.join() can return.
            queue.task_done()