def jobsfs_to_mongo(guid, buid, name):
    """Composed method for responding to a guid update."""
    assert re.match(r'^[0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12}$',
                    guid.upper()), \
        "%s is not a valid guid" % guid
    assert re.match(r'^\d+$', str(buid)), "%s is not a valid buid" % buid

    logger.info("Updating Job Source %s", guid)

    # Make the BusinessUnit and Company
    create_businessunit(buid)
    bu = BusinessUnit.objects.get(id=buid)
    bu.title = name
    bu.save()
    add_company(bu)

    # Look up the jobs, filter them, transform them, and then load them.
    zf = get_jobsfs_zipfile(guid)
    jobs = get_jobs_from_zipfile(zf, guid)
    jobs = filter_current_jobs(jobs, bu)
    jobs = (hr_xml_to_json(job, bu) for job in jobs)
    jobs = list(jobs)
    for job in jobs:
        job['guid'] = job['guid'].lower()

    if len(jobs) > 0:
        collection = connect_db().db.jobs
        bulk = collection.initialize_unordered_bulk_op()
        for job in jobs:
            # Upsert on guid: replace the existing document or insert a new one.
            bulk.find({'guid': job['guid']}).upsert().replace_one(job)
        bulk.execute()

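# The bulk API above (initialize_unordered_bulk_op) was removed in PyMongo 4.
# A minimal sketch of the same unordered upsert-replace pass using the
# bulk_write API instead (assuming PyMongo >= 3.x; the helper name is
# hypothetical, and connect_db and the job dict shape are taken from
# jobsfs_to_mongo above):
def upsert_jobs_bulk(jobs):
    """Replace-or-insert each job document, keyed on its guid."""
    from pymongo import ReplaceOne

    requests = [ReplaceOne({'guid': job['guid']}, job, upsert=True)
                for job in jobs]
    if requests:
        # ordered=False mirrors the unordered bulk op: operations may run in
        # any order, and one failure does not halt the rest of the batch.
        connect_db().db.jobs.bulk_write(requests, ordered=False)
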
def update_job_source(guid, buid, name, clear_cache=False):
    """Composed method for responding to a guid update."""
    logger.info("Updating Job Source %s", guid)

    # Make the BusinessUnit and Company
    create_businessunit(buid)
    bu = BusinessUnit.objects.get(id=buid)
    bu.title = name
    bu.save()
    add_company(bu)

    # Look up the jobs, filter them, transform them, and then load them.
    zf = get_jobsfs_zipfile(guid)
    jobs = get_jobs_from_zipfile(zf, guid)
    jobs = filter_current_jobs(jobs, bu)
    jobs = [hr_xml_to_json(job, bu) for job in jobs]
    for job in jobs:
        job['link'] = make_redirect(job, bu).make_link()

    add_jobs(jobs)
    remove_expired_jobs(buid, jobs)

    # Update business information
    bu.associated_jobs = len(jobs)
    bu.date_updated = datetime.datetime.utcnow()
    bu.save()

    if clear_cache:
        # Clear the cache in 25 minutes to allow for solr replication.
        tasks.task_clear_bu_cache.delay(buid=bu.id, countdown=1500)

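# Scheduling note (a sketch, assuming task_clear_bu_cache is an ordinary
# Celery task with no countdown parameter of its own): delay() forwards every
# keyword argument to the task body, while execution options such as
# countdown are passed through apply_async.  The hypothetical helper below
# shows the apply_async spelling of the same 25-minute deferral:
def clear_bu_cache_later(buid, delay_seconds=1500):
    """Queue the cache-clear task to run after delay_seconds seconds."""
    tasks.task_clear_bu_cache.apply_async(kwargs={'buid': buid},
                                          countdown=delay_seconds)
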
def update_job_source(guid, buid, name, clear_cache=False):
    """Composed method for responding to a guid update."""
    # Note: assert is a statement, not a function.  Wrapping the condition
    # and message in parentheses would assert a two-element tuple, which is
    # always truthy, so the check would never fire.
    assert re.match(r'^[0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12}$',
                    guid.upper()), \
        "%s is not a valid guid" % guid
    assert re.match(r'^\d+$', str(buid)), "%s is not a valid buid" % buid

    logger.info("Updating Job Source %s", guid)

    # Make the BusinessUnit and Company
    create_businessunit(buid)
    bu = BusinessUnit.objects.get(id=buid)
    bu.title = name
    bu.save()
    add_company(bu)

    # Look up the jobs, filter them, transform them, and then load them.
    zf = get_jobsfs_zipfile(guid)
    jobs = get_jobs_from_zipfile(zf, guid)
    jobs = filter_current_jobs(jobs, bu)
    jobs = (hr_xml_to_json(job, bu) for job in jobs)
    jobs = (add_redirect(job, bu) for job in jobs)

    # AT&T showed that large numbers of MOCs can cause import issues due to
    # the size of the documents, so lower the document chunk size for AT&T.
    if int(buid) == 19389:
        logger.warning("AT&T has large amounts of mapped_mocs that cause "
                       "problems. Reducing chunk size.")
        upload_chunk_size = 64
    else:
        upload_chunk_size = 1024

    job_ids = add_jobs(jobs, upload_chunk_size)
    remove_expired_jobs(buid, job_ids)

    # Update business information
    bu.associated_jobs = len(job_ids)
    bu.date_updated = datetime.datetime.utcnow()
    bu.save()

    if clear_cache:
        # Clear the cache in 25 minutes to allow for solr replication.
        tasks.task_clear_bu_cache.delay(buid=bu.id, countdown=1500)

def update_job_source(guid, buid, name):
    """Composed method for responding to a guid update."""
    assert re.match(r'^[0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12}$',
                    guid.upper()), \
        "%s is not a valid guid" % guid
    assert re.match(r'^\d+$', str(buid)), "%s is not a valid buid" % buid

    logger.info("Updating Job Source %s", guid)

    # Make the BusinessUnit and Company
    create_businessunit(buid)
    bu = BusinessUnit.objects.get(id=buid)
    bu.title = name
    bu.save()
    add_company(bu)

    # Look up the jobs, filter them, transform them, and then load them.
    zf = get_jobsfs_zipfile(guid)
    jobs = get_jobs_from_zipfile(zf, guid)
    jobs = filter_current_jobs(jobs, bu)
    jobs = (hr_xml_to_json(job, bu) for job in jobs)
    jobs = (add_redirect(job, bu) for job in jobs)

    # AT&T showed that large numbers of MOCs can cause import issues due to
    # the size of the documents, so lower the document chunk size for AT&T.
    if int(buid) == 19389:
        logger.warning("AT&T has large amounts of mapped_mocs that cause "
                       "problems. Reducing chunk size.")
        upload_chunk_size = 64
    else:
        upload_chunk_size = 1024

    job_ids = add_jobs(jobs, upload_chunk_size)
    remove_expired_jobs(buid, job_ids)

    # Update business information
    bu.associated_jobs = len(job_ids)
    bu.date_updated = datetime.datetime.utcnow()
    bu.save()

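# add_jobs presumably consumes the job generator in batches of
# upload_chunk_size documents, which is what makes the smaller AT&T chunk
# size matter.  A minimal sketch of that chunked-consumption pattern with
# itertools.islice (the helper name is hypothetical):
import itertools

def in_chunks(iterable, chunk_size):
    """Yield lists of at most chunk_size items from any iterable."""
    iterator = iter(iterable)
    while True:
        chunk = list(itertools.islice(iterator, chunk_size))
        if not chunk:
            break
        yield chunk

# Usage: for batch in in_chunks(jobs, upload_chunk_size): index the batch,
# keeping each upload request below the document size that tripped up AT&T.
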
def send_sns_confirm(response):
    """
    Called when a job feed file is ready to be imported.  Calls celery
    update tasks.

    """
    # Postajob buids and state job bank buids
    allowed_buids = [1228, 5480] + list(range(2650, 2704))
    logger.info("sns received for SEOXML")
    if response:
        if response['Subject'] != 'END':
            buid = response['Subject']
            if int(buid) in allowed_buids:
                logger.info("Creating update_solr task for %s" % buid)
                set_title = helpers.create_businessunit(int(buid))
                task_update_solr.delay(buid, force=True, set_title=set_title)
                task_seoxml_to_mongo.delay(buid)
            else:
                logger.info("Skipping update_solr for %s because it is not "
                            "in the allowed buids list." % buid)
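
# Example invocation (a sketch: the payload shape is assumed from the
# response['Subject'] access above, where the SNS notification carries the
# feed's buid as its Subject and 'END' marks the end of the feed run):
#
#     send_sns_confirm({'Subject': '2650'})  # allowed buid: queues both tasks
#     send_sns_confirm({'Subject': 'END'})   # end-of-run sentinel: no tasks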