def filterLeadsBySource(request, id):
    """Return one page of leads filtered by analytics source and an
    optional created-date range, as JSON {'count', 'results'}.

    Query params: source, page_number, per_page (required);
    start_date / end_date as epoch seconds (optional).
    """
    company_id = request.user.company_id
    start_date = request.GET.get('start_date')
    end_date = request.GET.get('end_date')
    source = request.GET.get('source')
    page_number = int(request.GET.get('page_number'))
    items_per_page = int(request.GET.get('per_page'))
    offset = (page_number - 1) * items_per_page

    # Convert epoch-second strings to timezone-aware local datetimes.
    local_start_date = None
    local_end_date = None
    if start_date is not None:
        local_start_date_naive = datetime.fromtimestamp(float(start_date))
        local_start_date = get_current_timezone().localize(local_start_date_naive, is_dst=None)
    if end_date is not None:
        local_end_date_naive = datetime.fromtimestamp(float(end_date))
        local_end_date = get_current_timezone().localize(local_end_date_naive, is_dst=None)

    try:
        querydict = {'company_id': company_id,
                     'leads__hspt__properties__hs_analytics_source': source}
        # Bug fix: only constrain by date when the caller supplied a range;
        # previously a missing start_date/end_date raised a NameError because
        # local_start_date / local_end_date were referenced unconditionally.
        if local_start_date is not None:
            querydict['leads__hspt__properties__createdate__gte'] = local_start_date
        if local_end_date is not None:
            querydict['leads__hspt__properties__createdate__lte'] = local_end_date
        total = Lead.objects(**querydict).count()
        leads = Lead.objects(**querydict).skip(offset).limit(items_per_page)
        serializer = LeadSerializer(leads, many=True)
        return JsonResponse({'count': total, 'results': serializer.data})
    except Exception as e:
        return JsonResponse({'Error': str(e)})
def write_to_database(self, lead_info, company_id):
    """Persist one scraped lead dict as a Lead row, then flag the dict
    as written so callers can tell the record reached the database."""
    owning_company = Company.objects.get(pk=int(company_id))
    record = Lead(
        name=' '.join(lead_info['name']),
        phone_number=lead_info['phone'][0],
        email=lead_info['lead_email'],
        renter_brand=lead_info['renterBrand'][0],
        sent_text_date=lead_info['sent_text_date'],
        sent_email_date=lead_info['sent_email_date'],
        date_of_inquiry=lead_info['date_of_inquiry'],
        company=owning_company,
    )
    record.save()
    # Mark the source dict after a successful save.
    lead_info['written_to_database'] = True
def handle(self, *args, **options):
    """Duplicate every Lead flagged `fed_everyday` as a one-off record.

    Each copy gets a synthetic unique email, `fed_everyday=False` (so the
    copy is not itself re-copied / does not skew progression data) and a
    fresh created_at timestamp.
    """
    try:
        leadList = Lead.objects.all()
    except Lead.DoesNotExist:
        # Bug fix: the original caught `leadList.DoesNotExist`, but
        # `leadList` is unbound if the query raised — that except clause
        # would itself fail with a NameError.
        raise CommandError('leadList does not exist')
    for lead in leadList:
        if lead.fed_everyday:
            lead_copy = Lead(
                # not so good solution: needed to copy and register a unique field
                email="id_%s_of_%s_%s" % (lead.id, uuid.uuid4(), lead.email),
                food=lead.food,
                kindoffood=lead.kindoffood,
                how_much_food=lead.how_much_food,
                measure=lead.measure,
                how_many_ducks=lead.how_many_ducks,
                fed_time=lead.fed_time,
                # avoids false data progression
                fed_everyday=False,
                address=lead.address,
                geolocation=lead.geolocation,
                created_at=time.strftime("%d-%m-%Y-%H-%M-%S"))
            lead_copy.save()
            self.stdout.write(
                self.style.SUCCESS('Successfully copied Lead "%s"' % lead.id))
def saveHsptContacts(user_id=None, company_id=None, leadList=None):
    """Upsert Hubspot contacts from `leadList` into the master Lead collection.

    Matching order per contact:
      1. existing Lead with the same hspt_id -> refresh its leads['hspt'] doc;
      2. else, if the contact carries a Salesforce lead id, attach the
         Hubspot doc to the matching SFDC Lead;
      3. else create a brand-new Lead record.
    Any exception aborts the whole batch and is reported via
    send_notification rather than raised.
    """
    try:
        for newLead in leadList:
            # mongoengine document -> plain dict of its field values
            newLead = vars(newLead)['_field_values']
            hspt_id = str(newLead['vid'])
            # check if there is a corresponding lead from SFDC
            hspt_sfdc_id = None
            if 'salesforceleadid' in newLead['properties']:
                hspt_sfdc_id = str(newLead['properties']['salesforceleadid'])
            # temp fix by satya till SFDC ID field in Hubspot is discovered
            existingLeadSfdc = None
            existingLead = None
            existingLead = Lead.objects(Q(company_id=company_id) & Q(hspt_id=hspt_id)).first()
            if existingLead is not None and 'hspt' in existingLead.leads:
                # we found this lead already in the DB -> refresh the embedded doc
                Lead.objects(Q(company_id=company_id) & Q(hspt_id=hspt_id)).update(leads__hspt=newLead)
            elif existingLead is None:
                # this lead does not exist under its Hubspot id
                if hspt_sfdc_id is not None:
                    # but it carries an SFDC lead id — try to match on that
                    print 'found lead with SFDC ID ' + str(hspt_sfdc_id)
                    existingLeadSfdc = Lead.objects(Q(company_id=company_id) & Q(leads__sfdc__Id=hspt_sfdc_id)).first()
                    if existingLeadSfdc is not None:
                        # we found a SFDC lead record which is matched to this new lead:
                        # attach the Hubspot doc and id to it
                        existingLeadSfdc.hspt_id = hspt_id
                        existingLeadSfdc.leads['hspt'] = newLead
                        existingLeadSfdc.save()
            if existingLeadSfdc is None and existingLead is None:
                # no matches found so save new record
                lead = Lead()
                lead.hspt_id = hspt_id
                lead.company_id = company_id
                lead.leads["hspt"] = newLead
                lead.save()
    except Exception as e:
        send_notification(dict(type='error', success=False, message=str(e)))
def form_valid(self, form, *args):
    """Create a Lead from the validated form data, reporting success or
    failure to the user via the messages framework, then delegate to the
    parent form_valid."""
    try:
        lead = Lead(**form.cleaned_data)
        lead.save()  # create the link object when the form is valid
    except Exception:
        # Narrowed from a bare `except:` so SystemExit / KeyboardInterrupt
        # are no longer swallowed. (User-facing message: "Message not sent".)
        messages.error(self.request, "Сообщение не передано")
    else:
        # User-facing message: "Message sent, await a reply".
        messages.success(self.request, "Сообщение передано, ждите ответа")
    return super().form_valid(form, *args)
def post(self, request, *args, **kwargs):
    """Create a Lead from the POSTed form; anonymous users are redirected
    to the login view."""
    if not self.request.user.is_authenticated():
        return HttpResponseRedirect(reverse('login-view'))
    form = self.get_form()
    if not form.is_valid():
        return self.form_invalid(form)
    payload = dict(self.request.POST.items())
    # Replace the raw source id with its model instance.
    payload['source'] = LeadSource.objects.get(id=payload['source'])
    # The CSRF token is not a Lead field.
    payload.pop('csrfmiddlewaretoken')
    new_lead = Lead(created_by=self.request.user,
                    assign_to=self.request.user,
                    **payload)
    new_lead.save()
    return self.form_valid(form)
def contact_page(request):
    """Contact page: on a valid POST, store the message as a Lead.

    Bug fixes: the bound form (with its validation errors) is re-rendered
    on failure — previously it was unconditionally replaced by an empty
    form, hiding the errors from the user — and the leftover debug
    `print(form)` is removed.
    """
    form = LeadForm()
    if request.method == 'POST':
        form = LeadForm(request.POST)
        if form.is_valid():
            new_lead = Lead()
            new_lead.name = request.POST.get('name', '')
            new_lead.subject = request.POST.get('subject', '')
            new_lead.email = request.POST.get('email', '')
            new_lead.message = request.POST.get('message', '')
            new_lead.save()
            messages.success(
                request, '{}, Your message was delivered!'.format(new_lead.name))
            # Success: present a fresh, empty form again.
            form = LeadForm()
        else:
            messages.error(request, 'Something is going wrong! Try again!')
    context = {'form': form}
    return render(request, 'leads/contact.html', context=context)
def test_save_lead_and_active_client(self):
    """Lead state transitions must toggle the owning client's `active` flag."""
    lead = Lead.objects.get(id=1)
    lead.state = "LOST"
    lead.save()
    lead = Lead.objects.get(id=1)
    self.assertTrue(lead.client.active)  # There's still another active lead for this client
    otherLead = Lead.objects.get(id=3)
    otherLead.state = "SLEEPING"
    otherLead.save()
    lead = Lead.objects.get(id=1)
    self.assertFalse(lead.client.active)
    newLead = Lead()
    newLead.subsidiary_id = 1
    newLead.client = lead.client
    newLead.save()
    # Bug fix: refetch so the assertion checks the DB state, not the stale
    # in-memory object (matches the sibling version of this test).
    lead = Lead.objects.get(id=1)
    self.assertTrue(lead.client.active)  # A new lead on this client should mark it as active again
def saveSfdcAccountsToMaster(user_id=None, company_id=None, job_id=None, run_type=None):
    """Upsert staged SFDC account records into the master Account collection,
    linking each account to the leads that reference it via sfdc_account_id.

    run_type 'initial' reads the full TempData snapshot; anything else reads
    TempDataDelta. Any exception aborts the batch and is reported via
    send_notification.
    """
    if run_type == 'initial':
        accounts = TempData.objects(Q(company_id=company_id) & Q(record_type='account') & Q(source_system='sfdc') & Q(job_id=job_id)).only('source_record')
    else:
        accounts = TempDataDelta.objects(Q(company_id=company_id) & Q(record_type='account') & Q(source_system='sfdc') & Q(job_id=job_id)).only('source_record')
    accountListTemp = list(accounts)
    accountList = [i['source_record'] for i in accountListTemp]
    try:
        for newAccount in accountList:
            sfdc_id = str(newAccount['Id'])
            # find all leads that have this account ID
            relatedLeadList = []
            relatedLeadListTemp = None
            # if SFDC Account, then matching lead must have a SFDC Contact ID
            relatedLeads = Lead.objects(Q(company_id=company_id) & Q(sfdc_account_id=sfdc_id)).only('sfdc_contact_id')
            if relatedLeads is not None:
                relatedLeadListTemp = [lead.to_mongo().to_dict() for lead in relatedLeads]
                for i in relatedLeadListTemp:
                    if 'sfdc_contact_id' in i:
                        relatedLeadList.append({'sfdc_contact_id': i['sfdc_contact_id']})
            print 'account id is ' + sfdc_id
            existingAccount = None
            existingAccount = Account.objects(Q(company_id=company_id) & Q(sfdc_id=sfdc_id)).first()
            if existingAccount is not None:
                # we found this account already in the DB -> refresh it
                print 'found existing account for id ' + str(sfdc_id)
                if 'sfdc' in existingAccount.accounts:
                    # NOTE(review): these direct key reads assume Name /
                    # AccountSource / Industry / CreatedDate are always present
                    # on the SFDC record — confirm against the extractor.
                    existingAccount.source_name = newAccount['Name']
                    existingAccount.source_source = newAccount['AccountSource']
                    existingAccount.source_industry = newAccount['Industry']
                    existingAccount.source_created_date = newAccount['CreatedDate']
                    existingAccount.accounts["sfdc"] = newAccount
                    if relatedLeadList is not None:
                        existingAccount.leads = relatedLeadList
                    existingAccount.save()
                else:
                    existingAccount.accounts['sfdc'] = {}
                    existingAccount.accounts['sfdc'] = newAccount
                    if relatedLeadList is not None:
                        existingAccount.leads = relatedLeadList
                    existingAccount.save()
            elif existingAccount is None:
                # this account does not exist -> create it via the helper
                account = _saveSfdcNewAccount(sfdc_id, newAccount, relatedLeadList, company_id)
    except Exception as e:
        print 'exception while saving accounts ' + str(e)
        send_notification(dict(type='error', success=False, message=str(e)))
def saveSfdcContactHistoryToMaster(user_id=None, company_id=None, job_id=None, run_type=None):
    """Append staged SFDC contact-history records to the matching Lead's
    activities['sfdc'] list, de-duplicating on the activity Id.

    run_type 'initial' reads TempData, anything else TempDataDelta; both go
    through the raw pymongo collection so results can stream in batches.
    Any exception aborts the batch and is reported via send_notification.
    """
    if run_type == 'initial':
        collection = TempData._get_collection()
        activities = collection.find({'company_id': int(company_id), 'record_type': 'contact_history', 'source_system': 'sfdc', 'job_id': job_id}, projection={'source_record': True}, batch_size=1000)
    else:
        collection = TempDataDelta._get_collection()
        activities = collection.find({'company_id': int(company_id), 'record_type': 'contact_history', 'source_system': 'sfdc', 'job_id': job_id}, projection={'source_record': True}, batch_size=1000)
    try:
        print 'got history ' + str(activities.count())
        for activity in activities:
            newActivity = activity['source_record']
            addThisActivity = True
            print 'act is ' + str(newActivity)
            leadId = newActivity["ContactId"]
            if leadId is not None:
                print 'trying to get lead'
                existingLead = Lead.objects(Q(company_id=company_id) & Q(sfdc_contact_id=leadId)).first()
                print 'contact is ' + str(existingLead)
            else:
                # no contact id on the record -> nothing to attach it to
                continue
            if existingLead is None:
                continue
            if 'sfdc' in existingLead['activities']:
                # there are activities from SFDC for this leadId:
                # skip the append if this activity Id is already stored
                for existingActivity in existingLead['activities']['sfdc']:
                    if existingActivity['Id'] == newActivity['Id']:
                        # this activity already exists so exit the loop
                        addThisActivity = False
                        break
                if addThisActivity:
                    existingLead['activities']['sfdc'].append(newActivity)
                    existingLead.save()
                    print 'saved new activity 1'
            else:
                # first SFDC activity for this lead -> start the list
                print 'no sfdc acts'
                addThisActivity = True
                sfdc = []
                print 'appending'
                sfdc.append(newActivity)
                print 'appended'
                existingLead['activities']['sfdc'] = sfdc
                existingLead.save()
                print 'saved new activity 2'
    except Exception as e:
        print 'exception while saving SFDC Contact history to master' + str(e)
        send_notification(dict(
            type='error', success=False,
            message=str(e)
        ))
def exportMktoLeadsToCsv(data, chart_name, user_id, company_id): ids = data.get('results', None) leads = Lead.objects().filter(company_id=company_id, mkto_id__in=ids).order_by('mkto_id').hint('co_mkto_id') leads = list(leads) leads = [lead.to_mongo().to_dict() for lead in leads] if leads is None or len(leads) == 0: print 'input is none' return {'file_name': '', 'content_type' : 'text/csv'} try: print 'input not none ' + str(leads) #open a temp file for writing end_date_string = time.strftime('%Y-%m-%d_%H:%M:%S', time.localtime()) base_file_name = end_date_string + '_' + chart_name + '_leads' + '_cx.csv' file_name = '/tmp/' + base_file_name csv_out = open(file_name, 'wb') fieldnames = ['Marketo ID', 'First Name', 'Last Name', 'Email', 'Company', 'Status', 'Source', 'Original Source Type', 'Created Date', 'SFDC Lead ID', 'SFDC Contact ID', 'SFDC Account ID', ] #create writer csv_writer = csv.DictWriter(csv_out, fieldnames=fieldnames, restval='', extrasaction='ignore') csv_writer.writeheader() for lead in leads: created_date = '' print 'lead is ' + str(lead['mkto_id']) mkto_id_url = "http://app-sj09.marketo.com/leadDatabase/loadLeadDetail?leadId=" + str(lead['mkto_id']) mkto_id = '=HYPERLINK("' + mkto_id_url + '", "' + str(lead['mkto_id']) + '")' #print '1' if 'createdAt' in lead['leads']['mkto'] and lead['leads']['mkto']['createdAt'] is not None: created_date = lead['leads']['mkto']['createdAt'] #print '2' if not 'leads' in lead or not 'mkto' in lead.get('leads', ''): lead['leads'] = {} lead['leads']['mkto'] = {} for key, value in lead['leads']['mkto'].items(): if lead['leads']['mkto'][key] is None: lead['leads']['mkto'][key] = '' csv_writer.writerow({'Marketo ID' : mkto_id, 'First Name': lead.get('source_first_name', '').encode('utf-8'), 'Last Name': lead.get('source_last_name', '').encode('utf-8'), 'Email': lead.get('source_email', '').encode('utf-8'), 'Company': lead['leads']['mkto'].get('company', '').encode('utf-8'), 'Status': 
lead['leads']['mkto'].get('leadStatus', '').encode('utf-8'), 'Source': lead['leads']['mkto'].get('leadSource', '').encode('utf-8'), 'Original Source Type': lead['leads']['mkto'].get('originalSourceType', '').encode('utf-8'), 'Created Date': created_date, 'SFDC Lead ID': lead['leads']['mkto'].get('sfdcLeadId', '').encode('utf-8'), 'SFDC Contact ID': lead['leads']['mkto'].get('sfdcContactId', '').encode('utf-8'), 'SFDC Account ID': lead['leads']['mkto'].get('sfdcAccountId', '').encode('utf-8'), }) #print '4' csv_out.close() return {'file_name': file_name, 'content_type' : 'text/csv'} except Exception as e: print 'exception while trying to create CSV file: ' + str(e) send_notification(dict(type='error', success=False, message=str(e)))
def create_lead():
    """Build, persist and return a canned Lead fixture for tests.

    @return: lead object
    """
    fixture_attrs = dict(
        name="laala",
        due_date="2008-11-01",
        update_date="2008-11-01 16:14:16",
        creation_date="2008-11-01 15:43:43",
        start_date="2008-11-01",
        responsible=None,
        sales=None,
        external_staffing="JCF",
        state="QUALIF",
        deal_id="123456",
        client=Client.objects.get(pk=1),
        salesman=None,
        description="A wonderfull lead that as a so so long description",
        subsidiary=Subsidiary.objects.get(pk=1),
    )
    fixture = Lead(**fixture_attrs)
    fixture.save()
    return fixture
def test_save_lead(self):
    """deal_id is auto-assigned on first save, kept stable across
    re-saves, and re-assigned with the next sequence number after being
    cleared."""
    subsidiary = Subsidiary.objects.get(pk=1)
    broker = BusinessBroker.objects.get(pk=1)
    client = Client.objects.get(pk=1)
    lead = Lead(name="laalaa", state="QUALIF", client=client, salesman=None,
                description="A wonderfull lead that as a so so long description",
                subsidiary=subsidiary)
    deal_prefix = client.organisation.company.code, date.today().strftime("%y")
    # No deal id code yet
    self.assertEqual(lead.deal_id, "")
    lead.save()
    self.assertEqual(lead.deal_id, "%s%s01" % deal_prefix)
    lead.paying_authority = broker
    lead.save()
    # Saving again must not bump the sequence number
    self.assertEqual(lead.deal_id, "%s%s01" % deal_prefix)
    lead.deal_id = ""
    lead.save()
    # 01 is already used
    self.assertEqual(lead.deal_id, "%s%s02" % deal_prefix)
def getCount(request, id):
    """Return, as JSON, the number of leads or campaigns belonging to
    company `id`; the `object` query param selects which."""
    # Renamed local: `object` shadowed the builtin.
    target = request.GET.get('object')
    try:
        if target == 'lead':
            result = {'count': Lead.objects(company_id=id).count()}
        elif target == 'campaign':
            result = {'count': Campaign.objects(company_id=id).count()}
        else:
            result = 'Nothing to count'
        return JsonResponse(result, safe=False)
    except Exception as e:
        return JsonResponse({'Error': str(e)})
def create(self, validated_data):
    """Create a Lead, assign a salesperson, persist an EventLog entry and
    send a notification mail; returns the new Lead.

    Bug fixes (aligned with the corrected sibling serializer):
    - the EventLog row was built but never saved;
    - `manager` was referenced before assignment (the resulting NameError
      was masked by a bare except that then logged the wrong salesperson);
    - the bare except is gone entirely.
    """
    lead_object = Lead(status=LeadStage.objects.filter(id=1).first(), **validated_data)
    lead_object.save()
    salesperson = assign_salesperson()
    lead_object.assigned_to.set([salesperson])
    lead_object.save()
    # Fall back to the default manager (pk=1) when the salesperson has none.
    manager = SalesPersonUser.objects.get(pk=1) if salesperson.manager is None else salesperson.manager
    eventlog_obj = EventLog(event_lead=lead_object,
                            event_type="New Lead",
                            salesperson=salesperson,
                            manager=manager)
    eventlog_obj.save()
    print("about to send mail")
    send_mail(
        'Testing',
        'New Lead',
        '*****@*****.**',
        ['*****@*****.**'],
        fail_silently=False,
    )
    return lead_object
def createLead(request, pk):
    """Create a Lead for user `pk` from the POSTed payload.

    Returns 201 with the serialized lead, 400 with validation errors, or
    405 for non-POST methods (previously non-POST requests fell through
    and returned None, which crashes the framework).
    """
    user = User.objects.get(pk=pk)
    lead = Lead(user=user)
    if request.method == "POST":
        serializer = LeadSerializer(lead, data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
    return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
def saveHsptOpportunitiesToMaster(user_id=None, company_id=None, job_id=None, run_type=None):
    """Attach staged Hubspot deals to the Leads referenced by each deal's
    associatedVids, de-duplicating / refreshing on dealId.

    run_type 'initial' reads TempData, anything else TempDataDelta.
    Any exception aborts the batch and is reported via send_notification.
    """
    print 'saving hspt opps to master'
    if run_type == 'initial':
        opps = TempData.objects(Q(company_id=company_id) & Q(record_type='opportunity') & Q(source_system='hspt') & Q(job_id=job_id)).only('source_record')
    else:
        opps = TempDataDelta.objects(Q(company_id=company_id) & Q(record_type='opportunity') & Q(source_system='hspt') & Q(job_id=job_id)).only('source_record')
    oppListTemp = list(opps)
    oppList = [i['source_record'] for i in oppListTemp]
    try:
        for opp in oppList:
            associations = opp.get('associations', None)
            if associations is not None:
                related_leads_list = associations.get('associatedVids', None)
                for i in range(len(related_leads_list)):
                    lead_id = related_leads_list[i]
                    existingLead = Lead.objects(Q(company_id=company_id) & Q(hspt_id=str(lead_id))).first()
                    # we found an existing lead with the same VID (hspt_id) as the deal
                    if existingLead is not None:
                        if 'hspt' not in existingLead.opportunities:
                            # first Hubspot opportunity on this lead
                            opportunities = {}
                            opportunities['hspt'] = []
                            opportunities['hspt'].append(opp)
                            existingLead.update(opportunities__hspt=opportunities['hspt'])
                        else:
                            # does an opportunity with this Id already exist?
                            if not any(e.get('dealId', None) == opp['dealId'] for e in existingLead.opportunities['hspt']):
                                opportunities = existingLead.opportunities['hspt']
                                opportunities.append(opp)
                                # save this opportunity
                                existingLead.update(opportunities__hspt=opportunities)
                            else:
                                # this opp already exists: replace it in place with the fresher copy.
                                # NOTE(review): this inner loop rebinds `i`, shadowing the outer
                                # associatedVids index; harmless only because the outer `for`
                                # re-assigns `i` from its own range iterator each pass.
                                for i in range(len(existingLead.opportunities['hspt'])):
                                    if existingLead.opportunities['hspt'][i]['dealId'] == opp['dealId']:
                                        existingLead.opportunities['hspt'][i] = opp
                                        existingLead.save()
                    # if no matching lead found, continue to next opportunity
    except Exception as e:
        send_notification(dict(type='error', success=False, message=str(e)))
def matchCompanyName(request, id, companySearchName):
    """Fuzzy-match `companySearchName` against the distinct company names of
    this company's leads and return one page of scored matches as JSON."""
    try:
        company_id = request.user.company_id
        page_number = int(request.GET.get('page_number'))
        items_per_page = int(request.GET.get('per_page'))
        offset = (page_number - 1) * items_per_page
        # Group leads by company name, collecting a display name per lead.
        group_stage = {
            '$group': {
                '_id': '$source_company',
                'count': {'$sum': 1},
                'name': {'$push': {'$concat': [{'$ifNull': ['$source_first_name', 'Unknown']},
                                               ' ',
                                               {'$ifNull': ['$source_last_name', 'Unknown']}]}}
            }
        }
        sort_stage = {'$sort': OrderedDict([('_id', 1), ('count', -1)])}
        cursor = Lead.objects(company_id=company_id).aggregate(group_stage, sort_stage)
        grouped = list(cursor)
        matched = matchingAlgo(request, search_name=companySearchName,
                               entries=grouped, object_type='company')
        page = matched[offset:offset + items_per_page]
        serializer = AccountCountSerializer(page, many=True)
        return JsonResponse({'count': len(matched), 'results': serializer.data})
    except Exception as e:
        return JsonResponse({'Error': str(e)})
def getAccountsAndCounts(request, id):
    """Return one page of lead counts grouped by company name, most
    frequent first, as JSON {'count', 'results'}."""
    try:
        company_id = request.user.company_id
        page_number = int(request.GET.get('page_number'))
        items_per_page = int(request.GET.get('per_page'))
        offset = (page_number - 1) * items_per_page
        queryset = Lead.objects(company_id=company_id).aggregate(
            {
                '$group': {
                    '_id': '$source_company',
                    'count': {'$sum': 1},
                    'name': {'$push': {'$concat': [
                        {'$ifNull': ['$source_first_name', 'Unknown']},
                        ' ',
                        {'$ifNull': ['$source_last_name', 'Unknown']}]}}
                }
            },
            # Bug fix: $sort takes a field->direction mapping; the previous
            # tuple ('count', -1) is not a valid sort specification (the
            # sibling matchCompanyName view already sorts with a mapping).
            {'$sort': OrderedDict([('count', -1)])}
        )
        qlist = list(queryset)
        total = len(qlist)
        result = qlist[offset:offset + items_per_page]
        serializer = AccountCountSerializer(result, many=True)
        return JsonResponse({'count': total, 'results': serializer.data})
    except Exception as e:
        return JsonResponse({'Error': str(e)})
def process():
    """Persist a Lead from the submitted form, then render page 1 of the
    paginated results partial."""
    # calculates number of pages
    pages = math.ceil(num_records / results_per_page) + 1
    # creates page links using helper method below
    page_links = nav_list(int(pages))
    # store the submitted lead
    new_lead = Lead(
        first_name=request.form['first_name'],
        last_name=request.form['last_name'],
        email=request.form['email'],
    )
    db.session.add(new_lead)
    db.session.commit()
    # db query limiting number of results per page
    leads = Lead.query.limit(results_per_page).all()
    return render_template('/includes/_data.html',
                           pages=page_links,
                           current_page=1,
                           leads=leads,
                           all_results_pages=nav_list(int(pages)))
def computeMetrics(self, user_id=None, company_id=None):
    """Count this company's leads, and how many of them are 'engaged'
    (have at least one Marketo activity of a campaign-related type)."""
    # need to get these dynamically?
    campaign_activity_types = {1, 2, 3, 7, 8}
    all_leads = Lead.objects(company_id=company_id).all()
    total_leads = len(all_leads)
    engaged_leads = 0
    for lead in all_leads:
        if 'mkto' not in lead.activities:
            continue
        # any() short-circuits exactly like the original break-on-first-hit loop
        if any(act['activityTypeId'] in campaign_activity_types
               for act in lead.activities['mkto']):
            engaged_leads += 1
    return {'total_leads': total_leads, 'engaged_leads': engaged_leads}
def test_save_lead_and_active_client(self):
    """A client stays active while it has at least one live lead and is
    reactivated when a new lead is created for it."""
    first_lead = Lead.objects.get(id=1)
    first_lead.state = "LOST"
    first_lead.save()
    first_lead = Lead.objects.get(id=1)
    # There's still another active lead (id=3) for this client
    self.assertTrue(first_lead.client.active)
    second_lead = Lead.objects.get(id=3)
    second_lead.state = "SLEEPING"
    second_lead.save()
    first_lead = Lead.objects.get(id=1)
    self.assertFalse(first_lead.client.active)
    fresh_lead = Lead()
    fresh_lead.subsidiary_id = 1
    fresh_lead.client = first_lead.client
    fresh_lead.save()
    first_lead = Lead.objects.get(id=1)
    # A new lead on this client should mark it as active again
    self.assertTrue(first_lead.client.active)
def create(self, validated_data):
    """Create a Lead, assign it to a salesperson, record an EventLog entry
    and email both the salesperson and their manager; returns the Lead."""
    new_lead = Lead(status=LeadStage.objects.get(pk=1), **validated_data)
    new_lead.save()
    salesperson = assign_salesperson()
    # Default manager (pk=1) when the salesperson has none of their own.
    if salesperson.manager == None:
        manager = SalesPersonUser.objects.get(pk=1)
    else:
        manager = salesperson.manager
    new_lead.assigned_to.set([salesperson])
    new_lead.save()
    log_entry = EventLog(event_lead=new_lead,
                         event_type="New Lead",
                         salesperson=salesperson,
                         manager=manager)
    log_entry.save()
    send_mail(
        'New Lead',
        f'New Lead assigned to { salesperson.email }',
        '*****@*****.**',
        [salesperson.email, manager.email],
        fail_silently=False,
    )
    return new_lead
def create_lead_from_file(validated_rows, invalid_rows, user_id, source, company_id):
    """Parameters : validated_rows, invalid_rows, user_id.
    This function is used to create leads from a given file.

    Rows with a duplicate title, a missing email or a malformed email are
    skipped; string fields are truncated to their column lengths.
    """
    # Raw string so the \. escapes reach the regex engine literally.
    email_regex = r"^[_a-zA-Z0-9-]+(\.[_a-zA-Z0-9-]+)*@[a-zA-Z0-9-]+(\.[a-zA-Z0-9-]+)*(\.[a-zA-Z]{2,4})$"
    user = User.objects.get(id=user_id)
    company = Company.objects.filter(id=company_id).first()
    for row in validated_rows:
        if Lead.objects.filter(title=row.get("title")).exists():
            continue
        email = row.get("email")
        # Bug fix: re.match raised TypeError on rows with no email at all;
        # skip those rows along with malformed addresses.
        if email is None or re.match(email_regex, email) is None:
            continue
        try:
            lead = Lead()
            lead.title = row.get("title", "")[:64]
            lead.first_name = row.get("first name", "")[:255]
            lead.last_name = row.get("last name", "")[:255]
            lead.website = row.get("website", "")[:255]
            lead.email = email
            lead.phone = row.get("phone", "")
            lead.address_line = row.get("address", "")[:255]
            lead.city = row.get("city", "")[:255]
            lead.state = row.get("state", "")[:255]
            lead.postcode = row.get("postcode", "")[:64]
            lead.country = row.get("country", "")[:3]
            lead.description = row.get("description", "")
            lead.status = row.get("status", "")
            lead.account_name = row.get("account_name", "")[:255]
            lead.created_from_site = False
            lead.created_by = user
            lead.company = company
            lead.save()
        except Exception as e:
            # Best-effort import: report and continue with the next row.
            print(e)
def create_lead(source=None, msg=None, customer=None):
    """ Main function generate leads
    Keyword arguments
    :param source object a LeadSource object
    :param msg object a django mailbox Message object
    :param customer object
    :return (lead, lead_created) tuple, or ('', False) on failure
    """
    from leads.models import Lead, LeadProperty
    property = None
    lead, lead_created = None, False
    TODAY = timezone.now().date()
    # NOTE(review): bare except below also hides non-AttributeError failures
    # (e.g. an ImproperlyConfigured settings access) — consider narrowing.
    try:
        CUT_OFF_DAYS = settings.LEADS_CUT_OFF_DAYS
    except:
        CUT_OFF_DAYS = 30
    # Leads older than the cut-off are never aggregated onto.
    CUT_OFF_DATE = TODAY + relativedelta(days=-CUT_OFF_DAYS)
    ######################## Check for a property using the id in the message ###################
    if source:
        property_id = apply_rules(source, msg, target_field=PROPERTIES)
        if property_id:
            property, property_created = get_property(property_id=property_id, source=source)
        else:
            logger.warning('No property id found in message ID:{0}, source: {1}'.format(msg.id, source.name))
        if not customer:
            customer = get_or_create_customer(source, msg)
        # NOTE(review): the `or TODAY` fallback only triggers when .date()
        # returns a falsy value; if apply_rules returns None, date_parse
        # raises before the fallback applies — confirm intended behavior.
        msg_arrival = date_parse(apply_rules(source, msg, target_field=ARRIVAL)).date() or TODAY
        msg_departure = date_parse(apply_rules(source, msg, target_field=DEPARTURE)).date() or TODAY
        msg_adults = int_or_0(apply_rules(source, msg, target_field=ADULTS))
        msg_children = int_or_0(apply_rules(source, msg, target_field=CHILDREN))
        agency = source.agency
        source_name = source.name
    elif customer:
        # No source: a direct customer message, with placeholder stay data.
        agency = customer.agency
        source_name = 'Customer message'
        msg_arrival = msg_departure = TODAY
        msg_adults = msg_children = 0
    else:
        # Cannot create a lead with no source and no customer
        logger.error('Message id: {0} Cannot create a lead with no source and no customer'.format(msg.id))
        return '', False
    ############################ We have all we need to create or get a lead ##############################
    try:
        lead = Lead.objects.filter(customer=customer,
                                   agency=agency,
                                   # This line below will allow to aggregate leads from the same source
                                   # source__iexact=source_name,
                                   created__gt=CUT_OFF_DATE,
                                   booked=False).order_by('-modified')[0]
    except (IndexError, Lead.DoesNotExist):
        # No leads found, let's create one
        try:
            lead = Lead(
                customer=customer,
                arrival=msg_arrival,
                departure=msg_departure,
                adults=msg_adults,
                children=msg_children,
                source=source_name,
                agency=agency
            )
            lead.save()
            msg.lead = lead
            msg.is_lead_source = True
            msg.save()
            lead_created = True
        except Exception as e:
            logger.error('Message id: {0} qualified as a lead but could not be saved, Error: {1}'.format(msg.id, repr(e)))
            return '', False
    ### If we have a property and is not in the related list we add it ######
    if property:
        # check if there is already that property attached to the lead for the same dates, otherwise we add one
        lp, lp_created = LeadProperty.objects.get_or_create(
            lead=lead, property=property, agency=source.agency,
            available_from=msg_arrival or lead.arrival,
            available_to=msg_departure or lead.departure,
            defaults={'status': LeadProperty.REQUESTED}
        )
        if lp_created:
            # Property not already on the list New offer must be sent
            lead.first_response = False
    ################ Notify if we created this lead, otherwise is a response ####################
    if lead_created:
        content = 'New lead from {0}'.format(lead.customer)
        notify(msg, content)
    else:
        # Existing lead: attach the message and bump the modified stamp.
        msg.lead = lead
        msg.save()
        lead.modified = timezone.now()
        lead.save()
        logger.debug('Message id:{0} is a double post or a generic message from customer'.format(msg.id))
        content = 'New lead from {0}'.format(lead.customer)
        notify(msg, content)
    return lead, lead_created
def save_new_jobs():
    """Fetch today's Github Jobs postings and store each as a developer Lead.

    Skills are inferred by intersecting the posting's de-tagged description
    text with the Lead.LEAD_SKILLS vocabulary. Jobs without a detectable
    application URL are skipped with a logged reason.
    """
    print("hello 2")
    json_data = get_jobs_json()
    now = datetime.utcnow()
    LEAD_SKILLS = Lead.LEAD_SKILLS
    if json_data is None:
        return
    for job in json_data:
        try:
            date_difference = now - datetime.strptime(
                job["created_at"], "%a %b %d %H:%M:%S %Z %Y")
            print("Days", date_difference.days)
            if date_difference.days != 0:
                continue  # only ingest postings created today
            new_job = Lead()
            new_job.author = "Github Jobs"
            new_job.developer = True
            new_job.description = job["description"]
            # remove all html tags, and keep only lowercase and uppercase characters.
            # then keep only shared words between skills list and pure text from description.
            shared = set(x for x, _ in LEAD_SKILLS) & set(
                re.sub(
                    r"<.*?>|&([a-z0-9]+|#[0-9]{1,6}|#x[0-9a-f]{1,6});|[^a-zA-Z]",
                    " ",
                    new_job.description).upper().split())
            new_job.skills = tuple(shared)
            new_job.how_to_apply = job["how_to_apply"]
            if job["type"] == "Full Time":
                new_job.job_type = 'Full-time'
            elif job["type"] == "Contract":
                new_job.job_type = 'Contract'
            new_job.title = job["title"]
            new_job.company = job["company"]
            application_url = re.search(
                r"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+",
                job["how_to_apply"])
            if application_url is None:
                # Bug fix: subscripting a failed search raised TypeError and
                # the bare except silently dropped the job; skip explicitly.
                print("FAILED", "no application link in how_to_apply")
                continue
            new_job.application_link = application_url[0]
            new_job.user_author = get_user_model().objects.get(
                pk='f3ee3abe-a186-44da-9ed2-b18ec3b570b7')
            new_job.remote = 'Remote'
            new_job.role = 'Developer'
            new_job.save()
            print('SUCCESS', new_job.title)
        except Exception as e:
            # Narrowed from a bare except; report why this job failed.
            print("FAILED", e)
def getOpportunities(request, id):
    """Return paginated Salesforce opportunities for the caller's company.

    GET params: ``page_number`` / ``per_page`` (pagination),
    ``start_date`` / ``end_date`` (epoch milliseconds, localized to the
    server timezone then converted to UTC strings), ``subview``
    ('closedbeforecreated' | 'nocontact'), ``filters`` (JSON dict of
    opportunity field -> value), ``superfilters`` (JSON; key 'date_types'
    names the opportunity date field the start/end range applies to).

    Returns JsonResponse with keys count/results/type/source_system, or
    JsonResponse({'Error': ...}) when anything raises.
    """
    try:
        company_id = request.user.company_id
        page_number = int(request.GET.get('page_number'))
        items_per_page = int(request.GET.get('per_page'))
        offset = (page_number - 1) * items_per_page
        start_date = int(request.GET.get('start_date'))
        end_date = int(request.GET.get('end_date'))
        sub_view = request.GET.get('subview')
        filters = request.GET.get('filters')
        filters = json.loads(filters)
        superfilters = request.GET.get('superfilters')
        super_filters = json.loads(superfilters)
        date_field = None
        querydict_filters = {}
        company_field_qry = 'company_id'
        opp_field_qry = 'opportunities__sfdc__exists'
        subview_field_qry = ''  # NOTE(review): assigned but never used below
        original_date_field = ''
        # $project stage mapping each unwound embedded opportunity onto a
        # flat record for the serializer
        projection = {'$project': {'_id': '$opportunities.sfdc.Id',
                                   'created_date': '$opportunities.sfdc.CreatedDate',
                                   'close_date': '$opportunities.sfdc.CloseDate',
                                   'account_name': '$source_name',
                                   'name': '$opportunities.sfdc.Name',
                                   'amount': '$opportunities.sfdc.Amount',
                                   'account_id': '$sfdc_id',
                                   'closed': '$opportunities.sfdc.IsClosed',
                                   'won': '$opportunities.sfdc.IsWon',
                                   'owner_id': '$opportunities.sfdc.OwnerId',
                                   'stage': '$opportunities.sfdc.StageName'
                                   }
                      }
        match = {'$match' : { }}
        if super_filters is not None:
            if 'date_types' in super_filters:
                # need to filter by a certain type of date
                date_field = super_filters['date_types']
        if start_date is not None:
            # epoch millis -> naive datetime (start_date is an int above, so
            # this branch always runs)
            start_date = datetime.fromtimestamp(float(start_date) / 1000)
        if end_date is not None:
            end_date = datetime.fromtimestamp(float(end_date) / 1000)
        # localize the naive datetimes to the server timezone, then render
        # them as UTC strings in the format SFDC dates are stored as
        local_start_date = get_current_timezone().localize(start_date, is_dst=None)
        utc_day_start = local_start_date.astimezone(pytz.timezone('UTC'))
        utc_day_start_string = datetime.strftime(utc_day_start, '%Y-%m-%dT%H-%M-%S.000+0000')
        local_end_date = get_current_timezone().localize(end_date, is_dst=None)
        utc_day_end = local_end_date.astimezone(pytz.timezone('UTC'))
        utc_day_end_string = datetime.strftime(utc_day_end, '%Y-%m-%dT%H-%M-%S.000+0000')
        print 'utc start string is ' + str(utc_day_start_string)
        if filters is not None:
            for key, value in filters.items():
                if value is not None and value != '':
                    # same filter expressed twice: ODM querydict form and
                    # aggregation $match form
                    querydict_filters['opportunities__sfdc__' + key] = value
                    match['$match']['opportunities.sfdc.' + key] = value
        if date_field is None: #if there's no date filter
            querydict = {opp_field_qry: True, company_field_qry: company_id}
            querydict.update(querydict_filters)
            opps = Account.objects(**querydict).aggregate({'$unwind': '$opportunities.sfdc'}, match, projection)
        else: #if date filter is used
            if date_field == 'opportunities.sfdc.CloseDate':
                #change to Last Modified Date because CloseDate in Opp may not be correctly updated by user
                date_field = 'opportunities.sfdc.LastModifiedDate'
            original_date_field = date_field
            date_field = date_field.replace('.', '__') # needed to change embedded field format for querydict
            date_field_start_qry = date_field + '__gte'
            date_field_end_qry = date_field + '__lte'
            match['$match'][original_date_field] = {'$gte': utc_day_start_string, '$lte': utc_day_end_string}
            if original_date_field == 'opportunities.sfdc.LastModifiedDate':
                #if close date was requested, also filter on 'IsClosed'
                isclosed_field_qry = 'opportunities__sfdc__IsClosed'
                querydict = {company_field_qry: company_id, date_field_start_qry: utc_day_start_string, date_field_end_qry: utc_day_end_string, isclosed_field_qry: True}
                querydict.update(querydict_filters)
                match['$match']['opportunities.sfdc.IsClosed'] = True
                opps = Account.objects(**querydict).aggregate({'$unwind': '$opportunities.sfdc'}, match, projection)
            else:
                querydict = {company_field_qry: company_id, date_field_start_qry: utc_day_start_string, date_field_end_qry: utc_day_end_string}
                querydict.update(querydict_filters)
                opps = Account.objects(**querydict).aggregate({'$unwind': '$opportunities.sfdc'}, match, projection)
        opps_list = list(opps)
        #see if there's a subview
        if sub_view == 'closedbeforecreated':
            #find Opps that have a Close Date before Created Date; compare the
            #short forms of both dates as strings after Created Date is
            #converted to local time
            opps_list[:] = [opp for opp in opps_list if opp['close_date'] < _str_from_date(_date_from_str(opp['created_date']).replace(tzinfo=pytz.utc).astimezone(tz.tzlocal()), 'short')]
        elif sub_view == 'nocontact': #find Opps that don't have a contact
            # NOTE(review): reuses whichever querydict was built above; Lead
            # query returns opps that DO have a contact, removed next
            opps2 = Lead.objects(**querydict).aggregate({'$unwind': '$opportunities.sfdc'}, match, projection)
            opps_list2 = list(opps2)
            for opp2 in opps_list2:
                opps_list[:] = [opp for opp in opps_list if opp['_id'] != opp2['_id']]
        total = len(opps_list)
        opps_list = opps_list[offset:offset + items_per_page]
        for opp in opps_list:
            opp['multiple_occurences'] = False #needed due to analytical drilldown on Opps
            opp['created_date'] = _str_from_date(_date_from_str(opp['created_date']).replace(tzinfo=pytz.utc).astimezone(tz.tzlocal()), 'short') #convert date to local timezone
        opps_list = _map_sfdc_userid_name(company_id, opps_list)
        serializer = OpportunitySerializer(opps_list, many=True)
        type = 'opps'
        return JsonResponse({'count' : total, 'results': serializer.data, 'type': type, 'source_system': 'sfdc'})
    except Exception as e:
        print 'exception while getting all accounts ' + str(e)
        return JsonResponse({'Error' : str(e)})
def lead_creation_api_endpoint_1_0(request):
    """Versioned (1.0) API endpoint for lead creation.

    Expects a POST with lead fields, a distributor ``api_key``, and optional
    ``invoked_email_automations`` invocation keys. Responds 403 when the
    requester appears to be the host server itself, 405 for non-POST, and
    400 for a bad distributor key, invalid lead data, or automation errors.
    """
    #TODO: make this not a debug thing; requests to this endpoint should NOT be coming from the host server
    #that would mean someone set up a post automation to the lead endpoint, potentially making an infinite loop
    print "Host IP: {}".format(request.get_host())
    # prefer the Cloudflare connecting-IP header; fall back to the peer address
    ip = request.META.get('HTTP_CF_CONNECTING_IP')
    if ip is None:
        ip = request.META.get('REMOTE_ADDR')
    print "Requesting IP: {}".format(ip)
    if str(ip).lower() in str(request.get_host()).lower() or str(
            request.get_host()).lower() in str(ip).lower():
        return HttpResponse(status=403) #forbidden
    if request.method != "POST":
        return HttpResponse("Method Not Allowed",
                            status=405) #method not allowed
    else:
        #normalize our state data to abbreviations
        abbreviated_personal_state = state_to_abbreviation(
            request.POST.get('personal_state', ''))
        abbreviated_business_state = state_to_abbreviation(
            request.POST.get('business_state', ''))
        #Instantiate a blank lead object and populate it with the POSTed data
        newlead = Lead(
            first_name=request.POST.get('first_name', ''),
            last_name=request.POST.get('last_name', ''),
            home_phone=request.POST.get('home_phone', ''),
            cell_phone=request.POST.get('cell_phone', ''),
            business_phone=request.POST.get('business_phone', ''),
            email=request.POST.get('email', ''),
            personal_address=request.POST.get('personal_address', ''),
            personal_address_2=request.POST.get('personal_address_2', ''),
            personal_city=request.POST.get('personal_city', ''),
            personal_state=abbreviated_personal_state,
            personal_zip=request.POST.get('personal_zip', ''),
            business_address=request.POST.get('business_address', ''),
            business_address_2=request.POST.get('business_address_2', ''),
            business_city=request.POST.get('business_city', ''),
            business_state=abbreviated_business_state,
            business_zip=request.POST.get('business_zip', ''),
            fax_number=request.POST.get('fax_number', ''),
            industry=request.POST.get('industry', ''),
            position=request.POST.get('position', ''),
            website=request.POST.get('website', ''),
            dob=request.POST.get('dob', None),
            comments=request.POST.get('comments', ''),
            referral=request.POST.get('referral', ''),
            notes=request.POST.get('notes', ''),
        )
        #Try to associate a distributor with the new lead using the POSTed API key
        #wrap in str() just in case / for testing with UUIDs
        try:
            key = request.POST.get('api_key', '')
            newlead.distributor = Distributor.objects.get(api_key=str(key))
        except:
            return HttpResponse("Invalid Distributor Key",
                                status=400) #bad request
        #Call full_clean() BEFORE associating automations to avoid firing them with invalid data
        try:
            newlead.full_clean(
            ) #returns None when everything is all good, raises ValidationError otherwise
        except ValidationError, e:
            return HttpResponse("Invalid Data Supplied: {}".format(e),
                                status=400)
        #must save the lead here in order to create database stuff necessary for m2m relationships
        newlead.save()
        for key in request.POST.getlist('invoked_email_automations'):
            try:
                newlead.invoked_email_automations.add(
                    EmailAutomation.objects.get(invocation_key=str(key)))
            except Exception, e:
                # NOTE(review): format() has one placeholder but gets
                # (Exception, e), so only the class name is shown — the
                # intent was likely .format(e); confirm before changing
                return HttpResponse("Error Adding Email Automation: {}".format(
                    Exception, e),
                                    status=400)
def register(self, data):
    """Register a new Lead for ``data['customer']`` and create its quotation.

    ``data`` is a dict carrying the customer payload plus the quotation
    fields (addresses, floors, prices, home/truck type ids, ...).

    Returns ``(quotation_id, 'ok')`` on success, or ``(None, message)``
    when registration or validation fails; the messages are user-facing
    Spanish strings and must not be altered.
    """
    customer = data.get('customer')
    try:
        l = Lead()
        l_id, msg = l.register(customer)
        if l_id:
            lead = Lead.objects.get(pk=l_id)
        else:
            return None, 'Lead not found'
    except Exception:
        # narrowed from a bare except so SystemExit/KeyboardInterrupt propagate
        return None, 'Error registering the new lead'
    address_from = data.get('address_from')
    address_to = data.get('address_to')
    final_price = data.get('final_price')
    floor_from = data.get('floor_from')
    floor_to = data.get('floor_to')
    observations = data.get('observations')
    packaging_price = data.get('packaging_price')
    packaging_time_aprox = data.get('packaging_time_aprox')
    payment_method = data.get('payment_method')
    service_date = data.get('datetime_of_service')
    total_price = data.get('total_price')
    travel_distance_aprox = data.get('travel_distance_aprox')
    travel_distance_aprox_label = data.get('travel_distance_aprox_label')
    travel_price = data.get('travel_price')
    travel_time_aprox = data.get('travel_time_aprox')
    travel_time_aprox_label = data.get('travel_time_aprox_label')
    home_type_from_id = data.get('home_type_from_id')
    home_type_to_id = data.get('home_type_to_id')
    truck_size_type_id = data.get('truck_size_type_id')
    # --- user-input validation; each message is shown directly to the client ---
    if not address_from or not address_to:
        return None, 'Revise que haya ingresado direcciones válidas.'
    if not home_type_from_id or not home_type_to_id:
        return None, 'Revise que haya seleccionado un tipo de vivienda.'
    if not floor_from or not floor_to:
        return None, 'Revise que haya ingresado el número de piso.'
    if travel_distance_aprox is None or travel_time_aprox is None:
        return None, 'El mapa no está mostrando la distancia correcta.'
    if not truck_size_type_id:
        return None, 'Selecciona un tipo de camión.'
    if not packaging_time_aprox or not packaging_price or not travel_price or not total_price or not final_price:
        return None, 'Revise que todos los campos estén completos.'
    try:
        home_type_from = HomeType.objects.get(pk=home_type_from_id)
        home_type_to = HomeType.objects.get(pk=home_type_to_id)
    except Exception:
        return None, 'El tipo de casa no es válido.'
    try:
        truck_size_type = TruckSizeType.objects.get(pk=truck_size_type_id)
    except Exception:
        return None, 'El tipo de camión no es válido.'
    quotation = ClientQuotation.objects.create(
        lead=lead,
        address_from=address_from,
        address_to=address_to,
        final_price=final_price,
        floor_from=floor_from,
        floor_to=floor_to,
        observations=observations,
        packaging_price=packaging_price,
        packaging_time_aprox=packaging_time_aprox,
        payment_method=payment_method,
        service_date=service_date,
        total_price=total_price,
        travel_distance_aprox=travel_distance_aprox,
        travel_distance_aprox_label=travel_distance_aprox_label,
        travel_price=travel_price,
        travel_time_aprox=travel_time_aprox,
        travel_time_aprox_label=travel_time_aprox_label,
        home_type_from=home_type_from,
        home_type_to=home_type_to,
        truck_size_type=truck_size_type)
    # objects.create() already persists the record; the original's extra
    # quotation.save() was a redundant second write and has been dropped
    return quotation.id, 'ok'
def create_lead_from_file(validated_rows, invalid_rows, user_id):
    """Create Lead records from rows parsed out of an uploaded file.

    Parameters: ``validated_rows`` (iterable of dicts keyed by column name),
    ``invalid_rows`` (unused here; kept for interface compatibility),
    ``user_id`` (pk of the User recorded as creator).

    A row is skipped when a Lead with the same title already exists, when
    the email column is missing, or when the email fails validation.
    """
    # raw string, compiled once instead of re-parsing the pattern per row
    email_pattern = re.compile(
        r'^[_a-zA-Z0-9-]+(\.[_a-zA-Z0-9-]+)*@[a-zA-Z0-9-]+(\.[a-zA-Z0-9-]+)*(\.[a-zA-Z]{2,4})$')
    user = User.objects.get(id=user_id)
    for row in validated_rows:
        if Lead.objects.filter(title=row.get('title')).exists():
            continue  # never duplicate an existing lead title
        email = row.get('email')
        # guard: a missing email column made the original raise TypeError
        if email is None or email_pattern.match(email) is None:
            continue
        lead = Lead()
        lead.title = row.get('title')
        lead.first_name = row.get('first name')
        lead.last_name = row.get('last name')
        lead.website = row.get('website')
        lead.email = email
        lead.phone = row.get('phone')
        lead.address_line = row.get('address')
        # street/city/state/postcode/country columns were intentionally
        # left unmapped in the original; keep that behaviour
        lead.created_by = user
        lead.save()
def exportHsptLeadsToCsv(data, chart_name, user_id, company_id):
    """Write the HubSpot leads listed in ``data['results']`` to a CSV in /tmp.

    ``data`` carries 'results' (a list of hspt_ids) and 'portal_id' (used to
    build Excel HYPERLINK formulas back to the HubSpot contact pages).

    Returns {'file_name', 'content_type'} on success; on error prints the
    exception and pushes an error notification, returning None.
    """
    ids = data.get('results', None)
    # hint() pins the compound index so the $in filter + sort stay index-backed
    leads = Lead.objects().filter(company_id=company_id, hspt_id__in=ids).order_by('hspt_id').hint('company_id_1_hspt_id_1')
    leads = list(leads)
    leads = [lead.to_mongo().to_dict() for lead in leads]
    portal_id = data.get('portal_id', None)
    if leads is None or portal_id is None:
        # NOTE(review): leads is always a list at this point, so only the
        # portal_id check can actually trigger this early return
        print 'input is none'
        return
    try:
        print 'input not none'
        #open a temp file for writing
        end_date_string = time.strftime('%Y-%m-%d_%H:%M:%S', time.localtime())
        base_file_name = end_date_string + '_' + chart_name + '_leads' + '_cx.csv'
        file_name = '/tmp/' + base_file_name
        # binary mode: the row values are pre-encoded utf-8 (Python 2 csv)
        csv_out = open(file_name, 'wb')
        fieldnames = ['Hubspot ID', 'First Name', 'Last Name', 'Email', 'City', 'Country', 'Current Stage', 'Source',
                      'Subscriber Date', 'Lead Date', 'MQL Date', 'SQL Date', 'Opportunity Date', 'Customer Date',
                      'First Visit Date', 'Last Visit Date', 'First Form', 'First Form Date', 'Recent Form', 'Recent Form Date', ]
        #create writer; extra dict keys are ignored, missing ones default to ''
        csv_writer = csv.DictWriter(csv_out, fieldnames=fieldnames, restval='', extrasaction='ignore')
        csv_writer.writeheader()
        for lead in leads:
            # default every per-stage date column to the empty string
            subscriber_date = lead_date = mql_date = sql_date = opp_date = customer_date = first_visit_date = last_visit_date = first_conversion_date = recent_conversion_date = ''
            # Excel HYPERLINK formula pointing at the HubSpot contact record
            hspt_id_url = "http://app.hubspot.com/contacts/" + str(portal_id) + "/contact/" + str(lead['hspt_id'])
            hspt_id = '=HYPERLINK("' + hspt_id_url + '", "' + str(lead['hspt_id']) + '")'
            if 'hspt_subscriber_date' in lead and lead['hspt_subscriber_date'] is not None:
                subscriber_date = datetime.strftime(lead['hspt_subscriber_date'], '%Y-%m-%d %H:%M:%S')
            if 'hspt_lead_date' in lead and lead['hspt_lead_date'] is not None:
                lead_date = datetime.strftime(lead['hspt_lead_date'], '%Y-%m-%d %H:%M:%S')
            if 'hspt_mql_date' in lead and lead['hspt_mql_date'] is not None:
                mql_date = datetime.strftime(lead['hspt_mql_date'], '%Y-%m-%d %H:%M:%S')
            if 'hspt_sql_date' in lead and lead['hspt_sql_date'] is not None:
                sql_date = datetime.strftime(lead['hspt_sql_date'], '%Y-%m-%d %H:%M:%S')
            if 'hspt_opp_date' in lead and lead['hspt_opp_date'] is not None:
                opp_date = datetime.strftime(lead['hspt_opp_date'], '%Y-%m-%d %H:%M:%S')
            if 'hspt_customer_date' in lead and lead['hspt_customer_date'] is not None:
                customer_date = datetime.strftime(lead['hspt_customer_date'], '%Y-%m-%d %H:%M:%S')
            # analytics/conversion timestamps live in the raw hspt properties
            # dict; presumably datetime values when present — confirm upstream
            if lead['leads']['hspt']['properties'].get('hs_analytics_first_visit_timestamp', '') != '':
                first_visit_date = datetime.strftime(lead['leads']['hspt']['properties'].get('hs_analytics_first_visit_timestamp', ''), '%Y-%m-%d %H:%M:%S')
            if lead['leads']['hspt']['properties'].get('hs_analytics_last_visit_timestamp', '') != '':
                last_visit_date = datetime.strftime(lead['leads']['hspt']['properties'].get('hs_analytics_last_visit_timestamp', ''), '%Y-%m-%d %H:%M:%S')
            if lead['leads']['hspt']['properties'].get('first_conversion_date', '') != '':
                first_conversion_date = datetime.strftime(lead['leads']['hspt']['properties'].get('first_conversion_date', ''), '%Y-%m-%d %H:%M:%S')
            if lead['leads']['hspt']['properties'].get('recent_conversion_date', '') != '':
                recent_conversion_date = datetime.strftime(lead['leads']['hspt']['properties'].get('recent_conversion_date', ''), '%Y-%m-%d %H:%M:%S')
            csv_writer.writerow({'Hubspot ID' : hspt_id,
                                 'First Name': lead['source_first_name'].encode('utf-8'),
                                 'Last Name': lead['source_last_name'].encode('utf-8'),
                                 'Email': lead['source_email'].encode('utf-8'),
                                 'Country': lead['leads']['hspt']['properties'].get('country', '').encode('utf-8'),
                                 'City': lead['leads']['hspt']['properties'].get('city', '').encode('utf-8'),
                                 'Current Stage': lead['source_stage'].encode('utf-8'),
                                 'Source': lead['source_source'],
                                 'Subscriber Date': subscriber_date,
                                 'Lead Date': lead_date,
                                 'MQL Date': mql_date,
                                 'SQL Date': sql_date,
                                 'Opportunity Date': opp_date,
                                 'Customer Date': customer_date,
                                 'First Visit Date': first_visit_date,
                                 'Last Visit Date': last_visit_date,
                                 'First Form': lead['leads']['hspt']['properties'].get('first_conversion_event_name', '').encode('utf-8'),
                                 'First Form Date': first_conversion_date,
                                 'Recent Form': lead['leads']['hspt']['properties'].get('recent_conversion_event_name', '').encode('utf-8'),
                                 'Recent Form Date': recent_conversion_date})
        csv_out.close()
        return {'file_name': file_name, 'content_type' : 'text/csv'}
    except Exception as e:
        print 'exception while trying to create CSV file: ' + str(e)
        send_notification(dict(type='error', success=False, message=str(e)))
def saveMktoActivitiesToMaster(user_id=None, company_id=None, job_id=None, run_type=None):
    """Attach staged Marketo activities to their master Lead records.

    Reads this job's staged activity records from TempData ('initial' run)
    or TempDataDelta (delta run). For each activity: find the Lead by
    mkto_id, append the activity to ``lead.activities['mkto']`` unless an
    activity with the same id is already there, and for 'Change Data Value'
    activities on 'Lead Status' also append a status-change entry to
    ``lead.statuses['mkto']``. Activities whose lead is not found are
    dropped. Any exception is reported via send_notification.
    """
    #job_id = ObjectId("56a2dd408afb006f9e7cb851")
    if run_type == 'initial':
        # raw pymongo cursor (batched) instead of the ODM, for throughput
        collection = TempData._get_collection()
        activities = collection.find({'company_id': int(company_id), 'record_type': 'activity', 'source_system': 'mkto', 'job_id': job_id}, projection={'source_record': True}, batch_size=1000)
    else:
        collection = TempDataDelta._get_collection()
        activities = collection.find({'company_id': int(company_id), 'record_type': 'activity', 'source_system': 'mkto', 'job_id': job_id}, projection={'source_record': True}, batch_size=1000)
    # activity-type metadata (id -> name) comes from the company's Marketo
    # integration record
    existingIntegration = CompanyIntegration.objects(company_id = company_id).first()
    if existingIntegration is not None:
        activityTypeArray = existingIntegration.integrations['mkto']['metadata']['activity']
    else:
        print 'No activity type metadata found for Marketo'
        raise ValueError('No activity type metadata found for Marketo')
    try:
        # resolve the id of the 'Change Data Value' activity type
        # NOTE(review): changeActivityId stays unbound if that type is missing
        # from the metadata, which would raise NameError below (caught here)
        for i in range(len(activityTypeArray)):
            if activityTypeArray[i]['name'] == 'Change Data Value':
                changeActivityId = activityTypeArray[i]['id']
                print 'change id is ' + str(changeActivityId)
        for activity in activities:
            newActivity = activity['source_record']
            addThisActivity = True
            mkto_id = str(newActivity['leadId'])
            print 'doing lead ' + mkto_id
            existingLead = Lead.objects(Q(mkto_id = str(mkto_id)) & Q(company_id = company_id)).first()
            if existingLead is not None: # we found this lead to attach the activities
                if 'mkto' in existingLead.activities:
                    currentActivities = existingLead.activities['mkto']
                    # dedup: skip if an activity with this id already exists
                    for i in range(len(currentActivities)):
                        if currentActivities[i]['id'] == newActivity['id']:
                            addThisActivity = False
                            break
                    if addThisActivity == True:
                        # stamp the human-readable activity type name
                        for i in range(len(activityTypeArray)):
                            if activityTypeArray[i]['id'] == newActivity['activityTypeId']:
                                newActivity['activityTypeName'] = activityTypeArray[i]['name']
                                break
                        currentActivities.append(newActivity)
                        existingLead.update(activities__mkto = currentActivities)
                else:
                    # first mkto activity for this lead: start a fresh list
                    currentActivities = []
                    for i in range(len(activityTypeArray)):
                        if activityTypeArray[i]['id'] == newActivity['activityTypeId']:
                            newActivity['activityTypeName'] = activityTypeArray[i]['name']
                            break
                    currentActivities.append(newActivity)
                    existingLead.update(activities__mkto = currentActivities)
                # a new 'Change Data Value' activity on 'Lead Status' also
                # yields an entry in the lead's status history
                if addThisActivity == True and newActivity['activityTypeId'] == changeActivityId and newActivity['primaryAttributeValue'] == 'Lead Status':
                    print 'processing status activity for id ' + mkto_id
                    newStatus = ''
                    oldStatus = ''
                    for attribute in newActivity['attributes']:
                        if attribute['name'] == 'New Value':
                            newStatus = attribute['value']
                        elif attribute['name'] == 'Old Value':
                            oldStatus = attribute['value']
                    newActivity['newStatus'] = newStatus
                    newActivity['oldStatus'] = oldStatus
                    newActivity['date'] = newActivity['activityDate']
                    if 'mkto' in existingLead.statuses:
                        currentStatuses = existingLead.statuses['mkto']
                        currentStatuses.append(newActivity) # changed on 1/22/2016
                        existingLead.update(statuses__mkto = currentStatuses)
                    else:
                        currentStatuses = []
                        currentStatuses.append(newActivity) # changed on 1/22/2016
                        existingLead.update(statuses__mkto = currentStatuses)
            # no concept of saving the activity if the lead does not exist
    except Exception as e:
        send_notification(dict( type='error', success=False, message=str(e) ))
def filterLeads(request, id):
    """Return a page of HubSpot leads that were in the ``lead_type`` stage.

    GET params: ``start_date`` / ``end_date`` (epoch seconds, localized to
    the server timezone), ``lead_type`` (stage bucket, e.g. 'MQLs'),
    ``query_type`` ('strict' = leads whose current lifecyclestage equals the
    stage and whose stage date falls in [start_date, end_date]; anything
    else = snapshot: leads that had entered the stage by start_date and had
    NOT yet entered any later stage), ``page_number`` / ``per_page``.

    Returns JsonResponse({'count', 'results'}) or
    JsonResponse({'Error': ...}) when anything raises.
    """
    user_id = request.user.id
    company_id = request.user.company_id
    start_date = request.GET.get('start_date')
    end_date = request.GET.get('end_date')
    lead_type = request.GET.get('lead_type')
    query_type = request.GET.get('query_type')
    page_number = int(request.GET.get('page_number'))
    items_per_page = int(request.GET.get('per_page'))
    offset = (page_number - 1) * items_per_page
    if start_date is not None:
        local_start_date_naive = datetime.fromtimestamp(float(start_date))
        local_start_date = get_current_timezone().localize(local_start_date_naive, is_dst=None)
    if end_date is not None:
        local_end_date_naive = datetime.fromtimestamp(float(end_date))
        local_end_date = get_current_timezone().localize(local_end_date_naive, is_dst=None)
    # lifecycle stages in funnel order; the snapshot query excludes a lead
    # when it had already reached any stage AFTER lead_type by start_date
    stage_order = ["Subscribers", "Leads", "MQLs", "SQLs", "Opportunities", "Customers"]
    date_field_map = {
        "Subscribers": 'hs_lifecyclestage_subscriber_date',
        "Leads": 'hs_lifecyclestage_lead_date',
        "MQLs": 'hs_lifecyclestage_marketingqualifiedlead_date',
        "SQLs": 'hs_lifecyclestage_salesqualifiedlead_date',
        "Opportunities": 'hs_lifecyclestage_opportunity_date',
        "Customers": 'hs_lifecyclestage_customer_date'
    }
    try:
        leads = []
        if lead_type is not None:
            start_date_field_qry = 'leads__hspt__properties__' + date_field_map[lead_type] + '__gte'
            end_date_field_qry = 'leads__hspt__properties__' + date_field_map[lead_type] + '__lte'
            if query_type == "strict":  # for the Contacts Distribution chart
                stage_field_map = {
                    "Subscribers": 'subscriber',
                    "Leads": 'lead',
                    "MQLs": 'marketingqualifiedlead',
                    "SQLs": 'salesqualifiedlead',
                    "Opportunities": 'opportunity',
                    "Customers": 'customer'
                }
                stage_field_qry = 'leads__hspt__properties__lifecyclestage'
        else:
            start_date_field_qry = 'leads__hspt__properties__createdate__gte'
            end_date_field_qry = 'leads__hspt__properties__createdate__lte'
        company_field_qry = 'company_id'
        if query_type == "strict":
            # the strict query fully expresses the filter; the DB does the work
            querydict = {company_field_qry: company_id,
                         start_date_field_qry: local_start_date,
                         end_date_field_qry: local_end_date,
                         stage_field_qry: stage_field_map[lead_type]}
            total = Lead.objects(**querydict).count()
            leads = Lead.objects(**querydict).skip(offset).limit(items_per_page)
        else:
            # snapshot: pull every lead whose stage date is <= start_date,
            # then exclude those that had also entered a LATER stage by then
            querydict = {company_field_qry: company_id, end_date_field_qry: local_start_date}
            leads_temp = Lead.objects(**querydict)
            later_stages = stage_order[stage_order.index(lead_type) + 1:] if lead_type in stage_order else []
            for lead in leads_temp:
                properties = lead.leads['hspt']['properties']
                include_this_lead = True
                for later_stage in later_stages:
                    prop_name = date_field_map[later_stage]
                    if prop_name in properties:
                        # stored stage dates are naive UTC; localize then
                        # convert to server-local for the comparison
                        stage_date = pytz.utc.localize(properties[prop_name], is_dst=None)
                        stage_date = stage_date.astimezone(get_current_timezone())
                        if stage_date <= local_start_date:
                            include_this_lead = False
                            break
                if include_this_lead:
                    leads.append(lead)
            total = len(leads)
            leads = leads[offset:offset + items_per_page]
        serializer = LeadSerializer(leads, many=True)
        return JsonResponse({'count' : total, 'results': serializer.data})
    except Exception as e:
        return JsonResponse({'Error' : str(e)})
def saveSfdcOpportunitiesToMaster(user_id=None, company_id=None, job_id=None, run_type=None):
    """Merge the Salesforce opportunities staged by one sync job into the
    master Lead and Account collections.

    Staged records are read from TempData for an 'initial' run and from
    TempDataDelta for any other run_type.  Each opportunity is attached to
    the Lead document it belongs to — matched in order via the SFDC contact
    id, the converted SFDC lead, a Marketo cross-reference, or a HubSpot
    cross-reference — and, in a second pass, to its owning Account so that
    opportunities without any contact are also captured.  Any exception is
    reported via send_notification instead of being raised.
    """
    #job_id = ObjectId("56a3f89f8afb003c13a59e26")
    if run_type == 'initial':
        # Initial sync: read from the full-load staging collection.
        opps = TempData.objects(Q(company_id=company_id) & Q(record_type='opportunity') & Q(source_system='sfdc') & Q(job_id=job_id)).only('source_record') #& Q(job_id=job_id)
        contacts = TempData.objects(Q(company_id=company_id) & Q(record_type='contact') & Q(source_system='sfdc') & Q(job_id=job_id)).only('source_record') #& Q(job_id=job_id)
    else:
        # Delta sync: read from the incremental staging collection.
        opps = TempDataDelta.objects(Q(company_id=company_id) & Q(record_type='opportunity') & Q(source_system='sfdc') & Q(job_id=job_id)).only('source_record') #& Q(job_id=job_id)
        contacts = TempDataDelta.objects(Q(company_id=company_id) & Q(record_type='contact') & Q(source_system='sfdc') & Q(job_id=job_id)).only('source_record') #& Q(job_id=job_id)
    # Unwrap the staged documents down to the raw SFDC source records.
    oppListTemp = list(opps)
    oppList = [i['source_record'] for i in oppListTemp]
    contactListTemp = list(contacts)
    contactList = [i['source_record'] for i in contactListTemp]
    try:
        allOpps = oppList #['records']
        # below code copied from contacts.tasks
        for newContact in contactList: #['records']:
            if 'OpportunityContactRoles' not in newContact or newContact['OpportunityContactRoles'] is None: # if this contact has no opportunities
                continue # move to next contact
            # company_id = request.user.company_id
            sfdc_account_id = None
            # Resolve each OpportunityContactRole to the full opportunity
            # record fetched separately into allOpps.
            thisLeadsOppsIds = newContact['OpportunityContactRoles']['records']
            thisLeadsOpps = []
            for opp in thisLeadsOppsIds: #loop through all the Opp records in the Contact record
                print 'trying for opp with id ' + str(opp['OpportunityId'])
                thisOpp = next((x for x in allOpps if x['Id'] == opp['OpportunityId']), None) # if this opp is found in the list of opps retrieved separately
                if thisOpp is not None: # if found
                    print 'found this opp'
                    # NOTE: keeps the AccountId of the last matched opp.
                    sfdc_account_id = thisOpp['AccountId']
                    thisLeadsOpps.append(thisOpp) #add it
            sfdc_contact_Id = str(newContact['Id'])
            print 'contact id is ' + sfdc_contact_Id
            # sfdc_mkto_id = str(newLead['sfdcLeadId']) #check if there is a corresponding lead from MKTO
            existingLeadMkto = None
            existingLeadSfdc = None
            existingLeadHspt = None
            # Match 1: a Lead already keyed by this SFDC contact id.
            existingContact = Lead.objects(Q(company_id=company_id) & Q(sfdc_contact_id=sfdc_contact_Id)).first()
            if existingContact is not None: # we found this contact already in the DB
                print ' eC is not none'
                if 'sfdc' not in existingContact.opportunities:
                    # First sfdc opportunities for this lead: create the bucket.
                    opportunities = {}
                    opportunities['sfdc'] = []
                    opportunities['sfdc'].extend(thisLeadsOpps)
                    existingContact.update(opportunities__sfdc = opportunities['sfdc'])
                    existingContact.update(sfdc_account_id = sfdc_account_id)
                    print 'just updated acct id 1'
                else:
                    for newOpp in thisLeadsOpps:
                        print ' nefre get'
                        if not any (e.get('Id', None) == newOpp['Id'] for e in existingContact.opportunities['sfdc']): # does an opportunity with this Id already exist
                            opportunities = existingContact.opportunities['sfdc']
                            opportunities.append(newOpp)
                            existingContact.sfdc_account_id = sfdc_account_id
                            # save this opportunity
                            existingContact.update(opportunities__sfdc = opportunities)
                            existingContact.update(sfdc_account_id = sfdc_account_id)
                            print 'just updated acct id 2'
                        else: #this opp already exists
                            # Replace the stored copy with the fresh record.
                            for i in range(len(existingContact.opportunities['sfdc'])):
                                if existingContact.opportunities['sfdc'][i]['Id'] == newOpp['Id']:
                                    existingContact.opportunities['sfdc'][i] = newOpp
                            existingContact.sfdc_account_id = sfdc_account_id
                            existingContact.save()
                            print 'just updated acct id 3'
            elif existingContact is None: # this lead does not exist
                print ' eC is much none'
                # Match 2: a Lead created from an SFDC lead that was converted
                # into this contact.
                existingLeadSfdc = Lead.objects(Q(company_id=company_id) & Q(leads__sfdc__ConvertedContactId=sfdc_contact_Id)).first()
                if existingLeadSfdc is not None:
                    if 'sfdc' not in existingLeadSfdc.opportunities:
                        opportunities = {}
                        opportunities['sfdc'] = []
                        opportunities['sfdc'].extend(thisLeadsOpps)
                        existingLeadSfdc.update(opportunities__sfdc = opportunities['sfdc'])
                        existingLeadSfdc.update(sfdc_account_id = sfdc_account_id)
                        print 'just updated acct id 4'
                    else:
                        for newOpp in thisLeadsOpps:
                            if not any (e.get('Id', None) == newOpp['Id'] for e in existingLeadSfdc.opportunities['sfdc']): # does an opportunity with this Id already exist
                                opportunities = existingLeadSfdc.opportunities['sfdc']
                                opportunities.append(newOpp)
                                # save this opportunity
                                existingLeadSfdc.update(opportunities__sfdc = opportunities)
                                existingLeadSfdc.update(sfdc_account_id = sfdc_account_id)
                                print 'just updated acct id 5'
                            else: #this opp already exists
                                for i in range(len(existingLeadSfdc.opportunities['sfdc'])):
                                    if existingLeadSfdc.opportunities['sfdc'][i]['Id'] == newOpp['Id']:
                                        existingLeadSfdc.opportunities['sfdc'][i] = newOpp
                                existingLeadSfdc.sfdc_account_id = sfdc_account_id
                                existingLeadSfdc.save()
                                print 'just updated acct id 6'
                else:
                    # Match 3: a Marketo-sourced Lead cross-referencing this
                    # SFDC contact id.
                    existingLeadMkto = Lead.objects(Q(company_id=company_id) & Q(leads__mkto__sfdcContactId=sfdc_contact_Id)).first()
                    if existingLeadMkto is not None: # we found a MKto lead record which is matched to this new Sfdc lead
                        if 'sfdc' not in existingLeadMkto.opportunities:
                            opportunities = {}
                            opportunities['sfdc'] = []
                            opportunities['sfdc'].extend(thisLeadsOpps)
                            existingLeadMkto.update(opportunities__sfdc = opportunities['sfdc'])
                            existingLeadMkto.update(sfdc_account_id = sfdc_account_id)
                        else:
                            for newOpp in thisLeadsOpps:
                                if not any (e.get('Id', None) == newOpp['Id'] for e in existingLeadMkto.opportunities['sfdc']): # does an opportunity with this Id already exist
                                    opportunities = existingLeadMkto.opportunities['sfdc']
                                    opportunities.append(newOpp)
                                    # save this opportunity
                                    existingLeadMkto.update(opportunities__sfdc = opportunities)
                                    existingLeadMkto.update(sfdc_account_id = sfdc_account_id)
                                else: #this opp already exists
                                    for i in range(len(existingLeadMkto.opportunities['sfdc'])):
                                        if existingLeadMkto.opportunities['sfdc'][i]['Id'] == newOpp['Id']:
                                            existingLeadMkto.opportunities['sfdc'][i] = newOpp
                                    existingLeadMkto.sfdc_account_id = sfdc_account_id
                                    existingLeadMkto.save()
                    # Match 4: a HubSpot-sourced Lead cross-referencing this
                    # SFDC contact id (checked even when a Marketo match was
                    # found — NOTE(review): presumably intentional, confirm).
                    existingLeadHspt = Lead.objects(Q(company_id=company_id) & Q(leads__hspt__properties__salesforcecontactid=sfdc_contact_Id)).first()
                    if existingLeadHspt is not None: # we found a Hspt lead record which is matched to this new Sfdc lead
                        if 'sfdc' not in existingLeadHspt.opportunities:
                            opportunities = {}
                            opportunities['sfdc'] = []
                            opportunities['sfdc'].extend(thisLeadsOpps)
                            existingLeadHspt.update(opportunities__sfdc = opportunities['sfdc'])
                            existingLeadHspt.update(sfdc_account_id = sfdc_account_id)
                        else:
                            for newOpp in thisLeadsOpps:
                                if not any (e.get('Id', None) == newOpp['Id'] for e in existingLeadHspt.opportunities['sfdc']): # does an opportunity with this Id already exist
                                    opportunities = existingLeadHspt.opportunities['sfdc']
                                    opportunities.append(newOpp)
                                    # save this opportunity
                                    existingLeadHspt.update(opportunities__sfdc = opportunities)
                                    existingLeadHspt.update(sfdc_account_id = sfdc_account_id)
                                else: #this opp already exists
                                    for i in range(len(existingLeadHspt.opportunities['sfdc'])):
                                        if existingLeadHspt.opportunities['sfdc'][i]['Id'] == newOpp['Id']:
                                            existingLeadHspt.opportunities['sfdc'][i] = newOpp
                                    existingLeadHspt.sfdc_account_id = sfdc_account_id
                                    existingLeadHspt.save()
                if existingLeadSfdc is None and existingLeadMkto is None and existingLeadHspt is None and existingContact is None: # no matches found so save new record
                    lead = Lead()
                    lead.sfdc_contact_id = sfdc_contact_Id
                    lead.company_id = company_id
                    lead.source_first_name = newContact['FirstName']
                    lead.source_last_name = newContact['LastName']
                    lead.source_email = newContact['Email']
                    lead.source_created_date = str(newContact['CreatedDate'])
                    lead.source_source = newContact['LeadSource']
                    lead.sfdc_account_id = sfdc_account_id
                    lead.save()
                    contacts = {}
                    contacts['sfdc'] = newContact
                    #lead.contacts["sfdc"] = newContact
                    lead.update(contacts__sfdc = contacts['sfdc'])
                    print 'just updated acct id 7'
                    opportunities = {}
                    opportunities['sfdc'] = []
                    opportunities['sfdc'].extend(thisLeadsOpps)
                    lead.update(opportunities__sfdc = opportunities['sfdc'])
        #new code added on 1/24/2016 - add each opportunity to related account (to capture opps with no contacts)
        for opp in allOpps:
            account = Account.objects(Q(company_id=company_id) & Q(sfdc_id=opp['AccountId'])).first()
            if account is None:
                # Account not synced yet: fetch it from Salesforce on demand
                # and persist it before attaching the opportunity.
                print 'no account for opp with ID ' + str(opp['Id'])
                sfdc = Salesforce()
                accountList = sfdc.get_single_account(user_id, company_id, opp['AccountId'])
                for newAccount in accountList['records']:
                    account = _saveSfdcNewAccount(newAccount['Id'], newAccount, None, company_id)
            if 'opportunities' not in account:
                account.update(opportunities = {})
            if 'sfdc' not in account.opportunities: #no opps exist for this account, so add this opp
                opportunities = {}
                opportunities['sfdc'] = []
                opportunities['sfdc'].append(opp)
                account.update(opportunities__sfdc = opportunities['sfdc'])
            else:
                if not any (e.get('Id', None) == opp['Id'] for e in account.opportunities['sfdc']): # there is no opportunity with this Id already exist
                    opportunities = account.opportunities['sfdc']
                    opportunities.append(opp)
                    # save this opportunity
                    account.update(opportunities__sfdc = opportunities)
                else: # this opp already exists
                    for i in range(len(account.opportunities['sfdc'])):
                        if account.opportunities['sfdc'][i]['Id'] == opp['Id']:
                            account.opportunities['sfdc'][i] = opp
                    account.save()
        # code commented out since we are no longer getting only Mkto related opportunities into Cx
        # (a large block of dead, commented-out code that matched opportunities
        # by account id was removed here; see version control history.)
    except Exception as e:
        print 'Error while saving SFDC opportunities ' + str(e)
        send_notification(dict(type='error', success=False, message=str(e)))
def getAccounts(request, id):
    """Paginated JSON list of Account documents for the caller's company.

    Query-string parameters: page_number, per_page, start_date / end_date
    (epoch milliseconds), subview (read but unused here), and superfilters —
    a JSON object whose optional 'date_types' entry names the account date
    field to range-filter on.  Each account's 'leads' references are replaced
    with slim Lead projections (first/last name and id) before serializing.
    Returns {'count', 'results', 'type'}; on any error returns {'Error': msg}.
    """
    try:
        company_id = request.user.company_id
        page_number = int(request.GET.get('page_number'))
        items_per_page = int(request.GET.get('per_page'))
        offset = (page_number - 1) * items_per_page
        start_date = int(request.GET.get('start_date'))
        end_date = int(request.GET.get('end_date'))
        sub_view = request.GET.get('subview')  # currently unused
        superfilters = request.GET.get('superfilters')
        super_filters = json.loads(superfilters)
        #print 'super filters are ' + str(super_filters)
        date_field = None
        if super_filters is not None:
            if 'date_types' in super_filters: # need to filter by a certain type of date
                date_field = super_filters['date_types']
        # start_date/end_date arrive in milliseconds; convert to datetimes.
        # NOTE(review): int() above means these can never be None here.
        if start_date is not None:
            utc_day_start_epoch = datetime.fromtimestamp(float(start_date / 1000))
            #utc_day_start_epoch = str('{0:f}'.format(utc_day_start_epoch).rstrip('0').rstrip('.'))
            print 'utc start epoch is ' + str(utc_day_start_epoch)
            #local_start_date = get_current_timezone().localize(local_start_date_naive, is_dst=None)
            #print 'start2 is ' + str(time.time())
        if end_date is not None:
            utc_day_end_epoch = datetime.fromtimestamp(float(end_date / 1000))
            #utc_day_end_epoch = str('{0:f}'.format(utc_day_end_epoch).rstrip('0').rstrip('.'))
            print 'utc end epoch is ' + str(utc_day_end_epoch)
        # Dates are stored/compared as strings in this SFDC-style format.
        utc_day_start_string = datetime.strftime(utc_day_start_epoch, '%Y-%m-%dT%H-%M-%S.000+0000')
        utc_day_end_string = datetime.strftime(utc_day_end_epoch, '%Y-%m-%dT%H-%M-%S.000+0000')
        print 'utc start string is ' + str(utc_day_start_string)
        print 'utc end string is ' + str(utc_day_end_string)
        result = []
        company_field_qry = 'company_id'
        #print 'start time was ' + str(time.time())
        # Count via the raw pymongo collection (cheaper than a full queryset).
        collection = Account._get_collection()
        if date_field is None:
            total = collection.find({'company_id': int(company_id)}).count() #.hint('company_id_1')
        else:
            total = collection.find({'company_id': int(company_id), date_field: {'$gte':utc_day_start_string, '$lte':utc_day_end_string}}).count() #.hint('company_id_1')
        # Fetch the requested page through MongoEngine.
        if date_field is None:
            queryset = Account.objects(company_id=company_id).skip(offset).limit(items_per_page)
        else:
            date_field_start_qry = date_field + '__gte'
            date_field_end_qry = date_field + '__lte'
            company_field_qry = 'company_id'
            querydict = {company_field_qry: company_id, date_field_start_qry: utc_day_start_string, date_field_end_qry: utc_day_end_string}
            queryset = Account.objects(**querydict).skip(offset).limit(items_per_page)
        #qlist = list(queryset)
        #print 'start time3 was ' + str(time.time())
        #total = len(qlist)
        #result = qlist[offset:offset+items_per_page]
        #print 'start time4 was ' + str(time.time())
        for account in queryset:
            leadsTemp = []
            leads = account['leads']
            for lead in leads: # each 'lead' here is an object of type {lead_id_type: lead_id} e.g. {'sfdc_contact_id': 1234}
                for k, v in lead.iteritems():
                    # Look up the lead by whatever id-type key it was stored
                    # under, projecting only the fields the UI displays.
                    lead_field_qry = k
                    querydict = {lead_field_qry: v, company_field_qry: company_id}
                    qset = Lead.objects(**querydict).only('source_first_name').only('source_last_name').only('id').first()
                    #print 'qset ' + str(qset)
                    #qset_actual_lead_list_temp = [qset_lead.to_mongo().to_dict() for qset_lead in qset]
                    #for qset_actual_lead in qset_actual_lead_list_temp:
                    leadsTemp.append(qset)
            account['leads'] = leadsTemp
            result.append(account)
        #result.sort(key=lambda account:len(account.leads))
        #print 'qset is ' + str(qlist)
        #print 'start time5 was ' + str(time.time())
        serializer = AccountSerializer(result, many=True)
        #print 'start time6 was ' + str(time.time())
        type = 'accounts'  # NOTE: shadows the builtin 'type' in this scope
        return JsonResponse({'count' : total, 'results': serializer.data, 'type': type})
    except Exception as e:
        print 'exception while getting all accounts ' + str(e)
        return JsonResponse({'Error' : str(e)})
def filterLeadsByDuration(request, id):
    """Paginated JSON list of HubSpot leads in a lifecycle stage, annotated
    with how long each lead has spent in (and between) stages.

    Query-string parameters: start_date / end_date (epoch seconds, localized
    to the current timezone), lead_type (a stage name such as 'MQLs', or
    'All'), query_type ('strict' skips the transition-time computation),
    page_number and per_page.  For each returned lead the HubSpot properties
    are augmented with 'days_in_this_stage' (seconds, despite the name) and,
    unless strict, pairwise transition durations keyed 'OC', 'SO', 'MS',
    'LM', 'SL' plus 'last_stage'.  Returns {'count', 'results'} or
    {'Error': msg}.
    """
    user_id = request.user.id
    company_id = request.user.company_id
    start_date = request.GET.get('start_date')
    end_date = request.GET.get('end_date')
    lead_type = request.GET.get('lead_type')
    query_type = request.GET.get('query_type')
    page_number = int(request.GET.get('page_number'))
    items_per_page = int(request.GET.get('per_page'))
    offset = (page_number - 1) * items_per_page
    # Convert the epoch-seconds bounds to timezone-aware local datetimes.
    if start_date is not None:
        local_start_date_naive = datetime.fromtimestamp(float(start_date))
        local_start_date = get_current_timezone().localize(local_start_date_naive, is_dst=None)
    if end_date is not None:
        local_end_date_naive = datetime.fromtimestamp(float(end_date))
        local_end_date = get_current_timezone().localize(local_end_date_naive, is_dst=None)
    utc_current_date = datetime.utcnow()
    #print 'filter start us ' + str(local_start_date) + ' and edn is ' + str(local_end_date)
    try:
        leads = []
        new_leads = []
        lead_type_temp = ""
        if lead_type is not None:
            # Stage name -> HubSpot property holding the date the lead
            # entered that stage.
            date_field_map = {
                "Subscribers" : 'hs_lifecyclestage_subscriber_date',
                "Leads" : 'hs_lifecyclestage_lead_date',
                "MQLs" : 'hs_lifecyclestage_marketingqualifiedlead_date',
                "SQLs" : 'hs_lifecyclestage_salesqualifiedlead_date',
                "Opportunities" : 'hs_lifecyclestage_opportunity_date',
                "Customers" : 'hs_lifecyclestage_customer_date'
            }
            company_field_qry = 'company_id'
            # for the Contacts Distribution chart
            # Stage name -> HubSpot 'lifecyclestage' property value.
            stage_field_map = {
                "Subscribers" : 'subscriber',
                "Leads" : 'lead',
                "MQLs" : 'marketingqualifiedlead',
                "SQLs" : 'salesqualifiedlead',
                "Opportunities" : 'opportunity',
                "Customers" : 'customer'
            }
            stage_field_qry = 'leads__hspt__properties__lifecyclestage'
            if lead_type != "All":
                # Single stage: filter and paginate directly in the query.
                start_date_field_qry = 'leads__hspt__properties__' + date_field_map[lead_type] + '__gte'
                end_date_field_qry = 'leads__hspt__properties__' + date_field_map[lead_type] + '__lte'
                querydict = {company_field_qry: company_id, start_date_field_qry : local_start_date, end_date_field_qry : local_end_date, stage_field_qry : stage_field_map[lead_type]}
                total = Lead.objects(**querydict).count()
                leads = Lead.objects(**querydict).skip(offset).limit(items_per_page)
            else:
                # 'All': fetch every company lead and filter in Python on the
                # date the lead entered its *current* stage.
                lead_type_temp = "All"
                #start_date_field_qry = 'leads__hspt__properties__createdate__gte'
                #end_date_field_qry = 'leads__hspt__properties__createdate__lte'
                #querydict = {company_field_qry: company_id, start_date_field_qry : local_start_date, end_date_field_qry : local_end_date}
                querydict = {company_field_qry: company_id}
                leads_temp = Lead.objects(**querydict)#.skip(offset).limit(items_per_page)
                for lead_temp in leads_temp:
                    # Map the stored lifecyclestage value back to a stage name.
                    this_lead_stage_temp = lead_temp['leads']['hspt']['properties']['lifecyclestage']
                    for k, v in stage_field_map.items():
                        if v == this_lead_stage_temp:
                            this_lead_stage = k
                            print 'this lead str is ' + str(this_lead_stage) + 'and id is ' + lead_temp['hspt_id']
                            break
                    started_this_stage_date = lead_temp['leads']['hspt']['properties'][date_field_map[this_lead_stage]]
                    #print 'dates ' + str(local_start_date_naive) + ' XX ' + str(started_this_stage_date) + ' XX ' + str(local_end_date_naive)
                    # Compared against the naive bounds here (stored dates are
                    # naive) — unlike the query branch above, which uses the
                    # localized ones.
                    if local_start_date_naive <= started_this_stage_date and started_this_stage_date <= local_end_date_naive:
                        leads.append(lead_temp)
                #we have all the leads for All so now apply offset and items per page
                total = len(leads)
                #print 'total for All is ' + str(total)
                leads = leads[offset:offset + items_per_page]
            #print 'qmap is ' + str(querydict)
            print 'qd2 is ' + str(querydict)
            #print 'in there ' + str(len(leads))
            for lead in leads: # iterate over each lead
                #print ' lead id is ' + lead['hspt_id']
                lead_props = lead['leads']['hspt']['properties']
                if lead_type_temp == "All": # if it is all, find the lead stsage from lead record
                    this_lead_stage = lead_props['lifecyclestage']
                    for stage, stagename in stage_field_map.iteritems():
                        if stagename == this_lead_stage:
                            lead_type = stage
                #print 'lead type is ' + lead_type
                #handle average days in current stage
                if date_field_map[lead_type] not in lead['leads']['hspt']['properties']:
                    raise ValueError("This is not possible")
                started_this_stage_date = lead_props[date_field_map[lead_type]]
                days_in_this_stage = (utc_current_date - started_this_stage_date).total_seconds() #remove conversion to seconds if you want dates; use .days then - no ()
                lead['leads']['hspt']['properties']['days_in_this_stage'] = days_in_this_stage
                if (query_type != "strict"): #only get days in current stage so ignore the below
                    #handle transition days
                    # For each stage, compute the time spent in every earlier
                    # stage-to-stage transition.  stage_date1..5 are the dates
                    # the lead entered Opportunity, SQL, MQL, Lead, Subscriber
                    # respectively; when the immediately-previous stage date is
                    # missing, the current-stage date and the next available
                    # earlier date are used instead.
                    if lead_type == "Customers":
                        stage_date1 = lead_props.get('hs_lifecyclestage_opportunity_date')
                        stage_date2 = lead_props.get('hs_lifecyclestage_salesqualifiedlead_date')
                        stage_date3 = lead_props.get('hs_lifecyclestage_marketingqualifiedlead_date')
                        stage_date4 = lead_props.get('hs_lifecyclestage_lead_date')
                        stage_date5 = lead_props.get('hs_lifecyclestage_subscriber_date')
                        # "OC" = Opportunity -> Customer duration (seconds).
                        if stage_date1 is not None and started_this_stage_date is not None:
                            lead['leads']['hspt']['properties']["OC"] = (started_this_stage_date - stage_date1).total_seconds() # change for number of days
                            lead['leads']['hspt']['properties']["last_stage"] = "Opportunity"
                        elif stage_date1 is None:
                            if stage_date2 is not None:
                                lead['leads']['hspt']['properties']["OC"] = (started_this_stage_date - stage_date2).total_seconds()
                                lead['leads']['hspt']['properties']["last_stage"] = "SQL"
                            else:
                                if stage_date3 is not None:
                                    lead['leads']['hspt']['properties']["OC"] = (started_this_stage_date - stage_date3).total_seconds()
                                    lead['leads']['hspt']['properties']["last_stage"] = "MQL"
                                else:
                                    if stage_date4 is not None:
                                        lead['leads']['hspt']['properties']["OC"] = (started_this_stage_date - stage_date4).total_seconds()
                                        lead['leads']['hspt']['properties']["last_stage"] = "Lead"
                                    else:
                                        if stage_date5 is not None:
                                            lead['leads']['hspt']['properties']["OC"] = (started_this_stage_date - stage_date5).total_seconds()
                                            lead['leads']['hspt']['properties']["last_stage"] = "Subscriber"
                        last_stage = lead['leads']['hspt']['properties']["last_stage"]
                        # "SO" = SQL -> Opportunity duration.
                        if last_stage == "Opportunity":
                            if stage_date2 is not None and stage_date1 is not None:
                                lead['leads']['hspt']['properties']["SO"] = (stage_date1 - stage_date2).total_seconds()
                                last_stage = "SQL"
                            elif stage_date2 is None:
                                if stage_date3 is not None:
                                    lead['leads']['hspt']['properties']["SO"] = (started_this_stage_date - stage_date3).total_seconds()
                                    last_stage = "MQL"
                                else:
                                    if stage_date4 is not None:
                                        lead['leads']['hspt']['properties']["SO"] = (started_this_stage_date - stage_date4).total_seconds()
                                        last_stage = "Lead"
                                    else:
                                        if stage_date5 is not None:
                                            lead['leads']['hspt']['properties']["SO"] = (started_this_stage_date - stage_date5).total_seconds()
                                            last_stage = "Subscriber"
                        # "MS" = MQL -> SQL duration.
                        if last_stage == "SQL":
                            if stage_date3 is not None and stage_date2 is not None:
                                lead['leads']['hspt']['properties']["MS"] = (stage_date2 - stage_date3).total_seconds()
                                last_stage = "MQL"
                            elif stage_date3 is None:
                                if stage_date4 is not None:
                                    lead['leads']['hspt']['properties']["MS"] = (started_this_stage_date - stage_date4).total_seconds()
                                    last_stage = "Lead"
                                else:
                                    if stage_date5 is not None:
                                        lead['leads']['hspt']['properties']["MS"] = (started_this_stage_date - stage_date5).total_seconds()
                                        last_stage = "Subscriber"
                        # "LM" = Lead -> MQL duration.
                        if last_stage == "MQL":
                            if stage_date4 is not None and stage_date3 is not None:
                                lead['leads']['hspt']['properties']["LM"] = (stage_date3 - stage_date4).total_seconds()
                                last_stage = "Lead"
                            elif stage_date4 is None:
                                if stage_date5 is not None:
                                    lead['leads']['hspt']['properties']["LM"] = (started_this_stage_date - stage_date5).total_seconds()
                                    last_stage = "Subscriber"
                        # "SL" = Subscriber -> Lead duration.
                        if last_stage == "Lead":
                            if stage_date5 is not None and stage_date4 is not None:
                                lead['leads']['hspt']['properties']["SL"] = (stage_date4 - stage_date5).total_seconds()
                    elif lead_type == "Opportunities":
                        stage_date2 = lead_props.get('hs_lifecyclestage_salesqualifiedlead_date')
                        stage_date3 = lead_props.get('hs_lifecyclestage_marketingqualifiedlead_date')
                        stage_date4 = lead_props.get('hs_lifecyclestage_lead_date')
                        stage_date5 = lead_props.get('hs_lifecyclestage_subscriber_date')
                        if stage_date2 is not None and started_this_stage_date is not None:
                            lead['leads']['hspt']['properties']["SO"] = (started_this_stage_date - stage_date2).total_seconds()
                            lead['leads']['hspt']['properties']["last_stage"] = "SQL"
                        elif stage_date2 is None:
                            if stage_date3 is not None:
                                lead['leads']['hspt']['properties']["SO"] = (started_this_stage_date - stage_date3).total_seconds()
                                lead['leads']['hspt']['properties']["last_stage"] = "MQL"
                            else:
                                if stage_date4 is not None:
                                    lead['leads']['hspt']['properties']["SO"] = (started_this_stage_date - stage_date4).total_seconds()
                                    lead['leads']['hspt']['properties']["last_stage"] = "Lead"
                                else:
                                    if stage_date5 is not None:
                                        lead['leads']['hspt']['properties']["SO"] = (started_this_stage_date - stage_date5).total_seconds()
                                        lead['leads']['hspt']['properties']["last_stage"] = "Subscriber"
                        last_stage = lead['leads']['hspt']['properties']["last_stage"]
                        if last_stage == "SQL":
                            if stage_date3 is not None and stage_date2 is not None:
                                lead['leads']['hspt']['properties']["MS"] = (stage_date2 - stage_date3).total_seconds()
                                last_stage = "MQL"
                            elif stage_date3 is None:
                                if stage_date4 is not None:
                                    lead['leads']['hspt']['properties']["MS"] = (started_this_stage_date - stage_date4).total_seconds()
                                    last_stage = "Lead"
                                else:
                                    if stage_date5 is not None:
                                        lead['leads']['hspt']['properties']["MS"] = (started_this_stage_date - stage_date5).total_seconds()
                                        last_stage = "Subscriber"
                        if last_stage == "MQL":
                            if stage_date4 is not None and stage_date3 is not None:
                                lead['leads']['hspt']['properties']["LM"] = (stage_date3 - stage_date4).total_seconds()
                                last_stage = "Lead"
                            elif stage_date4 is None:
                                if stage_date5 is not None:
                                    lead['leads']['hspt']['properties']["LM"] = (started_this_stage_date - stage_date5).total_seconds()
                                    last_stage = "Subscriber"
                        if last_stage == "Lead":
                            if stage_date5 is not None and stage_date4 is not None:
                                lead['leads']['hspt']['properties']["SL"] = (stage_date4 - stage_date5).total_seconds()
                        #lead['leads']['hspt']['properties']["last_stage"] = "Subscriber"
                    elif lead_type == "SQLs":
                        stage_date3 = lead_props.get('hs_lifecyclestage_marketingqualifiedlead_date')
                        stage_date4 = lead_props.get('hs_lifecyclestage_lead_date')
                        stage_date5 = lead_props.get('hs_lifecyclestage_subscriber_date')
                        if stage_date3 is not None and started_this_stage_date is not None:
                            lead['leads']['hspt']['properties']["MS"] = (started_this_stage_date - stage_date3).total_seconds()
                            lead['leads']['hspt']['properties']["last_stage"] = "MQL"
                        elif stage_date3 is None:
                            if stage_date4 is not None:
                                lead['leads']['hspt']['properties']["MS"] = (started_this_stage_date - stage_date4).total_seconds()
                                lead['leads']['hspt']['properties']["last_stage"] = "Lead"
                            else:
                                if stage_date5 is not None:
                                    lead['leads']['hspt']['properties']["MS"] = (started_this_stage_date - stage_date5).total_seconds()
                                    lead['leads']['hspt']['properties']["last_stage"] = "Subscriber"
                        last_stage = lead['leads']['hspt']['properties']["last_stage"]
                        if last_stage == "MQL":
                            if stage_date4 is not None and stage_date3 is not None:
                                lead['leads']['hspt']['properties']["LM"] = (stage_date3 - stage_date4).total_seconds()
                                last_stage = "Lead"
                            elif stage_date4 is None:
                                if stage_date5 is not None:
                                    lead['leads']['hspt']['properties']["LM"] = (started_this_stage_date - stage_date5).total_seconds()
                                    last_stage = "Subscriber"
                        if last_stage == "Lead":
                            if stage_date5 is not None and stage_date4 is not None:
                                lead['leads']['hspt']['properties']["SL"] = (stage_date4 - stage_date5).total_seconds()
                        #lead['leads']['hspt']['properties']["last_stage"] = "Subscriber"
                    elif lead_type == "MQLs":
                        stage_date4 = lead_props.get('hs_lifecyclestage_lead_date')
                        stage_date5 = lead_props.get('hs_lifecyclestage_subscriber_date')
                        if stage_date4 is not None and started_this_stage_date is not None:
                            lead['leads']['hspt']['properties']["LM"] = (started_this_stage_date - stage_date4).total_seconds()
                            lead['leads']['hspt']['properties']["last_stage"] = "Lead"
                        elif stage_date4 is None:
                            if stage_date5 is not None:
                                lead['leads']['hspt']['properties']["LM"] = (started_this_stage_date - stage_date5).total_seconds()
                                lead['leads']['hspt']['properties']["last_stage"] = "Subscriber"
                        last_stage = lead['leads']['hspt']['properties']["last_stage"]
                        if last_stage == "Lead":
                            if stage_date5 is not None and stage_date4 is not None:
                                lead['leads']['hspt']['properties']["SL"] = (stage_date4 - stage_date5).total_seconds()
                        #lead['leads']['hspt']['properties']["last_stage"] = "Subscriber"
                    elif lead_type == "Leads":
                        stage_date5 = lead_props.get('hs_lifecyclestage_subscriber_date')
                        if stage_date5 is not None and started_this_stage_date is not None:
                            lead['leads']['hspt']['properties']["SL"] = (started_this_stage_date - stage_date5).total_seconds()
                            lead['leads']['hspt']['properties']["last_stage"] = "Subscriber"
                new_leads.append(lead)
            #print 'lead props are' + '\n'.join(str(p) for p in new_leads[0].leads["hspt"]["properties"])
            #print 'old lead props are' + '\n'.join(str(p) for p in lead)
            serializer = LeadSerializer(new_leads, many=True)
            return JsonResponse({'count' : total, 'results': serializer.data})
        else: #lead_type is None - not allowed here
            return JsonResponse({'Error' : 'Lead Type cannot be empty'})
    except Exception as e:
        return JsonResponse({'Error' : str(e)})
def create_lead_from_file(validated_rows, invalid_rows, user_id, source):
    """Create Lead records from rows parsed out of an uploaded file.

    Parameters:
        validated_rows -- iterable of dicts (one per file row) that passed validation
        invalid_rows   -- rows that failed validation (currently unused here)
        user_id        -- id of the User who uploaded the file; stamped as created_by
        source         -- origin of the upload (currently unused here)

    A row is skipped when a Lead with the same title already exists or when
    its email does not match the pattern below.  A failure while saving one
    row is printed and does not abort the remaining rows.
    """
    # Raw string so the \. escapes reach the regex engine unmangled.
    email_regex = r'^[_a-zA-Z0-9-]+(\.[_a-zA-Z0-9-]+)*@[a-zA-Z0-9-]+(\.[a-zA-Z0-9-]+)*(\.[a-zA-Z]{2,4})$'
    user = User.objects.get(id=user_id)
    for row in validated_rows:
        # De-duplicate by title.
        if not Lead.objects.filter(title=row.get('title')).exists():
            # Guard against a missing email key: re.match(pattern, None)
            # would raise TypeError and abort the whole import.
            email = row.get('email') or ''
            if re.match(email_regex, email) is not None:
                try:
                    lead = Lead()
                    # Slices enforce the Lead model's max column lengths.
                    lead.title = row.get('title', '')[:64]
                    lead.first_name = row.get('first name', '')[:255]
                    lead.last_name = row.get('last name', '')[:255]
                    lead.website = row.get('website', '')[:255]
                    lead.email = email
                    lead.phone = row.get('phone', '')
                    lead.address_line = row.get('address', '')[:255]
                    lead.city = row.get('city', '')[:255]
                    lead.state = row.get('state', '')[:255]
                    lead.postcode = row.get('postcode', '')[:64]
                    lead.country = row.get('country', '')[:3]
                    lead.description = row.get('description', '')
                    lead.status = row.get('status', '')
                    lead.account_name = row.get('account_name', '')[:255]
                    lead.created_from_site = False
                    lead.created_by = user
                    lead.save()
                except Exception as e:
                    # BUG FIX: was "except e:", which raises NameError instead
                    # of catching the save error; log and continue.
                    print(e)
def saveSfdcContactsToMaster(user_id=None, company_id=None, job_id=None, run_type=None):
    """Merge staged Salesforce (SFDC) contacts into the master Lead collection.

    Reads contact records staged by a prior sync job (TempData for an
    "initial" run, TempDataDelta otherwise), then for each contact tries to
    attach it to an existing Lead document, matching in this order:
      1. a Lead already holding this sfdc_contact_id,
      2. an SFDC lead whose ConvertedContactId equals this contact's Id,
      3. a Marketo lead whose sfdcContactId matches,
      4. a HubSpot lead whose salesforcecontactid property matches.
    If no match is found, a brand-new Lead is created. Any exception in the
    merge loop is reported via send_notification rather than raised.

    NOTE(review): user_id is accepted but never used in this body — presumably
    kept for a uniform task signature; confirm against callers.
    """
    # delete later
    # job_id = ObjectId("569adcfc8afb00205c799f28")
    # Pick the staging collection: full snapshot vs. delta, scoped to this
    # company, record type, source system and sync job.
    if run_type == "initial":
        contacts = TempData.objects(
            Q(company_id=company_id) & Q(record_type="contact") & Q(source_system="sfdc") & Q(job_id=job_id)
        ).only(
            "source_record"
        )  # & Q(job_id=job_id)
    else:
        contacts = TempDataDelta.objects(
            Q(company_id=company_id) & Q(record_type="contact") & Q(source_system="sfdc") & Q(job_id=job_id)
        ).only(
            "source_record"
        )  # & Q(job_id=job_id)
    print "co id is " + str(company_id)
    # Materialize the queryset and unwrap the raw SFDC payloads.
    contactListTemp = list(contacts)
    contactList = [i["source_record"] for i in contactListTemp]
    # print 'saving sfdc contacts'
    try:
        # Get the custom field name for Contact Status from the company's
        # integration mapping, if one was configured.
        existingIntegration = CompanyIntegration.objects(company_id=company_id).first()
        contact_status = None
        if "sfdc" in existingIntegration["integrations"]:
            contact_status = existingIntegration["mapping"].get("sfdc_contact_status", None)
        for newContact in contactList:  # ['records']:
            # company_id = request.user.company_id
            sfdc_contact_Id = str(newContact["Id"])
            print "contact id is " + sfdc_contact_Id
            # sfdc_mkto_id = str(newLead['sfdcLeadId']) #check if there is a corresponding lead from MKTO
            # Match candidates; reset every iteration so the final
            # "no matches" check below sees this contact's results only.
            existingLeadMkto = None
            existingLeadSfdc = None
            existingLeadHspt = None
            # Match 1: a Lead already linked to this SFDC contact id.
            existingContact = Lead.objects(Q(company_id=company_id) & Q(sfdc_contact_id=sfdc_contact_Id)).first()
            if existingContact is not None:  # we found this contact already in the DB
                print "found contact match for " + str(sfdc_contact_Id)
                if "sfdc" in existingContact.contacts:
                    # Refresh the denormalized source_* fields from the new payload.
                    existingContact.source_first_name = newContact["FirstName"]
                    existingContact.source_last_name = newContact["LastName"]
                    existingContact.source_email = newContact["Email"]
                    # existingContact.source_created_date = str(newContact['CreatedDate'])
                    existingContact.source_source = newContact["LeadSource"]
                    if contact_status is not None and contact_status in newContact:
                        existingContact.source_status = newContact[contact_status]
                    existingContact.contacts["sfdc"] = newContact
                    existingContact.save()
                    # Lead.objects(Q(company_id=company_id) & Q(sfdc_contact_id=sfdc_contact_Id)).update(contacts__sfdc=newContact)
                else:
                    # First SFDC payload seen for this lead: just store it.
                    existingContact.contacts["sfdc"] = {}
                    existingContact.contacts["sfdc"] = newContact
                    existingContact.save()
            # elif existingContact is None:  # this lead does not exist
            else:
                # Match 2: an SFDC lead that was converted into this contact.
                existingLeadSfdc = (
                    Lead.objects(Q(company_id=company_id) & Q(leads__sfdc__ConvertedContactId=sfdc_contact_Id))
                    .hint("company_id_1_leads.sfdc.convertedContactId_1")
                    .first()
                )
                if existingLeadSfdc is not None:
                    print "found match for sfdc lead for contact " + str(sfdc_contact_Id)
                    # existingLeadSfdcList = list(existingLeadSfdc)
                    existingLeadSfdc.sfdc_contact_id = sfdc_contact_Id
                    if contact_status is not None and contact_status in newContact:
                        existingLeadSfdc.source_status = newContact[contact_status]
                    # NOTE(review): contacts is re-initialized here, so any
                    # previously stored contact payloads on this lead are
                    # discarded — confirm this reset is intentional.
                    existingLeadSfdc.contacts = {}
                    existingLeadSfdc.contacts["sfdc"] = newContact
                    existingLeadSfdc.save()
                # remove below comments after figuring out how Mkto stored SFDC contact ID
                else:
                    # Match 3: a Marketo lead carrying this SFDC contact id.
                    existingLeadMkto = Lead.objects(
                        Q(company_id=company_id) & Q(leads__mkto__sfdcContactId=sfdc_contact_Id)
                    ).first()
                    if (
                        existingLeadMkto is not None
                    ):  # we found a MKto lead record which is matched to this new Sfdc lead
                        print "found mkto lead" + existingLeadMkto.mkto_id
                        existingLeadMkto.sfdc_contact_id = sfdc_contact_Id
                        # existingLeadMkto.contacts = {}
                        existingLeadMkto.contacts["sfdc"] = newContact
                        existingLeadMkto.save()
                    else:
                        # Match 4: a HubSpot lead whose properties reference
                        # this SFDC contact id.
                        existingLeadHspt = Lead.objects(
                            Q(company_id=company_id) & Q(leads__hspt__properties__salesforcecontactid=sfdc_contact_Id)
                        ).first()
                        if (
                            existingLeadHspt is not None
                        ):  # we found a MKto lead record which is matched to this new Sfdc lead
                            existingLeadHspt.sfdc_contact_id = sfdc_contact_Id
                            existingLeadHspt.contacts = {}
                            existingLeadHspt.contacts["sfdc"] = newContact
                            existingLeadHspt.save()
            # All four lookups missed: create a fresh Lead for this contact.
            if (
                existingLeadSfdc is None
                and existingLeadMkto is None
                and existingLeadHspt is None
                and existingContact is None
            ):  # no matches found so save new record
                lead = Lead()
                lead.sfdc_contact_id = sfdc_contact_Id
                lead.company_id = company_id
                lead.source_first_name = newContact["FirstName"]
                lead.source_last_name = newContact["LastName"]
                lead.source_email = newContact["Email"]
                lead.source_created_date = str(newContact["CreatedDate"])
                lead.source_source = newContact["LeadSource"]
                if contact_status is not None and contact_status in newContact:
                    lead.source_status = newContact[contact_status]
                lead.contacts = {}
                lead.contacts["sfdc"] = newContact
                lead.save()
                # lead = Lead()
                # company_id = request.user.company_id
                # derived_id = 'sfdc_' + str(newLead['Id'])
                # Lead.objects(derived_id = derived_id).modify(upsert=True, new=True, set__leads__sfdc = newLead, set_on_insert__derived_id = derived_id, set_on_insert__company_id = company_id)
                #
                # oldLead = Lead.objects(derived_id = lead.derived_id)
                # if oldLead.count() == 0:
                #     lead.leads["sfdc"] = newLead
                #     lead.save()
                # else:
                #     oldLead.leads["sfdc"] = newLead
                #     Lead.objects(derived_id = lead.derived_id).update(oldLead)
    except Exception as e:
        # Best-effort: report the failure instead of raising, so the sync
        # job itself does not crash.
        print "exception while saving SFDC contact " + str(e)
        send_notification(dict(type="error", success=False, message=str(e)))