Example #1
    def get_queryset(self):
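        # Build a map of the company's existing integrations (code -> access_token) via map/reduce,
        # then return SuperIntegration objects that are either not yet configured ('new') or already
        # configured ('existing') for this company.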
        
        status = self.kwargs['status']
        company_id = self.request.user.get_company()
        print 'co id is ' + str(company_id)
        record_id = CompanyIntegration.objects(company_id = company_id ).only('id').first()
        existingIntegration = {}
        existingIntegration['integrations'] = {}
        
        map = Code("function () {"
                 "  for (var key in this.integrations) emit(key, this.integrations[key]['access_token']); } ")
        
        reduce = Code("function (key, values) { return null; } ")
        
        results = CompanyIntegration.objects(company_id=company_id).map_reduce(map, reduce, "inline")
        results = list(results)
        for result in results:
            existingIntegration['integrations'][result.key] = {'access_token': result.value}
              
        
#         for key in existingIntegration.integrations.keys():
#             if 'metadata' in existingIntegration.integrations[key]:
#                 existingIntegration.integrations[key]['metadata'] = {} #empty the metadata object so that excess data is not sent back
        if existingIntegration['integrations'] != {}: # assume there's only one entry
            queryset = None # guard so an unexpected status value cannot hit an undefined queryset
            if status == 'new':
                queryset = SuperIntegration.objects(code__nin=existingIntegration['integrations'].keys())
                for obj in queryset:
                    #company_info = CompanyGenericIntegrationSerializer()
                    obj.company_info = {}
            elif status == 'existing':
                queryset = SuperIntegration.objects(code__in=existingIntegration['integrations'].keys())
                for obj in queryset:
                    #company_info = CompanyGenericIntegrationSerializer(existingIntegration.integrations[obj.code])
                    # print "code is " + str(existingIntegration.integrations[obj.code])
                    
                    obj.company_info = existingIntegration['integrations'][obj.code]
                    obj.company_info["code"] = obj.code
                    obj.company_info['record_id'] = json.dumps(record_id['id'], default=json_util.default)
            return queryset
        # if we are here, there are no records for the company 
        if status == 'new':
            print 'new status'
            queryset_new = SuperIntegration.objects.all()    
        elif status == 'existing':
            queryset_new = None    
        return queryset_new
Example #2
def get_tw_handles_buffer(request, id):
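    # Return the company's Twitter profiles from Buffer as JSON, using the stored
    # Buffer ('bufr') credentials on the company's integration record.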
    try:
        company = Company.objects.filter(company_id=id).first()
        
        existingIntegration = CompanyIntegration.objects(company_id = id ).first()
        if 'bufr' in existingIntegration['integrations']: # if Buffer is present and configured
            client_id = existingIntegration['integrations']['bufr']['client_id']
            client_secret = existingIntegration['integrations']['bufr']['client_secret']
            access_token = existingIntegration['integrations']['bufr']['access_token']
            buffer = Buffer()
            api = buffer.get_api(client_id=client_id, client_secret=client_secret, access_token=access_token)
            profiles = buffer.get_twitter_profiles(api)
            results = []
            for profile in profiles:
                new_profile = {}
                new_profile['id'] = profile['id']
                new_profile['service'] = profile['service']
                new_profile['service_id'] = profile['service_id']
                new_profile['service_username'] = profile['service_username']
                results.append(new_profile)
            #serializedList = BufferProfileSerializer(profiles, many=True)
            return JsonResponse({'results' :results})
        else:
            return JsonResponse({'error' : 'No integration found with Buffer'})
    except Exception as e:
        return JsonResponse({'Error' : str(e)})
Example #3
def publishMl(request, id, masterlist_id):
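    # Publish all tweets from a TweetMasterList to the configured Buffer profile,
    # then mark the list as published with the current UTC timestamp.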
    try:
        company = Company.objects.filter(company_id=id).first()
        ml = TweetMasterList.objects(Q(company=company.id) & Q(id=masterlist_id)).first()
        
        if ml is None:
            return HttpResponse("Could not find the Master List for publishing", status=status.HTTP_400_BAD_REQUEST)
        tweets = ml['tweets']
        shuffle(tweets)
        #buffer_profile_id = '558f150b7409ab382f11a39e' #change to parameter later
        
        existingIntegration = CompanyIntegration.objects(company_id = id ).first()
        if 'bufr' in existingIntegration['integrations']: # if Buffer is present and configured
            client_id = existingIntegration['integrations']['bufr']['client_id']
            client_secret = existingIntegration['integrations']['bufr']['client_secret']
            access_token = existingIntegration['integrations']['bufr']['access_token']
            buffer = Buffer()
            api = buffer.get_api(client_id=client_id, client_secret=client_secret, access_token=access_token)
            #profile = buffer.get_twitter_profile(api)
            profile = buffer.get_twitter_profile_by_id(api, ml.buffer_profile_id)
            #print 'posting to profile ' + profile['id']
            for tweet in tweets:
                try:
                    post = quote_plus(tweet['text'])
                    buffer.post_to_twitter(api, post, profile)
                except Exception as e:
                    continue
            TweetMasterList.objects(Q(company=company.id) & Q(id=masterlist_id)).update(published=True)
            TweetMasterList.objects(Q(company=company.id) & Q(id=masterlist_id)).update(published_date=datetime.utcnow())
            return HttpResponse("Tweets posted", status=status.HTTP_200_OK)
        else:
            return HttpResponse("No publishing integration found", status=status.HTTP_400_BAD_REQUEST)
    
    except Exception as e:
        return JsonResponse({'Error' : str(e)})
Example #4
def filterTwInteractions(request, id):
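    # Paginated filter endpoint for Twitter interactions: resolves which integrated system
    # matches the requested system_type and delegates to the Buffer-specific ('bufr') filter.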
    user_id = request.user.id
    company_id = request.user.company_id
    start_date = request.GET.get('start_date')
    end_date = request.GET.get('end_date')
    interaction_type = request.GET.get('interaction_type')
    series_type = request.GET.get('series_type')
    query_type = request.GET.get('query_type')
    page_number = int(request.GET.get('page_number'))
    items_per_page = int(request.GET.get('per_page'))
    system_type = request.GET.get('system_type')
    chart_name = request.GET.get('chart_name')
    offset = (page_number - 1) * items_per_page
    
    existingIntegration = CompanyIntegration.objects(company_id = company_id ).first()
    try:   
        code = None
        if existingIntegration is not None:
            for source in existingIntegration.integrations.keys():
                defined_system_type = SuperIntegration.objects(Q(code = source) & Q(system_type = system_type)).first()
                if defined_system_type is not None:
                    code = source
            #print 'found code' + str(code)
                  
        if code is None:
            raise ValueError("No integrations defined")  
        elif code == 'bufr':
            result = filtertwInteractionsBufr(user_id=user_id, company_id=company_id, start_date=start_date, end_date=end_date, interaction_type=interaction_type, series_type=series_type, query_type=query_type, page_number=page_number, items_per_page=items_per_page, system_type=system_type, offset=offset, code=code, chart_name=chart_name)
        else:
            result =  'Nothing to report'
        return result
    except Exception as e:
        return JsonResponse({'Error' : str(e)})
Example #5
def retrieveFbokAdCampaignStats(user_id=None, company_id=None, job_id=None, run_type=None, sinceDateTime=None):
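    # Pull Facebook ad campaign stats for the company via the stored 'fbok' integration and
    # hand them off to saveFbokAdCampaignStats; errors are reported via send_notification.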
    try:
        print 'starting retrieveFbokAdCampaignStats for company ' + str(company_id)
        existingIntegration = CompanyIntegration.objects(company_id = company_id).first()
        if 'fbok' not in existingIntegration['integrations']: # Facebook is not present or not configured
            print 'did not find fbok'
            raise Exception('Facebook integration not found')
        integration = existingIntegration.integrations['fbok']
        if integration['access_token'] is None:
            raise Exception('Facebook access token not found')
        fbok = Facebook(integration['host'], integration['client_id'], integration['client_secret'], integration['redirect_uri'])
        if fbok is None:
            raise Exception('Facebook object could not be created')
        
        print 'calling campaigns'
        campaigns = fbok.get_campaign_stats(company_id, run_type)
        print 'found FB campaigns: ' + str(campaigns)
        saveFbokAdCampaignStats(user_id=user_id, company_id=company_id, results=campaigns, job_id=job_id, run_type=run_type)
                
        
    except Exception as e:
        print 'exception was ' + str(e)
        send_notification(dict(
             type='error',
             success=False,
             message=str(e)
            ))      
Example #6
def getUserSlackMembership(request, company_id):
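    # Return the Slack channels, groups, IMs and users visible to the current user, flagging
    # whether company-level or user-level Slack authorization is still needed.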
    try:
        #print 'in user room'
        user_id = request.user.id
        #company_id = request.user.company
        slck_auth_needed = False
        slck_user_auth_needed = False
#         company = Company.objects(company_id=company_id).first()
#         company_id = company.id
        print 'company id is ' + str(company_id)
        existingIntegration= CompanyIntegration.objects(company_id=company_id).first()
        if existingIntegration is None or 'slck' not in existingIntegration['integrations']:
            return JsonResponse({'slck_auth_needed' : True})
        #token = existingIntegration['integrations']['slck']
        userOauthRecord = UserOauth.objects(user_id= ObjectId(user_id)).first()
        if userOauthRecord is None:
            return JsonResponse({'slck_user_auth_needed' : True})
        if 'slck_access_token' in userOauthRecord and userOauthRecord['slck_access_token'] is not None and userOauthRecord['slck_access_token'] != "":
            token = userOauthRecord['slck_access_token']
        else:
            return JsonResponse({'slck_user_auth_needed' : True})
            
        slck = Slack(None, None, None, None, token)
        channels = json.loads(slck.api_call("channels.list")) 
        groups = json.loads(slck.api_call("groups.list")) 
        ims = json.loads(slck.api_call("im.list")) 
        users = json.loads(slck.api_call("users.list")) 
        if not users['ok']:
            raise ValueError('Error while retrieving users from Slack')
        rtm = slck.rtm_connect()
        return JsonResponse({'slack_channels': channels, 'slack_groups': groups, 'slack_ims': ims, 'users': users['members'], 'rtm': rtm, 'slck_auth_needed': False, 'slck_user_auth_needed': slck_user_auth_needed })  
    except Exception as e:
        return Response('Error: ' + str(e))   
Example #7
def getViews(request, company_id):
    company_id = request.user.company_id
    existingIntegration = CompanyIntegration.objects(company_id = company_id ).first()
    try:   
        code = None
        views_temp = []
        views = []
#         if existingIntegration is not None:
#             sources = set()
#             defined_system_types = set()
#             for source in existingIntegration.integrations.keys():
#                 sources.add(source)
#             for source in sources:
#                 #print 'source is ' + str(source)
#                 defined_system = SuperIntegration.objects(code = source).first()
#                 defined_system_types.add(defined_system.system_type)
#             for defined_system_type in defined_system_types:
#                 #print 'def system is ' + str(defined_system.system_type)
#                 if defined_system_type is not None:
#                     dashboards_temp = SuperDashboards.objects(Q(system_type = defined_system_type) & Q(status__ne='Inactive')).all()
#                     for dashboard_temp in list(dashboards_temp):
#                         serializer = SuperDashboardsSerializer(dashboard_temp, many=False) 
#                         dashboards.append(serializer.data)
        views_temp = SuperViews.objects().all()
        for view_temp in list(views_temp):
            serializer = SuperViewsSerializer(view_temp, many=False) 
            views.append(serializer.data)
        return JsonResponse({"results": views}, safe=False)
    except Exception as e:
        return JsonResponse({'Error' : str(e)})
Example #8
 def list(self, request, id=None): 
     try:
         company = CompanyIntegration.objects(company_id=id).only('initial_run_in_process').first()
         serializedList = CompanyIntegrationSerializer(company, many=False)
         return Response(serializedList.data)
     except Exception as e:
         return Response(str(e))    
Example #9
def retrieveSfdcOpportunities(user_id=None, company_id=None, job_id=None, run_type=None, sinceDateTime=None):  
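    # Fetch opportunities changed in Salesforce since sinceDateTime (default: last 30 days),
    # fetch the related contacts, save both, and record a Notification for the user.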
    try:
        #company_id = request.user.company_id
        existingIntegration = CompanyIntegration.objects(company_id = company_id).first()
# code commented out since we are no longer getting only Mkto related opportunities into Cx
#         if existingIntegration is not None and 'mkto' in existingIntegration['integrations']: # if this system is connected to Marketo
#             company_qry = 'company_id'
#             type_field_qry = 'leads__mkto__exists'
#             sfdc_account_field_qry = 'leads__mkto__sfdcAccountId__ne'
#             querydict = {company_qry: company_id, type_field_qry: True, sfdc_account_field_qry: None}
#             leads_with_sfdc_opps = Lead.objects(**querydict).only('mkto_id').only('leads__mkto__sfdcAccountId')
#             
        sfdc = Salesforce()
# code commented out since we are no longer getting only Mkto related opportunities into Cx
#         account_list = '('
#         for lead in leads_with_sfdc_opps:
#             account_list += '\'' + lead['leads']['mkto']['sfdcAccountId'] + '\'' + ', '
#         account_list = account_list[:-2]
#         account_list += ')'
    
        if sinceDateTime is None:
            sinceDateTime = (datetime.now() - timedelta(days=30)).date()
        oppList = sfdc.get_opportunities_delta(user_id, company_id, _str_from_date(sinceDateTime), run_type)
        print 'got opps ' + str(len(oppList['records']))
        #create list of Opp IDs to send for get_contacts call
        oppid_list = '('
        for opp in oppList['records']:
            oppid_list += '\'' + opp['Id'] + '\'' + ', '
        oppid_list = oppid_list[:-2]
        oppid_list += ')'
        contactList = sfdc.get_contacts_for_opportunities(user_id, company_id, oppid_list) # needed because SFDC does not have the Contact ID within the Opp record
        print 'got contacts for opps ' + str(len(contactList['records']))
        saveSfdcOpportunities(user_id=user_id, company_id=company_id, oppList=oppList, contactList=contactList, job_id=job_id, run_type=run_type)
        try:
            message = 'Opportunities retrieved from Salesforce'
            notification = Notification()
            #notification.company_id = company_id
            notification.owner = user_id
            notification.module = 'Opportunities'
            notification.type = 'Background task' 
            notification.method = os.path.basename(__file__)
            notification.message = message
            notification.success = True
            notification.read = False
            notification.save()
        except Exception as e:
            send_notification(dict(
                 type='error',
                 success=False,
                 message=str(e)
                ))    
        return oppList
    except Exception as e:
        send_notification(dict(type='error', success=False, message=str(e)))      
Example #10
    def get(self, *args, **kwargs):
        template_name = 'integrations/new.html'
        form_class = IntegrationBaseForm

        
        instance = CompanyIntegration.objects(id=self.kwargs['id']).first()
        if instance is not None:
            queryset = instance.integrations[self.kwargs['code']]
            #print 'qset ' + queryset["host"]
            form = IntegrationBaseForm(queryset)
            return render_to_response('integrations/new.html', {'form':form})
        return HttpResponse("No editable record for " + self.kwargs['code'] + " found!", status=status.HTTP_400_BAD_REQUEST)
Example #11
def user_create_slack_message(request): #this is only for messages with attachments (e.g. Snapshots). Simple messages are sent via websocket from the client
    try:
        post_data = json.loads(request.body)
        channel_id = post_data['channel_id']
        message = post_data['message']
        #company_id = post_data['company_id'] 
        user_id = request.user.id
        company_id = request.user.company 
        if 'snapshot_id' in post_data:
            snapshot_id = post_data['snapshot_id']
        else:
            snapshot_id = None
        print 'snap id ' + str(snapshot_id)
        existingIntegration= CompanyIntegration.objects(company_id=company_id).first()
        if existingIntegration is None or 'slck' not in existingIntegration['integrations']:
            return JsonResponse(None)
        #token = existingIntegration['integrations']['slck']
        userOauthRecord = UserOauth.objects(user_id= ObjectId(user_id)).first()
        if userOauthRecord is None:
            return JsonResponse(None)
        if 'slck_access_token' in userOauthRecord and userOauthRecord['slck_access_token'] is not None:
            token = userOauthRecord['slck_access_token']
        else:
            return JsonResponse(None)
        slck = Slack(None, None, None, None, token)
        attachments = []
        if snapshot_id is not None:
            snapshot = Snapshot.objects(id=ObjectId(snapshot_id)).first()
            if snapshot is None:
                raise ValueError('Snapshot not found!')
            attachment = {
                          "fallback": "New Claritix" + snapshot['chart_name'] + " chart snapshot - " + settings.BASE_URL + "/snapshots/" + snapshot_id,
                          "pretext": "New Claritix chart snapshot",
                          "title": "Claritix snapshot - " +  snapshot['chart_name'] + " as of " + strftime('%Y-%m-%d %H:%M:%S', snapshot['updated_date'].timetuple()),
                          "title_link": settings.BASE_URL + "/snapshots/" + snapshot_id,
                          "text": "Click above link to view the snapshot within Claritix",
                          "color": "#0491c3",
                          "author_name": "Claritix",
                          "author_link": "http://claritix.io",
                          "author_icon": settings.BASE_URL + "/static/images/logo-icon-16x16.png"
                          }
            attachments.append(attachment)
        print 'attachments ' + str(attachments)
        
        params = {'channel': channel_id, 'text': message, 'as_user': True, "attachments": json.dumps(attachments)}
        
        result = json.loads(slck.api_call("chat.postMessage", **params))
        
        return JsonResponse({"result": result}, safe=False)
    except Exception as e:
        return JsonResponse({'Error' : str(e)})
Example #12
 def ajax(self, request):
     print 'in ajax'
     code = self.kwargs['code']
     #print 'new code is ' + code
     self.form_class = self.get_form_class()
     #print 'class is ' + str(self.form_class)
     form = self.form_class(data=json.loads(request.body))
     if form.is_valid():
         company_id = request.user.get_company()
         existingIntegration = CompanyIntegration.objects(company_id = company_id ).first()
         if existingIntegration is not None:
             if code in existingIntegration.integrations: # record for this system found - update
                 existingDict = existingIntegration.integrations
                 existingDict[code] = form.cleaned_data
                 existingDict[code]['access_token'] = ''
                 CompanyIntegration.objects(company_id = company_id ).update(integrations=existingDict)
                 print "Updated data for " + code
                 return HttpResponse("Updated data for " + code, status=status.HTTP_200_OK)
             else:
                 #existingIntegration.integrations[code] = form.cleaned_data
                 existingDict = existingIntegration.integrations
                 existingDict[code] = form.cleaned_data
                 #existingDict[code] [code]['access_token'] = ''
                 CompanyIntegration.objects(company_id = company_id ).update(integrations=existingDict)
                 return HttpResponse('New integration added to existing data', status=status.HTTP_200_OK)
         companyIntegration = CompanyIntegration()
         companyIntegration.company_id = company_id
         companyIntegration.integrations[code] = form.cleaned_data
         try:
             #print 'saving'
             companyIntegration.save()
             return HttpResponse('New integration added', status=status.HTTP_200_OK)   
         except Exception as e:
             print str(e)
             return HttpResponse(str(e), status=status.HTTP_400_BAD_REQUEST)
     else:
         response_data = {'errors': form.errors} #, 'success_url': force_text(self.success_url)
         return HttpResponse(json.dumps(response_data), content_type="application/json")
Example #13
def getUserSlackMessages(request, company_id):
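    # Return the message history for a Slack channel, group or IM (selected via id/type query
    # params), together with the user list and an RTM connection, using the user's own Slack token.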
    try:
        #print 'in user room'
        user_id = request.user.id
        #company_id = request.user.company 
        slack_id = request.GET.get("id")
        slack_type = request.GET.get("type")
#         company = Company.objects(company_id=company_id).first()
#         company_id = company.id
        existingIntegration= CompanyIntegration.objects(company_id=company_id).first()
        if existingIntegration is None or 'slck' not in existingIntegration['integrations']:
            return JsonResponse(None)
        #token = existingIntegration['integrations']['slck']
        userOauthRecord = UserOauth.objects(user_id= ObjectId(user_id)).first()
        if userOauthRecord is None:
            return JsonResponse(None)
        if 'slck_access_token' in userOauthRecord and userOauthRecord['slck_access_token'] is not None:
            token = userOauthRecord['slck_access_token']
        else:
            return JsonResponse(None)
        slck = Slack(None, None, None, None, token)
        messages = None
        params = {'channel': slack_id, 'inclusive': 1, 'count': 1000}
        if slack_type == 'group':
            messages = json.loads(slck.api_call("groups.history", **params)) 
        elif slack_type == 'channel':
            messages = json.loads(slck.api_call("channels.history", **params)) 
        elif slack_type == 'im':
            messages = json.loads(slck.api_call("im.history", **params)) 
        if not messages['ok']:
            raise ValueError('Error while retrieving messages from Slack')
        messages_list = messages['messages']
        rtm = slck.rtm_connect()
        users = json.loads(slck.api_call("users.list")) 
        if not users['ok']:
            raise ValueError('Error while retrieving users from Slack')
        
#         for message in messages_list: #join the user info to each message
#             #print 'msg is ' + str(message['user'])
#             for user in users['members']:
#                 if 'user' in message and message['user'] == user['id']:
#                     message['user_real_name'] = user['profile']['real_name']
#                     message['user_name'] = user['name']
#                     message['user_image_url'] = user['profile']['image_72']
#                     break
        
        return JsonResponse({'slack_messages': messages, 'users': users['members'], 'rtm': rtm}, safe=False)  
    except Exception as e:
        print 'error occurred: ' + str(e)
        return Response('Error: ' + str(e))   
Example #14
def retrieveFbokPageStats(user_id=None, company_id=None, job_id=None, run_type=None, sinceDateTime=None):
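    # Pull Facebook Page insights for each page configured on the company's 'fbok' integration,
    # limited to the last 89 days (per the check below), and save them page by page.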
    try:
        print 'starting retrieveFbokPageStats for company ' + str(company_id)
        #print 'timestamp ' + str(sinceDateTime)
        if (datetime.now() - sinceDateTime).days > 89:
            print 'FB does not allow for more than 89 days of data'
            sinceDateTime = datetime.now() - timedelta(days=89)
        sinceTimestamp = calendar.timegm(sinceDateTime.timetuple())
        sinceTimestamp = str('{0:f}'.format(sinceTimestamp).rstrip('0').rstrip('.'))
        untilTimestamp = time.mktime(datetime.now().timetuple())
        untilTimestamp = str('{0:f}'.format(untilTimestamp).rstrip('0').rstrip('.'))
        #print 'since ' + str(sinceTimestamp) + ' until ' + str(untilTimestamp)
    
        existingIntegration = CompanyIntegration.objects(company_id = company_id).first()
        if 'fbok' not in existingIntegration['integrations']: # Facebook is not present or not configured
            print 'did not find fbok'
            raise Exception('Facebook integration not found')
        integration = existingIntegration.integrations['fbok']
        if integration['access_token'] is None:
            raise Exception('Facebook access token not found')
        fbok = FacebookPage(integration['access_token'])
        if fbok is None:
            raise Exception('Facebook Page object could not be created')
        definedPages = integration.get('pages', None)
        
        print 'calling pages'
        pages = fbok.get_pages()['data']
        print 'found FB pages: ' + str(pages)
        for page in pages:
            page_token = page['access_token']
            page_id = page['id']
            if not definedPages or not any(d['id'] == page_id for d in definedPages): #if page not defined in company instance, skip it
                continue
            print 'page id is ' + str(page_id) + ' and token is ' + str(page_token)
            page_insights = fbok.get_page_insights(page_id, page_token, sinceTimestamp, untilTimestamp)
            #print 'page insights for ' + page['name'] + ': ' + str(page_insights)
            page_insights_cleaned = json.loads(json.dumps(page_insights['data']), object_hook = replace_dots)
            results = {'page_id': page_id, 'insights': page_insights_cleaned}
            saveFbokPageStats(user_id=user_id, company_id=company_id, results=results, job_id=job_id, run_type=run_type)
        #saveFbokAdStats(user_id=user_id, company_id=company_id, results=campaigns, job_id=job_id, run_type=run_type)
                
        
    except Exception as e:
        print 'exception was ' + str(e)
        send_notification(dict(
             type='error',
             success=False,
             message=str(e)
            ))      
Example #15
def _map_sfdc_userid_name(company_id, input_list):
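    # Resolve SFDC owner IDs in input_list to "First Last" names using the user records stored
    # on the company's 'sfdc' integration; unmatched IDs get owner_name 'Unknown'.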
    users = CompanyIntegration.objects(company_id=company_id).aggregate({'$unwind': '$integrations.sfdc.users.records'},  {'$project': {'_id':'$integrations.sfdc.users.records.Id', 'Fname':'$integrations.sfdc.users.records.FirstName', 'Lname':'$integrations.sfdc.users.records.LastName'}}) #{'$match': {'integrations.sfdc.users.records.Id':opp['owner_id']}}, 
    users = list(users)
    
    for item in input_list:
        user_found = False
        for user in users:
            if user['_id'] == item['owner_id']:
                item['owner_name'] = str(user['Fname']) + ' ' + str(user['Lname'])
                user_found = True
                break
        if not user_found:
            item['owner_name'] = 'Unknown'
            
    return input_list
Example #16
 def delete(self, *args, **kwargs):
     
     queryset = CompanyIntegration.objects(id=self.kwargs['id']).first()
     if queryset is not None and self.request.method == 'DELETE':
         try:
             integration_backup = CompanyIntegrationDeleted()
             integration_backup.company_id = queryset.company_id
             integration_backup.integrations[self.kwargs['code']] = queryset.integrations[self.kwargs['code']]
             integration_backup.save()                          
             queryset.integrations.pop(self.kwargs['code']) 
             queryset.save()
         except Exception as e:
             return HttpResponse(str(e), status=status.HTTP_400_BAD_REQUEST)   
     
     return HttpResponse('Integration for ' +  self.kwargs['code'] + ' deleted', status=status.HTTP_200_OK) 
Example #17
def _get_code(company_id, system_type):
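    # Return the integration code (e.g. 'mkto', 'sfdc') configured for the company that matches
    # the given system_type; raises ValueError (returned as a JSON error) if none is defined.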
    existingIntegration = CompanyIntegration.objects(company_id = company_id ).first()
    try:   
        code = None
        if existingIntegration is not None:
            for source in existingIntegration.integrations.keys():
                defined_system_type = SuperIntegration.objects(Q(code = source) & Q(system_type = system_type)).first()
                if defined_system_type is not None:
                    code = source
            #print 'found code' + str(code)
                  
        if code is None:
            raise ValueError("No integrations defined")  
        else:
            return code
    except Exception as e:
        return JsonResponse({'Error' : str(e)})
Example #18
def filterCampaignEmailEventsByType(request, id):
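    # Paginated filter endpoint for campaign email events: resolves the company's integration
    # code for the requested system_type and delegates (currently only HubSpot, 'hspt', is handled).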
    user_id = request.user.id
    company_id = request.user.company_id
    start_date = request.GET.get('start_date')
    end_date = request.GET.get('end_date')
    event_type = request.GET.get('event_type')
    campaign_guid = request.GET.get('campaign_guid')
    email_id = request.GET.get('email_id')
    query_type = request.GET.get('query_type')
    page_number = int(request.GET.get('page_number'))
    items_per_page = int(request.GET.get('per_page'))
    system_type = request.GET.get('system_type')
    chart_name = request.GET.get('chart_name')
    export_type = request.GET.get('export_type')
    offset = (page_number - 1) * items_per_page
    print ' in filter 22'
    existingIntegration = CompanyIntegration.objects(company_id = company_id ).first()
    try:   
        code = None
        if existingIntegration is not None:
            for source in existingIntegration.integrations.keys():
                defined_system_type = SuperIntegration.objects(Q(code = source) & Q(system_type = system_type)).first()
                if defined_system_type is not None:
                    code = source
                    client_secret = existingIntegration['integrations'][code]['client_secret']
            print 'found code ' + str(code)
                  
        if code is None:
            raise ValueError("No integrations defined")  
        elif code == 'mkto':
            pass
        elif code == 'sfdc': 
            pass
            #result = filterLeadsSfdc(user_id=user_id, company_id=company_id, start_date=start_date, end_date=end_date, lead_type=lead_type, query_type=query_type, page_number=page_number, items_per_page=items_per_page, system_type=system_type, offset=offset, code=code)
        elif code == 'hspt': 
            result = filterCampaignEmailEventsByTypeHspt(user_id=user_id, company_id=company_id, start_date=start_date, end_date=end_date, event_type=event_type, email_id=email_id, query_type=query_type, page_number=page_number, items_per_page=items_per_page, system_type=system_type, offset=offset, code=code, campaign_guid=campaign_guid, export_type=export_type)
            result['portal_id'] = client_secret
        else:
            result =  'Nothing to report'
        return JsonResponse(result, safe=False)
    except Exception as e:
        return JsonResponse({'Error' : str(e)})
Example #19
def retrieveBufrTwInteractions(user_id=None, company_id=None, job_id=None, run_type=None, sinceDateTime=None):
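    # Retrieve Twitter interactions from Buffer for every Twitter profile on the company's
    # 'bufr' integration, save them, and record a Notification for the user.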
    try:
        print 'starting retrieveBufrTwInteractions for company ' + str(company_id)
        existingIntegration = CompanyIntegration.objects(company_id = company_id).first()
        if 'bufr' in existingIntegration['integrations']: # if Buffer is present and configured
            print 'found buffer'
            client_id = existingIntegration['integrations']['bufr']['client_id']
            client_secret = existingIntegration['integrations']['bufr']['client_secret']
            access_token = existingIntegration['integrations']['bufr']['access_token']
            buffer = Buffer()
            api = buffer.get_api(client_id=client_id, client_secret=client_secret, access_token=access_token)
            profiles = buffer.get_twitter_profiles(api)
            for profile in profiles:
                results = buffer.get_twitter_updates(profile)
                saveBufrTwInteractions(user_id=user_id, company_id=company_id, results=results, job_id=job_id, run_type=run_type)
                #print 'Tw results are ' + str(results)
        else:
            print 'No integration found with Buffer'
            return JsonResponse({'error' : 'No integration found with Buffer'})
        try:
            message = 'Twitter interactions retrieved from Buffer'
            notification = Notification()
            #notification.company_id = company_id
            notification.owner = user_id
            notification.module = 'Social'
            notification.type = 'Background task' 
            notification.method = os.path.basename(__file__)
            notification.message = message
            notification.success = True
            notification.read = False
            notification.save()
        except Exception as e:
            send_notification(dict(
             type='error',
             success=False,
             message=str(e)
            ))         
    except Exception as e:
        send_notification(dict(
             type='error',
             success=False,
             message=str(e)
            ))      
Example #20
def _get_system(company_id=None, system_type=None): 
    '''Find the appropriate system code e.g. mkto or sfdc for a given system type e.g. ma or crm '''
    
    if company_id is None or system_type is None:
        return None
    
    map = Code("function () {"
             "  for (var key in this.integrations) emit(key, null); } ")
    
    reduce = Code("function (key, values) { return null; } ")
    
    results = CompanyIntegration.objects(company_id=company_id).map_reduce(map, reduce, "inline")
    results = list(results)
    
    systems = SuperIntegration.objects(system_type=system_type).only('code')
    systems = list(systems)
    for system in systems:
        for result in results:
            if result.key == system['code']:
                return system['code']
    
    return None
Example #21
def retrieveFbokPostStats(user_id=None, company_id=None, job_id=None, run_type=None, sinceDateTime=None):
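    # Pull per-post insights for every Facebook page accessible via the company's 'fbok'
    # integration and save them; errors are reported via send_notification.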
    try:
        print 'starting retrieveFbokPostStats for company ' + str(company_id)
        existingIntegration = CompanyIntegration.objects(company_id = company_id).first()
        if 'fbok' not in existingIntegration['integrations']: # Facebook is not present or not configured
            print 'did not find fbok'
            raise Exception('Facebook integration not found')
        integration = existingIntegration.integrations['fbok']
        if integration['access_token'] is None:
            raise Exception('Facebook access token not found')
        fbok = FacebookPage(integration['access_token'])
        if fbok is None:
            raise Exception('Facebook Page object could not be created')
        
        print 'calling pages'
        pages = fbok.get_pages()['data']
        print 'found FB pages: ' + str(pages)
        for page in pages:
            page_token = page['access_token']
            page_id = page['id']
            posts = fbok.get_posts(page_id, page_token)['data']
            print 'got back #posts ' + str(len(posts))
            for post in posts:
                post_insights = fbok.get_post_insights(post['id'], page_token)
                #print 'post insights for ' + post['id'] + ': ' + str(post_insights)
                post_insights_cleaned = json.loads(json.dumps(post_insights['data']), object_hook = replace_dots)
                results = {'page_id': page_id, 'post_id' : post['id'], 'created_time': post['created_time'], 'insights': post_insights_cleaned}
                saveFbokPostStats(user_id=user_id, company_id=company_id, results=results, job_id=job_id, run_type=run_type)
        #saveFbokAdStats(user_id=user_id, company_id=company_id, results=campaigns, job_id=job_id, run_type=run_type)
                
        
    except Exception as e:
        print 'exception was ' + str(e)
        send_notification(dict(
             type='error',
             success=False,
             message=str(e)
            ))      
Example #22
def retrieveViews(request, company_id):
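    # Dispatch a named view: generic views (contacts, campaigns, accounts, opps) are called
    # directly; otherwise the company's integration code for the system_type decides the handler.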
    start_date = request.GET.get('start_date')
    end_date = request.GET.get('end_date')
    view_name = request.GET.get('view_name')
    system_type = request.GET.get('system_type')
    
    user_id = request.user.id
    company_id = request.user.company_id
    existingIntegration = CompanyIntegration.objects(company_id = company_id ).first()
    try:
        #first see if generic view i.e. not dependent on a specific system
        generic_views = {'contacts': getLeads, 'campaigns': getCampaigns, 'accounts': getAccounts, 'opps': getOpportunities} 
        if view_name in generic_views:
            result = generic_views[view_name](request, company_id)
            return result #assume that the view will convert to JSONResponse
        else: #if view is dependent on type of system
            code = None
            if existingIntegration is not None:
                for source in existingIntegration.integrations.keys():
                    defined_system_type = SuperIntegration.objects(Q(code = source) & Q(system_type = system_type)).first()
                    if defined_system_type is not None:
                        code = source
                #print 'found code' + str(code)
                      
            if code is None:
                raise ValueError("No integrations defined")  
            elif code == 'hspt': 
                pass
                #result = retrieveHsptDashboards(user_id=user_id, company_id=company_id, start_date=start_date, end_date=end_date, dashboard_name=dashboard_name)
            elif code == 'mkto': 
                pass
                #result = retrieveMktoDashboards(user_id=user_id, company_id=company_id, start_date=start_date, end_date=end_date, dashboard_name=dashboard_name)
            else:
                result =  {'Error': 'No view found'}
            return JsonResponse(result, safe=False)
    except Exception as e:
        return JsonResponse({'Error' : str(e)})
Example #23
def retrieveMktoActivities(user_id=None, company_id=None, job_id=None, run_type=None, sinceDateTime=None):
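    # Retrieve Marketo lead activities (all activity types) for leads in the 'Claritix Leads List'
    # since sinceDateTime, remove those leads from the Marketo list, and save the activities.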
    #return
    try:
        print 'getting mkto activities'
        #company_id = request.user.company_id
        existingIntegration = CompanyIntegration.objects(company_id = company_id).first()
        activityTypeIds = []
#         if existingIntegration is not None:
#             activityTypeArray = existingIntegration.integrations['mkto']['metadata']['activity']
#             for i in range(len(activityTypeArray)):
#                 if (activityTypeArray[i]['name'] == 'Send Email' or activityTypeArray[i]['name'] == 'Email Delivered' or activityTypeArray[i]['name'] == 'Open Email' or activityTypeArray[i]['name'] == 'Visit Webpage' or activityTypeArray[i]['name'] == 'Fill out Form' or activityTypeArray[i]['name'] == 'Click Link' or activityTypeArray[i]['name'] == 'Email Bounced' or activityTypeArray[i]['name'] == 'Email Unsubscribed'  or activityTypeArray[i]['name'] == 'Change Data Value'):
#                     activityTypeIds.append(str(activityTypeArray[i]['id']))

        if existingIntegration is not None:
            activityTypeArray = existingIntegration.integrations['mkto']['metadata']['activity']
            for i in range(len(activityTypeArray)):
                activityTypeIds.append(str(activityTypeArray[i]['id']))

        if not activityTypeIds: # no activity type metadata available
            return []
        
        if sinceDateTime is None:
            sinceDateTime = datetime.now() - timedelta(days=30) #change to 365
        mkto = Marketo(company_id)
        
        #get the 'Claritix Lead List' in order to only get activities for leads in that list
        listList = mkto.get_lists(id=None , name=['Claritix Leads List'], programName=None, workspaceName=None, batchSize=None)
        if listList and listList[0]:
            leadListId = listList[0]['id']
        else:
            raise ValueError('Claritix Leads List not found')
        

        batch_size = 10  #10 Activity Types at a time
        activityList = []
        for i in range(0, len(activityTypeIds), batch_size):
            activityTypeIdsTemp =  activityTypeIds[i:i+batch_size]
            print 'getting activities for ' + str(activityTypeIdsTemp)
            activityList.extend(mkto.get_lead_activity(activityTypeIdsTemp, sinceDatetime=sinceDateTime, leadListId=leadListId))
        
        
        #delete leads from lead list in Marketo
        deleteList = mkto.get_leads_by_listId(listId=leadListId)
        deleteLeadIds = [str(e['id']) for e in deleteList]
        print 'leads to be removed from CX List are ' + str(deleteLeadIds)
        batch_size = 300
        for i in range(0, len(deleteLeadIds), batch_size):
            mkto.remove_leads_from_list(listId=leadListId, leadsIds = deleteLeadIds[i:i+batch_size])
            print 'leads removed from Mkto CX List'
            
        print 'going to save mkto activities - count ' + str(len(activityList))
        saveMktoActivities(user_id=user_id, company_id=company_id, activityList=activityList, activityTypeArray=activityTypeArray, job_id=job_id, run_type=run_type)
        
        
        try:
            message = 'Activities retrieved from Marketo'
            notification = Notification()
            #notification.company_id = company_id
            notification.owner = user_id
            notification.module = 'Activities'
            notification.type = 'Background task' 
            notification.method = os.path.basename(__file__)
            notification.message = message
            notification.success = True
            notification.read = False
            notification.save()
        except Exception as e:
            send_notification(dict(
             type='error',
             success=False,
             message=str(e)
            ))         
        return activityList
    except Exception as e:
        print 'Error while retrieving activities from Marketo ' + str(e)
        send_notification(dict(
             type='error',
             success=False,
             message=str(e)
            ))   
Example #24
def dailyCronJob():
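    # Nightly cron entry point: logs in the superadmin, then for every company with a usable
    # last-run timestamp (and no run currently in progress) kicks off a delta companyDataExtract.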
    print 'in cron'
    try:
        logs = [] #holds all error messages for the job
        # first get the superadmin user and the companies
        user = _get_superadmin()
        if user is None:
            mail_admins('Could not find super admin!', 'Check settings')
            return # no superadmin found
        # remotely login the user
        host = settings.BASE_URL
        url = host + '/api/v1/auth/login/'
        creds = {'email': '*****@*****.**', 'password':'******'}
        s = requests.Session()
        resp = s.post(url, data=json.dumps(creds))
        print 'resp is ' + str(resp.status_code)
        if not resp.status_code == 200:
            mail_admins('Could not login super admin!', 'Check credentials')
            logs.append('Could not login super admin!')
            return
        else:
            logs.append('Superadmin logged in')
        
        cookies = dict(sessionid = resp.cookies['sessionid'])
        url = host + '/api/v1/users/'
        resp = s.get(url, cookies=cookies)
        print 'resp2 is ' + str(resp.status_code)
            
        print str(logs)
        
        querydict = {'company_id__ne' : 0}
        companies = Company.objects(**querydict)
        print 'found companies ' + str(len(companies))
        #now loop through each company find which systems are connected 
        for company in companies:
            company_id = company.company_id
            company_name = company.name
            print 'in company ' + company.name 
        
            existingIntegration = CompanyIntegration.objects(company_id = company_id).first()
            if existingIntegration is None: # no integration found so move to next company
                logs.append('No integration record for company ' + str(company_name))
                continue
            else: #skip this company if initial or delta run are in progress
                if 'initial_run_in_process' in existingIntegration and existingIntegration['initial_run_in_process'] == True:
                    logs.append('Initial run in process for company ' + str(company_name))
                    continue
                if 'delta_run_in_process' in existingIntegration and existingIntegration['delta_run_in_process'] == True:
                    logs.append('Delta run in process for company ' + str(company_name))
                    continue
                # look for either the last delta run date or the last initial run date
                sinceDateTime = None
                if 'delta_run_done' in existingIntegration:
                    if 'delta_run_last_date' in existingIntegration:
                        sinceDateTime = existingIntegration['delta_run_last_date']
                if sinceDateTime is None:
                    if 'initial_run_done' in existingIntegration:
                        if 'initial_run_last_date' in existingIntegration:
                            sinceDateTime = existingIntegration['initial_run_last_date']
                   
                if sinceDateTime is None:
                    logs.append('No start date for delta run for company ' + str(company_name))
                    continue
    
                sinceDateTime = int(time.mktime(time.strptime(_str_from_date(sinceDateTime), '%Y-%m-%dT%H:%M:%SZ')))
                sinceDateTime -= int(12*60*60) #move back 12 hours as a safety measure
                sinceDateTime = sinceDateTime * 1000
                print 'calling extract with ' + str(sinceDateTime)
                companyDataExtract(user_id=None, company_id=company_id, run_type='delta', sinceDateTime=sinceDateTime)
                
    except Exception as e:
        logs.append(str(e))
        print 'exception is ' + str(e)
Example #25
def dailyCronJob_Deprecated():
    try:
        logs = [] #holds all error messages for the job
        # first get the superadmin user and the companies
        user = _get_superadmin()
        if user is None:
            mail_admins('Could not find super admin!', 'Check settings')
            return # no superadmin found
        # remotely login the user
        host = settings.BASE_URL
        url = host + '/api/v1/auth/login/'
        creds = {'email': '*****@*****.**', 'password':'******'}
        s = requests.Session()
        resp = s.post(url, data=json.dumps(creds))
        if not resp.status_code == 200:
            mail_admins('Could not login super admin!', 'Check credentials')
            logs.append('Could not login super admin!')
            return
        else:
            logs.append('Superadmin logged in')
            
        cookies = dict(sessionid = resp.cookies['sessionid'])
        url = host + '/api/v1/users/'
        resp = s.get(url, cookies=cookies)
    
        companies = Company.objects.filter(~Q(company_id=0))
        
        #now loop through each company find which systems are connected 
        for company in companies:
            company_id = company.company_id
            company_name = company.name
            existingIntegration = CompanyIntegration.objects(company_id = company_id).first()
            if existingIntegration is None: # no integration found so move to next company
                continue
            
            #loop through the different source systems and call their respective tasks
            for source in existingIntegration.integrations.keys():
                if source == 'mkto':
                    #start by calling Mkto Lead Daily Retrieval (it calls Mkto Activity Daily Retrieval so no need to call Activity again)
                    # get meta data for activities 
                    url = host + '/api/v1/company/' + str(company_id) + '/integrations/metadata/'
                    params = {'code': source, 'object': 'activity'}
                    resp = s.get(url, params=params) # get metadata about activities
                    if not resp.status_code == 200:
                        logs.append('Could not retrieve metadata about activities from Marketo for company ' + str(company_name))
                        continue
                    else:
                        logs.append('Retrieved metadata about activities from Marketo for company ' + str(company_name))
                    # get meta data for leads 
                    params = {'code': source, 'object': 'lead'}
                    resp = s.get(url, params=params) # get metadata about leads
                    if not resp.status_code == 200:
                        logs.append('Could not retrieve metadata about leads from Marketo for company ' + str(company_name))
                        continue
                    else:
                        logs.append('Retrieved metadata about leads from Marketo for company ' + str(company_name))
                    # get activities and leads
                    url = host + '/api/v1/company/' + str(company_id) + '/leads/retrieve/daily/'
                    params = {'code': source}
                    resp = s.get(url, params=params) # get leads and activities
                    if not resp.status_code == 200:
                        logs.append('Could not retrieve activities and leads from Marketo for company ' + str(company_name))
                        continue
                    else:
                        logs.append('Retrieved activities and leads from Marketo for company ' + str(company_name))
                #if integrated to Salesforce    
                if source == 'sfdc':
                    #get metadata about contact
                    url = host + '/api/v1/company/' + str(company_id) + '/integrations/metadata/'
                    params = {'code': source, 'object': 'contact'}
                    resp = s.get(url, params=params) # get metadata
                    if not resp.status_code == 200:
                        logs.append('Could not retrieve contact metadata from SFDC for company ' + str(company_name))
                        continue
                    else:
                        logs.append('Retrieved metadata about contacts from SFDC for company ' + str(company_name))
                    #get contacts
                    url = host + '/api/v1/company/' + str(company_id) + '/contacts/retrieve/daily/'
                    params = {'code': source}
                    resp = s.get(url, params=params) # get metadata
                    if not resp.status_code == 200:
                        logs.append('Could not retrieve contacts from SFDC for company ' + str(company_name))
                        continue
                    else:
                        logs.append('Retrieved contacts from SFDC for company ' + str(company_name))
                    #get metadata about campaign
                    url = host + '/api/v1/company/' + str(company_id) + '/integrations/metadata/'
                    params = {'code': source, 'object': 'campaign'}
                    resp = s.get(url, params=params) # get metadata
                    if not resp.status_code == 200:
                        logs.append('Could not retrieve campaign metadata from SFDC for company ' + str(company_name))
                        continue
                    else:
                        logs.append('Retrieved metadata about campaigns from SFDC for company ' + str(company_name))
                    #get contacts
                    url = host + '/api/v1/company/' + str(company_id) + '/campaigns/retrieve/daily/'
                    params = {'code': source}
                    resp = s.get(url, params=params) # get metadata
                    if not resp.status_code == 200:
                        logs.append('Could not retrieve campaigns from SFDC for company ' + str(company_name))
                        continue
                    else:
                        logs.append('Retrieved campaigns from SFDC for company ' + str(company_name))
                    
            # run the daily analytics extract jobs
            # timeline chart
            url = host + '/api/v1/company/' + str(company_id) + '/analytics/calculate/'
            chart_title = 'Timeline'
            params = {'chart_name': 'sources_bar', 'system_type': 'MA', 'chart_title':chart_title, 'mode': 'Daily'}
            resp = s.get(url, params=params)
            if not resp.status_code == 200:
                logs.append('Could not retrieve data for ' + chart_title + ' for company ' + str(company_name))
                continue
            else:
                logs.append('Retrieved data for ' + chart_title + ' for company ' + str(company_name))    
            # pipeline duration chart
            chart_title = 'Pipeline Duration'
            params = {'chart_name': 'pipeline_duration', 'system_type': 'MA', 'chart_title':chart_title, 'mode': 'Daily'}
            resp = s.get(url, params=params)
            if not resp.status_code == 200:
                logs.append('Could not retrieve data for ' + chart_title + ' for company ' + str(company_name))
                continue
            else:
                logs.append('Retrieved data for ' + chart_title + ' for company ' + str(company_name))
            # contacts distribution chart
            chart_title = 'Contacts Distribution'
            params = {'chart_name': 'contacts_distr', 'system_type': 'MA', 'chart_title':chart_title, 'mode': 'Daily'}
            resp = s.get(url, params=params)
            if not resp.status_code == 200:
                logs.append('Could not retrieve data for ' + chart_title + ' for company ' + str(company_name))
                continue
            else:
                logs.append('Retrieved data for ' + chart_title + ' for company ' + str(company_name))
            # source pie chart
            chart_title = 'Source Breakdown'
            params = {'chart_name': 'source_pie', 'system_type': 'MA', 'chart_title':chart_title, 'mode': 'Daily'}
            resp = s.get(url, params=params)
            if not resp.status_code == 200:
                logs.append('Could not retrieve data for ' + chart_title + ' for company ' + str(company_name))
                continue
            else:
                logs.append('Retrieved data for ' + chart_title + ' for company ' + str(company_name))
            # revenue source pie chart
            chart_title = 'Revenue by Source'
            params = {'chart_name': 'revenue_source_pie', 'system_type': 'MA', 'chart_title':chart_title, 'mode': 'Daily'}
            resp = s.get(url, params=params)
            if not resp.status_code == 200:
                logs.append('Could not retrieve data for ' + chart_title + ' for company ' + str(company_name))
                continue
            else:
                logs.append('Retrieved data for ' + chart_title + ' for company ' + str(company_name))
        
        mail_admins('Daily extract job completed', '\n'.join(str(elem) for elem in logs))
        print 'Daily extract job completed'       
    
    except Exception as e:
        logs.append(str(e))
Example #26
def saveMktoActivitiesToMaster(user_id=None, company_id=None, job_id=None, run_type=None): 
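    # Move Marketo activity records from the temp collection (initial or delta run) onto the
    # matching Lead documents, tagging each with its activity type name and skipping duplicates.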
    #job_id = ObjectId("56a2dd408afb006f9e7cb851") 
    
    
    if run_type == 'initial':   
        #activities = TempData.objects(Q(company_id=company_id) & Q(record_type='activity') & Q(source_system='mkto') & Q(job_id=job_id) ).only('source_record') 
        collection = TempData._get_collection()
        activities = collection.find({'company_id': int(company_id), 'record_type': 'activity', 'source_system': 'mkto', 'job_id': job_id}, projection={'source_record': True}, batch_size=1000)
        
    else:
        collection = TempDataDelta._get_collection()
        activities = collection.find({'company_id': int(company_id), 'record_type': 'activity', 'source_system': 'mkto', 'job_id': job_id}, projection={'source_record': True}, batch_size=1000)
        
        #activities = TempDataDelta.objects(Q(company_id=company_id) & Q(record_type='activity') & Q(source_system='mkto') & Q(job_id=job_id) ).only('source_record') 
    
    
#     activityListTemp = list(activities)
#     activityList = [i['source_record'] for i in activityListTemp]
    
    existingIntegration = CompanyIntegration.objects(company_id = company_id).first()
    if existingIntegration is not None:
        activityTypeArray = existingIntegration.integrations['mkto']['metadata']['activity']
    else:
        print 'No activity type metadata found for Marketo'
        raise ValueError('No activity type metadata found for Marketo')
    
    try:
        # look up the id of the 'Change Data Value' activity type; default to None if it is missing
        changeActivityId = None
        for activityType in activityTypeArray:
            if activityType['name'] == 'Change Data Value':
                changeActivityId = activityType['id']
                break
        print 'change id is ' + str(changeActivityId)
        
        for activity in activities: 
            
            newActivity = activity['source_record']
            addThisActivity = True
            
            #company_id = request.user.company_id
            mkto_id = str(newActivity['leadId'])
            print 'doing lead ' + mkto_id
            
            existingLead = Lead.objects(Q(mkto_id = str(mkto_id)) & Q(company_id = company_id)).first()
            if existingLead is not None: # we found this lead to attach the activities
                if 'mkto' in existingLead.activities:
                    currentActivities = existingLead.activities['mkto']        
                    for i in range(len(currentActivities)):
                        if currentActivities[i]['id'] == newActivity['id']: #check if this activity already exists in the lead dict
                            addThisActivity = False
                            break
                    if addThisActivity == True:
                        for i in range(len(activityTypeArray)):
                            if activityTypeArray[i]['id'] == newActivity['activityTypeId']:
                                newActivity['activityTypeName'] = activityTypeArray[i]['name']
                                break
                        currentActivities.append(newActivity)
                        existingLead.update(activities__mkto = currentActivities)
                        
                else:
                    currentActivities = []
                    for i in range(len(activityTypeArray)):
                        if activityTypeArray[i]['id'] == newActivity['activityTypeId']:
                            newActivity['activityTypeName'] = activityTypeArray[i]['name']
                            break
                    currentActivities.append(newActivity)
                    existingLead.update(activities__mkto = currentActivities)
                # if this is a new 'Change Data Value' activity on the Lead Status field, also record it as a status change
                if addThisActivity == True and newActivity['activityTypeId'] == changeActivityId and newActivity['primaryAttributeValue'] == 'Lead Status':
                    print 'processing status activity for id ' + mkto_id
                    #statusRecord = [];
                    newStatus = ''
                    oldStatus = ''
                    for attribute in newActivity['attributes']:
                        if attribute['name'] == 'New Value':
                            newStatus = attribute['value']
                        elif attribute['name'] == 'Old Value':
                            oldStatus = attribute['value']
#                         elif attribute['name'] == 'Reason':
#                             reason = attribute['value']    
                            #break
                    #statusRecord.append({'status': newStatus, 'date': newActivity['activityDate']})
                    newActivity['newStatus'] = newStatus 
                    newActivity['oldStatus'] = oldStatus 
                    newActivity['date'] = newActivity['activityDate']
                    if 'mkto' in existingLead.statuses:
                        currentStatuses = existingLead.statuses['mkto']
                        currentStatuses.append(newActivity) # changed on 1/22/2016 {'status': newStatus, 'date': newActivity['activityDate']})
                        existingLead.update(statuses__mkto = currentStatuses)
                    else:
                        currentStatuses = []
                        currentStatuses.append(newActivity) # changed on 1/22/2016{'status': newStatus, 'date': newActivity['activityDate']})
                        existingLead.update(statuses__mkto = currentStatuses)
                        
#                 if addThisActivity == True: # this activity was not found in the lead, so add it
#                     existingLead.activities['mkto'].append(newActivity)
#                 existingLead.save() # no concept of saving the activity if the lead does not exist
    except Exception as e:
        send_notification(dict(
            type='error',
            success=False,
            message=str(e)
        ))
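The loops above resolve 'activityTypeName' by scanning activityTypeArray once per activity. A minimal sketch of a dictionary lookup that avoids the repeated scans; the helper name and usage are hypothetical, assuming the same metadata structure as above:

def build_activity_type_lookup(activityTypeArray):
    # map Marketo activity type id -> name once, so each activity resolves its name in O(1)
    return dict((t['id'], t['name']) for t in activityTypeArray)

# usage sketch:
# lookup = build_activity_type_lookup(activityTypeArray)
# newActivity['activityTypeName'] = lookup.get(newActivity['activityTypeId'])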
Example #27
0
def companyDataExtract(user_id=None, company_id=None, run_type=None, sinceDateTime=None):
    superJobMonitor = None
    existingIntegration = None
    
    if run_type is None or company_id is None or sinceDateTime is None:
        return
    try:
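        # sinceDateTime arrives as an epoch timestamp in milliseconds; convert it to a datetime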
        sinceDateTime = datetime.fromtimestamp(float(sinceDateTime) / 1000)
        #sinceDateTime = datetime.now() - timedelta(days=30)
        print 'start date is ' + str(sinceDateTime)
        print 'run type is ' + run_type
        print 'company id is ' + str(company_id)

        #create an entry in the Job Monitor
        superJobMonitor = SuperJobMonitor(company_id=company_id, type=run_type, started_date=datetime.utcnow())
        superJobMonitor.save()
        
        #do pre-check
        _superJobMonitorAddTask(superJobMonitor, "Claritix", "Pre-check started") 
        # get the integration record
        existingIntegration = CompanyIntegration.objects(company_id=company_id).first() 
        if existingIntegration is None:
            _superJobMonitorEnd(superJobMonitor, None, run_type, 'Failed', 'No integration record found')
            mail_admins('Could not find integration record for company ' + str(company_id), 'Check settings')
            return False 
        if run_type == 'initial':   
            existingIntegration['initial_run_in_process'] = True #set the flag
        else: #delta
            existingIntegration['delta_run_in_process'] = True #set the flag
        existingIntegration.save() # save the flag   
        
        # set up the Request and Cookie
        user = _get_superadmin()
        if user is None:
            _superJobMonitorEnd(superJobMonitor, existingIntegration, run_type, 'Failed', 'No super admin found')
            mail_admins('Could not find super admin!', 'Check settings')
            return False
            
        # remotely login the user
        host = settings.BASE_URL
        url = host + '/api/v1/auth/login/'
        creds = {'email': '*****@*****.**', 'password':'******'}
        s = requests.Session()
        resp = s.post(url, data=json.dumps(creds))
        if resp.status_code != 200:
            _superJobMonitorEnd(superJobMonitor, existingIntegration, run_type, 'Failed', 'Could not login super admin!')
            mail_admins('Could not login super admin!', 'Check credentials')
            return False
            
        # reuse the session cookie from the login response to authenticate follow-up API calls - refer to SuperAdmin Cron Job Task for details
        cookies = dict(sessionid = resp.cookies['sessionid'])
        url = host + '/api/v1/users/'
        resp = s.get(url, cookies=cookies)
        _superJobMonitorAddTask(superJobMonitor, "Claritix", "Pre-check completed") 

#         #delete data in Lead Master
#         if run_type == 'initial':
#             _superJobMonitorAddTask(superJobMonitor, "Claritix", "Deletion of Leads, Contacts, Opportunities and Activities started") 
#             count = Lead.objects(company_id=company_id).count()
#             Lead.objects(company_id=company_id).delete()
#             _superJobMonitorAddTask(superJobMonitor, "Claritix", str(count) + " Leads, Contacts, Opportunities and Activities deleted")
#                       
#         #delete data in Campaign Master
#         if run_type == 'initial':
#             _superJobMonitorAddTask(superJobMonitor, "Claritix", "Deletion of Campaigns started") 
#             count = Campaign.objects(company_id=company_id).count()
#             Campaign.objects(company_id=company_id).delete()
#             _superJobMonitorAddTask(superJobMonitor, "Claritix", str(count) + " Campaigns deleted")
#                
#         #delete data in Account Master
#         if run_type == 'initial':
#             _superJobMonitorAddTask(superJobMonitor, "Claritix", "Deletion of Accounts started") 
#             count = Account.objects(company_id=company_id).count()
#             Account.objects(company_id=company_id).delete()
#             _superJobMonitorAddTask(superJobMonitor, "Claritix", str(count) + " Accounts deleted")
#                             
        # find out which systems are integrated and therefore which tasks should be run
        task_map = {
                    #"mkto" : [retrieveMktoCampaigns, retrieveMktoLeadsByProgram], #retrieveMktoLeadCreatedActivities, retrieveMktoLeads, retrieveMktoActivities, retrieveMktoCampaigns, retrieveMktoLeadsByProgram], #IMPORTANT - Lead Created Activities has to be before Leads
                    #"hspt" : [retrieveHsptCampaigns], # , ], # retrieveHsptLeads, retrieveHsptOpportunities, retrieveHsptWebsiteTraffic, ,   
                     "prdt" : [retrievePrdtLeads],
#                    "sfdc" : [retrieveSfdcCampaigns] #retrieveSfdcLeads, retrieveSfdcContacts, retrieveSfdcCampaigns, retrieveSfdcAccounts, retrieveSfdcOpportunities, retrieveSfdcLeadHistory, retrieveSfdcContactHistory, retrieveSfdcOppHistory, retrieveSfdcOppStageHistory],
                    #  "sugr" : [retrieveSugrLeads],                    
#                    "bufr" : [retrieveBufrTwInteractions], 
#                    "goog" : [retrieveGoogWebsiteTraffic], \
#                    "fbok" : [retrieveFbokPageStats, retrieveFbokAdStats]#, retrieveFbokPostStats] # , ]
                    }
#         # for future use - retrieveMktoContacts, retrieveMktoOpportunities, retrieveSfdcActivities, 
        final_task_map = {
#                     "mkto" : [saveMktoCampaignsToMaster, saveMktoLeadsByProgramToMaster], #saveMktoLeadsToMaster, saveMktoActivitiesToMaster, saveMktoCampaignsToMaster, saveMktoLeadsByProgramToMaster],#mergeMktoSfdcLeads, deleteLeads, deleteDuplicateMktoIdLeads #
                    #"hspt" : [saveHsptCampaignsToMaster ], #saveHsptLeadsToMaster, saveHsptOpportunitiesToMaster, saveHsptWebsiteTrafficToMaster, , ], # ,
#                     "sfdc" : [saveSfdcCampaignsToMaster], #saveSfdcLeadsToMaster, saveSfdcContactsToMaster, saveSfdcCampaignsToMaster, saveSfdcAccountsToMaster, saveSfdcOpportunitiesToMaster, saveSfdcLeadHistoryToMaster, saveSfdcContactHistoryToMaster, saveSfdcOppHistoryToMaster, saveSfdcOppStageHistoryToMaster],  # 
                    # "sugr" : [saveSugrLeadsToMaster], 
#                     "bufr" : [saveBufrTwInteractionsToMaster], 
#                     "goog": [saveGoogleWebsiteTrafficToMaster], 
#                     "fbok": [saveFbokPageStatsToMaster, saveFbokAdStatsToMaster]#, saveFbokPostStatsToMaster] #
                    }
#         #
# #         #saveSfdcLeadsToMaster, saveSfdcContactsToMaster, saveSfdcOpportunitiesToMaster, saveSfdcCampaignsToMaster, 
# #         # saveSfdcLeadsToMaster, saveSfdcContactsToMaster, saveSfdcOpportunitiesToMaster, saveSfdcCampaignsToMaster, saveSfdcAccountsToMaster
# #         #collect all relevant tasks in one list and retrieve metadata as well
        for source in existingIntegration.integrations.keys():
            #change metadata depending on source system
            if source == 'sfdc':
                metadata_objects = ['user', 'lead', 'contact', 'campaign', 'opportunity', 'task', 'account'] #[] #objects for which metadata should be collected
            elif source == 'mkto':
                metadata_objects = ['lead', 'activity', 'campaign'] #[] #objects for which metadata should be collected
            elif source == 'hspt':
                metadata_objects = ['lead'] #objects for which metadata should be collected
            else:
                metadata_objects = [] #objects for which metadata should be collected
            # if sfdc, explicitly refresh the access token
            if source == 'sfdc':
                sfdc = Salesforce()
                sfdc.refresh_token(company_id)
            #collect meta data
            url = host + '/api/v1/company/' + str(company_id) + '/integrations/metadata/'
#             for object in metadata_objects:
#                 _superJobMonitorAddTask(superJobMonitor, source, "Retrieval of metadata for " + object + " started")
#                 params = {'code': source, 'object': object}
#                 resp = s.get(url, params=params)  # get metadata about activities
#                 if not resp.status_code == 200:
#                     _superJobMonitorAddTask(superJobMonitor, source, "Retrieval of metadata for " + object + " failed")
#                     continue
#                 else:
#                     _superJobMonitorAddTask(superJobMonitor, source, "Retrieval of metadata for " + object + " completed")
                         
            #collect retrieval tasks
            print 'starting retrieval tasks'
            tasks = []
            if source in task_map:
                tasks.extend(task_map[source])
            #now run the tasks
            for task in tasks:
                _superJobMonitorAddTask(superJobMonitor, source, task.__name__ + " started to store temp data")
                print 'starting task ' + str(task)
                task(user_id=user_id, company_id=company_id, job_id=superJobMonitor.id, run_type=run_type, sinceDateTime=sinceDateTime)
                _superJobMonitorAddTask(superJobMonitor, source, task.__name__ + " completed")
                  
            print 'starting save tasks'    
            #collect save tasks
            tasks = []
            if source in final_task_map:
                tasks.extend(final_task_map[source])
            #now run the tasks
            for task in tasks:
                _superJobMonitorAddTask(superJobMonitor, source, task.__name__ + " started to save master data")
                print 'starting task ' + str(task)
                task(user_id=user_id, company_id=company_id, job_id=superJobMonitor.id, run_type=run_type)
                _superJobMonitorAddTask(superJobMonitor, source, task.__name__ + " completed")
             
        #return #REMOVE THIS IN PRODUCTION  
        #if initial run, delete all analytics data
#         if run_type == 'initial':
#             _superJobMonitorAddTask(superJobMonitor, 'Claritix', "Deletion of analytics data started")
#             count1 = AnalyticsData.objects(company_id=company_id).count() #ensure that website_traffic chart data is not deleted
#             AnalyticsData.objects(company_id=company_id).delete() #ensure that website_traffic chart data is not deleted
#             count2 = AnalyticsIds.objects(company_id=company_id).count()
#             AnalyticsIds.objects(company_id=company_id).delete()
#             _superJobMonitorAddTask(superJobMonitor, 'Claritix', str(count1 + count2) + " records deleted from analytics tables")
#                
        # call chart calculate tasks
        charts = [\
#                     {'chart_name': 'sources_bar', 'system_type': 'MA', 'chart_title':'Timeline', 'mode': run_type, 'start_date': sinceDateTime}, \
#                     #{'chart_name': 'pipeline_duration', 'system_type': 'MA', 'chart_title':'Pipeline Duration', 'mode': run_type, 'start_date': sinceDateTime}, \
#                     #{'chart_name': 'contacts_distr', 'system_type': 'MA', 'chart_title':'Contacts Distribution', 'mode': run_type, 'start_date': sinceDateTime}, \
#                     {'chart_name': 'source_pie', 'system_type': 'MA', 'chart_title':'Source Distribution', 'mode': run_type, 'start_date': sinceDateTime}, \
#                     {'chart_name': 'revenue_source_pie', 'system_type': 'MA', 'chart_title':'Revenue by Source', 'mode': run_type, 'start_date': sinceDateTime}, \
#                     #{'chart_name': 'multichannel_leads', 'system_type': 'MA', 'chart_title':'Multichannel Leads', 'mode': run_type, 'start_date': sinceDateTime}, \
#                     {'chart_name': 'tw_performance', 'system_type': 'SO', 'chart_title':'Twitter Performance', 'mode': run_type, 'start_date': sinceDateTime}, \
#                     {'chart_name': 'fb_performance', 'system_type': 'SO', 'chart_title':'Facebook_Performance', 'mode': run_type, 'start_date': sinceDateTime}, \
#                     {'chart_name': 'google_analytics', 'system_type': 'AD', 'chart_title':'Google Analytics', 'mode': run_type, 'start_date': sinceDateTime}, \
#                      {'chart_name': 'campaign_email_performance', 'system_type': 'MA', 'chart_title':'Campaign Performance by Email', 'mode': run_type, 'start_date': sinceDateTime}, \
#                      {'chart_name': 'email_cta_performance', 'system_type': 'MA', 'chart_title':'Email Performance by CTA', 'mode': run_type, 'start_date': sinceDateTime}, \
#                 
                ]
        
        url = host + '/api/v1/company/' + str(company_id) + '/analytics/calculate/'
        for chart in charts:
            print 'starting chart ' + str(chart['chart_title'])
            _superJobMonitorAddTask(superJobMonitor, 'Claritix', "Started calculating " + str(chart['chart_title']))
            resp = s.get(url, params=chart)
            if resp.status_code != 200:
                print 'incorrect status code was ' + str(resp.status_code)
                _superJobMonitorAddTask(superJobMonitor, 'Claritix', 'Could not retrieve data for ' + chart['chart_title'])
                continue
            else:
                _superJobMonitorAddTask(superJobMonitor, 'Claritix', 'Retrieved data for ' + chart['chart_title'])    
        
        # call dashboard calculate tasks
        dashboards = [\
#                     {'chart_name': 'social_roi', 'system_type': 'MA', 'chart_title':'Social Performance', 'mode': run_type, 'start_date': sinceDateTime}, \
#                     {'chart_name': 'funnel', 'system_type': 'MA', 'chart_title':'Funnel', 'mode': run_type, 'start_date': sinceDateTime}, \
#                     {'chart_name': 'opp_funnel', 'system_type': 'CRM', 'chart_title':'Opportunity Funnel', 'mode': run_type, 'start_date': sinceDateTime}, \
#                     {'chart_name': 'waterfall_chart', 'system_type': 'MA', 'chart_title':'Waterfall Chart', 'mode': run_type, 'start_date': sinceDateTime}, \
#                     {'chart_name': 'form_fills', 'system_type': 'MA', 'chart_title':'Form Fills', 'mode': run_type, 'start_date': sinceDateTime}, \
                    ]
        
        url = host + '/api/v1/company/' + str(company_id) + '/dashboards/calculate/'
        for dashboard in dashboards:
            print 'starting dashboard ' + str(dashboard['chart_title'])
            _superJobMonitorAddTask(superJobMonitor, 'Claritix', "Started calculating " + str(dashboard['chart_title']))
            resp = s.get(url, params=dashboard)
            if resp.status_code != 200:
                print 'incorrect status code was ' + str(resp.status_code)
                _superJobMonitorAddTask(superJobMonitor, 'Claritix', 'Could not retrieve data for ' + dashboard['chart_title'])
                continue
            else:
                _superJobMonitorAddTask(superJobMonitor, 'Claritix', 'Retrieved data for ' + dashboard['chart_title'])    
        
        #delete all data from past successful initial runs in Temp Table
#         if run_type == 'initial':
#             _superJobMonitorAddTask(superJobMonitor, "Claritix", "Deletion of temp data table started") 
#             successfulJobs = SuperJobMonitor.objects(Q(company_id=company_id) & Q(type='initial') & Q(status='Completed'))
#             successfulJobsListTemp = list(successfulJobs)
#             print 'found job ids ' + str(len(successfulJobsListTemp))
#             successfulJobsList = [i.id for i in successfulJobsListTemp]
#             count = TempData.objects(job_id__in=successfulJobsList).count()
#             TempData.objects(job_id__in=successfulJobsList).delete()
#             _superJobMonitorAddTask(superJobMonitor, "Claritix", str(count) + " records deleted from temp data table")
        #delete all data from past successful delta runs in Temp Table
        if run_type == 'delta':
            _superJobMonitorAddTask(superJobMonitor, "Claritix", "Deletion of temp data delta table started") 
            successfulJobs = SuperJobMonitor.objects(Q(company_id=company_id) & Q(type='delta') & Q(status='Completed'))
            successfulJobsListTemp = list(successfulJobs)
            print 'found delta job ids ' + str(len(successfulJobsListTemp))
            successfulJobsList = [i.id for i in successfulJobsListTemp]
            count = TempDataDelta.objects(job_id__in=successfulJobsList).count()
            TempDataDelta.objects(job_id__in=successfulJobsList).delete()
            _superJobMonitorAddTask(superJobMonitor, "Claritix", str(count) + " records deleted from temp data delta table")
        #update last run date for initial run in Company Integration record
        
        _superJobMonitorEnd(superJobMonitor, existingIntegration, run_type, 'Completed', 'All tasks completed successfully') 
        return True

    except Exception as e:
        if superJobMonitor is not None and existingIntegration is not None:
            _superJobMonitorEnd(superJobMonitor, existingIntegration, run_type, 'Failed', str(e)) 
        print str(e)
        return False
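companyDataExtract authenticates by logging in a super admin over HTTP and reusing the session cookie for the follow-up API calls. A minimal sketch of that pattern, assuming the same /api/v1/auth/login/ endpoint and sessionid cookie; the helper name and credentials are placeholders:

import json
import requests

def login_session(host, email, password):
    # hypothetical helper: POST the credentials and return the session plus the sessionid cookie
    s = requests.Session()
    resp = s.post(host + '/api/v1/auth/login/', data=json.dumps({'email': email, 'password': password}))
    if resp.status_code != 200:
        raise ValueError('login failed with status ' + str(resp.status_code))
    return s, dict(sessionid=resp.cookies['sessionid'])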
Example #28
0
def getSuperFilters(request, company_id):
    
    user_id = request.user.id
    company_id = request.user.company_id
    object_type = request.GET.get('object_type')
    system_type = request.GET.get('system_type')
    #existingIntegration = CompanyIntegration.objects(company_id = company_id ).first()
    existingIntegration = {}
    existingIntegration['integrations'] = {}
    
    map = Code("function () {"
             "  for (var key in this.integrations) emit(key, this.integrations[key]['filters']); } ")
    
    reduce = Code("function (key, values) { return null; } ")
    
    results = CompanyIntegration.objects(company_id=company_id).map_reduce(map, reduce, "inline")
    results = list(results)
    for result in results:
        existingIntegration['integrations'][result.key] = {'filters': result.value}
    try:
        code = None
        if existingIntegration is not None:
            for source in existingIntegration['integrations'].keys():
                defined_system_type = SuperIntegration.objects(Q(code = source) & Q(system_type = system_type)).first()
                if defined_system_type is not None:
                    code = source
            print 'found code ' + str(code)

        if code is None:
            raise ValueError("No integrations defined")  
        else:
            super_filters = SuperFilters.objects(source_system = code).first()
            if super_filters is None:
                result = []
            else:
                if object_type not in super_filters['filters']:
                    result = []
                else:                    
                    temp = super_filters['filters'].get(object_type, None)
                    filters = existingIntegration['integrations'][code].get('filters', None)
                    if filters is not None:
                        filter_obj = filters.get(object_type, None)
                        if filter_obj is None:
                            return JsonResponse({'results': temp}, safe=False)
                        for filter, values in filter_obj.items():
                            if filter in temp:
                                if filter == 'OwnerId': #reduce the users list to only those with opportunities
                                    temp_values = {}
                                    opp_field_qry = 'opportunities__sfdc__exists'
                                    company_field_qry = 'company_id'
                                    projection = {'$project': {'owner_id': '$opportunities.sfdc.OwnerId'  } }
                                    querydict = {opp_field_qry: True, company_field_qry: company_id}
                                    opps = Account.objects(**querydict).aggregate({'$unwind': '$opportunities.sfdc'}, projection)
                                    opps = list(opps)
                                    opps_owner_ids = [opp['owner_id'] for opp in opps]
                                    print 'opp owner ids ' + str(opps_owner_ids)
                                    tempValues = [value for value in values['values'] if value['value'] in opps_owner_ids]
                                    print 'temp values2 is ' + str(tempValues)
                                    temp_values['values'] = tempValues
                                    temp_values['label'] = values['label']
                                    values = temp_values
                                values['values'].sort()
                                temp[filter] = values
                    result = {'results': temp}
            #result =  'Nothing to report'
        return JsonResponse(result, safe=False)
    except Exception as e:
        return JsonResponse({'Error' : str(e)})
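getSuperFilters uses an inline map/reduce to pull one field out of every integration sub-document. A minimal sketch of the same pattern as a reusable helper; the helper name is hypothetical and it assumes the same CompanyIntegration model and bson Code class used above:

from bson.code import Code

def integration_field_by_source(company_id, field):
    # hypothetical helper: return {source_code: integrations[source_code][field]} for a company
    map_f = Code("function () {"
                 "  for (var key in this.integrations) emit(key, this.integrations[key]['" + field + "']); }")
    reduce_f = Code("function (key, values) { return null; }")
    results = CompanyIntegration.objects(company_id=company_id).map_reduce(map_f, reduce_f, "inline")
    return dict((r.key, r.value) for r in results)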
Example #29
0
def getCampaigns(request, id):
    try:
        company_id = request.user.company_id
        system_type = request.GET.get('system_type')
        page_number = int(request.GET.get('page_number'))
        items_per_page = int(request.GET.get('per_page'))
        start_date = int(request.GET.get('start_date'))
        end_date = int(request.GET.get('end_date'))
        sub_view = request.GET.get('subview')
        filters = request.GET.get('filters')
        filters = json.loads(filters)
        superfilters = request.GET.get('superfilters')
        super_filters = json.loads(superfilters)
        #print 'super filters are ' + str(super_filters)
        date_field = None
        querydict_filters = {}
        
        offset = (page_number - 1) * items_per_page
        
#         existingIntegration = CompanyIntegration.objects(company_id = company_id ).first()
#         code = None
#         if existingIntegration is not None:
#             for source in existingIntegration.integrations.keys():
#                 defined_system_type = SuperIntegration.objects(Q(code = source) & Q(system_type = system_type)).first()
#                 if defined_system_type is not None:
#                     code = source
#                     
#         if code is None:
#             return JsonResponse({'Error' : 'Marketing Automation system not found'})
        
        #projection = {'$project': {'_id': '$campaigns.' + code + '.id', 'created_date': '$campaigns.' + code + '.createdAt', 'name': '$campaigns.' + code + '.name', 'description': '$campaigns.' + code + '.description', 'url': '$campaigns.' + code + '.url', 'type': '$campaigns.' + code + '.type', 'channel': '$campaigns.' + code + '.channel', } }
        match = {'$match' : { }}
        date_field = None
        collection = Campaign._get_collection()
        company_field_qry = 'company_id'
        querydict = {company_field_qry: company_id}
        code = ''
        
        if super_filters is not None:
            if 'date_types' in super_filters: # need to filter by a certain type of date
                date_field = super_filters['date_types']
                if date_field is not None:
                    if start_date is not None:
                        start_date = datetime.fromtimestamp(float(start_date) / 1000)
                    if end_date is not None:
                        end_date = datetime.fromtimestamp(float(end_date) / 1000)
    
                    local_start_date = get_current_timezone().localize(start_date, is_dst=None)
                    utc_day_start = local_start_date.astimezone(pytz.timezone('UTC'))
                    utc_day_start_string = datetime.strftime(utc_day_start, '%Y-%m-%dT%H:%M:%SZ+0000')
                    utc_day_start_string_crm = datetime.strftime(utc_day_start, '%Y-%m-%dT%H:%M:%S.000+0000')
                    
                    local_end_date = get_current_timezone().localize(end_date, is_dst=None)
                    utc_day_end = local_end_date.astimezone(pytz.timezone('UTC'))
                    utc_day_end_string = datetime.strftime(utc_day_end, '%Y-%m-%dT%H:%M:%SZ+0000')
                    utc_day_end_string_crm = datetime.strftime(utc_day_end, '%Y-%m-%dT%H:%M:%S.000+0000')
                    #print 'utc start string is ' + str(utc_day_start_string)
                    #print 'utc end string is ' + str(utc_day_end_string)
                    #remove the date_types item 
                    #super_filters.pop('date_types')
                
                    date_field_original = date_field
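                    # MongoEngine query kwargs use '__' where the raw Mongo field path uses '.', so keep both forms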
                    date_field = date_field.replace('.', '__')
                    date_field_start_qry =  date_field + '__gte'
                    date_field_end_qry = date_field + '__lte'
        
        if filters is not None:
            for key, value in filters.items():
                if value is not None and value != '':
                    querydict_filters['campaigns__' + code + '__' + key] = value #creates an additional querydict that can be added to the main qd
                    match['$match']['campaigns.' + code + '.' + key] = value
                    
        if sub_view == 'allcampaigns':
            if date_field is None:
                total = collection.find({'company_id': int(company_id)}).count() #.hint('company_id_1')
                queryset = Campaign.objects(**querydict).skip(offset).limit(items_per_page)
            else:
                total = collection.find({'company_id': int(company_id), date_field_original: {'$gte':utc_day_start_string, '$lte':utc_day_end_string}}).count() #.hint('company_id_1')
                querydict[date_field_start_qry] = utc_day_start_string 
                querydict[date_field_end_qry] = utc_day_end_string
                queryset = Campaign.objects(**querydict).skip(offset).limit(items_per_page)
        elif sub_view == 'onlyma' or sub_view == 'onlycrm':
            if sub_view == 'onlyma':
                code = _get_system(company_id, 'MA')
            else:
                code = _get_system(company_id, 'CRM')
            if code is None:
                return JsonResponse({'Error' : 'No source system found'})
            querydict['source_system'] = code
            if date_field is None:
                total = collection.find({'company_id': int(company_id), 'source_system': code}).count() #.hint('company_id_1')
                queryset = Campaign.objects(**querydict).skip(offset).limit(items_per_page)
            else:
                if sub_view == 'onlycrm' and code == 'sfdc':
                    if date_field_original == 'campaigns.mkto.createdAt':
                        date_field_original = 'campaigns.sfdc.CreatedDate'
                        date_field_start_qry = 'campaigns__sfdc__CreatedDate__gte'
                        date_field_end_qry = 'campaigns__sfdc__CreatedDate__lte'
                    elif date_field_original == 'campaigns.mkto.updatedAt':
                        date_field_original = 'campaigns.sfdc.LastModifiedDate'
                        date_field_start_qry = 'campaigns__sfdc__LastModifiedDate__gte'
                        date_field_end_qry = 'campaigns__sfdc__LastModifiedDate__lte'
                total = collection.find({'company_id': int(company_id), 'source_system': code, date_field_original: {'$gte':utc_day_start_string, '$lte':utc_day_end_string}}).count() #.hint('company_id_1')
                querydict[date_field_start_qry] = utc_day_start_string 
                querydict[date_field_end_qry] = utc_day_end_string
                queryset = Campaign.objects(**querydict).skip(offset).limit(items_per_page)
        elif sub_view == 'crmfromma' or sub_view == 'crmnotma':
            code = _get_system(company_id, 'CRM')
            if code is None:
                return JsonResponse({'Error' : 'No source system found'})
            querydict['source_system'] = code
            mapping = CompanyIntegration.objects(company_id=company_id).only('mapping').first()
            print 'mapping is ' + str(mapping)
            if mapping is None or len(mapping) == 0:
                return JsonResponse({'Error' : 'No mapping found in company settings'})
            ma_user = None
            ma_code = _get_system(company_id, 'MA')
            if ma_code == 'mkto': 
                ma_user = mapping['mapping'].get('mkto_sync_user', None)
            if ma_user is None or ma_code is None:
                return JsonResponse({'Error' : 'No marketing automation details found'})
            if code == 'sfdc':
                if sub_view == 'crmfromma':
                    user_field_qry = 'campaigns.sfdc.CreatedById'
                    querydict['campaigns__sfdc__CreatedById'] = ma_user
                else:
                    user_field_qry = 'campaigns.sfdc.CreatedById__ne'
                    querydict['campaigns__sfdc__CreatedById__ne'] = ma_user
                if date_field is None:
                    total = collection.find({'company_id': int(company_id), 'source_system': code, user_field_qry: ma_user}).count() #.hint('company_id_1')
                    queryset = Campaign.objects(**querydict).skip(offset).limit(items_per_page)
                else:
                    if date_field_original == 'campaigns.mkto.createdAt':
                        date_field_original = 'campaigns.sfdc.CreatedDate'
                        date_field_start_qry = 'campaigns__sfdc__CreatedDate__gte'
                        date_field_end_qry = 'campaigns__sfdc__CreatedDate__lte'
                    elif date_field_original == 'campaigns.mkto.updatedAt':
                        date_field_original = 'campaigns.sfdc.LastModifiedDate'
                        date_field_start_qry = 'campaigns__sfdc__LastModifiedDate__gte'
                        date_field_end_qry = 'campaigns__sfdc__LastModifiedDate__lte'
                    total = collection.find({'company_id': int(company_id), 'source_system': code, user_field_qry: ma_user, date_field_original: {'$gte':utc_day_start_string, '$lte':utc_day_end_string}}).count() #.hint('company_id_1')
                    querydict[date_field_start_qry] = utc_day_start_string 
                    querydict[date_field_end_qry] = utc_day_end_string
                    queryset = Campaign.objects(**querydict).skip(offset).limit(items_per_page)
            
        
        serializer = CampaignSerializer(queryset, many=True)   
        type = 'campaigns'
        return JsonResponse({'count' : total, 'results': serializer.data, 'type': type})    
    except Exception as e:
        return JsonResponse({'Error' : str(e)})
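getCampaigns converts the millisecond epoch dates from the request into UTC date strings before querying. A minimal sketch of that conversion as a helper; the name is hypothetical and it assumes Django's get_current_timezone and pytz as used above:

import pytz
from datetime import datetime
from django.utils.timezone import get_current_timezone

def epoch_ms_to_utc_strings(epoch_ms):
    # hypothetical helper: epoch milliseconds (interpreted in the current timezone) -> (MA-style, CRM-style) UTC date strings
    local_dt = get_current_timezone().localize(datetime.fromtimestamp(float(epoch_ms) / 1000), is_dst=None)
    utc_dt = local_dt.astimezone(pytz.utc)
    return (datetime.strftime(utc_dt, '%Y-%m-%dT%H:%M:%SZ+0000'),
            datetime.strftime(utc_dt, '%Y-%m-%dT%H:%M:%S.000+0000'))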
Example #30
0
def saveSfdcContactsToMaster(user_id=None, company_id=None, job_id=None, run_type=None):
    # delete later
    # job_id = ObjectId("569adcfc8afb00205c799f28")
    if run_type == "initial":
        contacts = TempData.objects(
            Q(company_id=company_id) & Q(record_type="contact") & Q(source_system="sfdc") & Q(job_id=job_id)
        ).only(
            "source_record"
        )  # & Q(job_id=job_id)
    else:
        contacts = TempDataDelta.objects(
            Q(company_id=company_id) & Q(record_type="contact") & Q(source_system="sfdc") & Q(job_id=job_id)
        ).only(
            "source_record"
        )  # & Q(job_id=job_id)
    print "co id is " + str(company_id)
    contactListTemp = list(contacts)
    contactList = [i["source_record"] for i in contactListTemp]
    # print 'saving sfdc contacts'
    try:
        # get the custom field for Contact Status, if it exists
        existingIntegration = CompanyIntegration.objects(company_id=company_id).first()
        contact_status = None
        if "sfdc" in existingIntegration["integrations"]:
            contact_status = existingIntegration["mapping"].get("sfdc_contact_status", None)

        for newContact in contactList:  # ['records']:

            # company_id = request.user.company_id
            sfdc_contact_Id = str(newContact["Id"])
            print "contact id is " + sfdc_contact_Id
            # sfdc_mkto_id = str(newLead['sfdcLeadId']) #check if there is a corresponding lead from MKTO
            existingLeadMkto = None
            existingLeadSfdc = None
            existingLeadHspt = None
            existingContact = Lead.objects(Q(company_id=company_id) & Q(sfdc_contact_id=sfdc_contact_Id)).first()

            if existingContact is not None:  # we found this contact already in the DB
                print "found contact match for " + str(sfdc_contact_Id)
                if "sfdc" in existingContact.contacts:
                    existingContact.source_first_name = newContact["FirstName"]
                    existingContact.source_last_name = newContact["LastName"]
                    existingContact.source_email = newContact["Email"]
                    # existingContact.source_created_date = str(newContact['CreatedDate'])
                    existingContact.source_source = newContact["LeadSource"]
                    if contact_status is not None and contact_status in newContact:
                        existingContact.source_status = newContact[contact_status]
                    existingContact.contacts["sfdc"] = newContact
                    existingContact.save()
                    # Lead.objects(Q(company_id=company_id) & Q(sfdc_contact_id=sfdc_contact_Id)).update(contacts__sfdc=newContact)
                else:
                    existingContact.contacts["sfdc"] = {}
                    existingContact.contacts["sfdc"] = newContact
                    existingContact.save()
            # elif existingContact is None:  # this lead does not exist
            else:
                existingLeadSfdc = (
                    Lead.objects(Q(company_id=company_id) & Q(leads__sfdc__ConvertedContactId=sfdc_contact_Id))
                    .hint("company_id_1_leads.sfdc.convertedContactId_1")
                    .first()
                )
                if existingLeadSfdc is not None:
                    print "found match for sfdc lead for contact " + str(sfdc_contact_Id)
                    # existingLeadSfdcList = list(existingLeadSfdc)
                    existingLeadSfdc.sfdc_contact_id = sfdc_contact_Id
                    if contact_status is not None and contact_status in newContact:
                        existingLeadSfdc.source_status = newContact[contact_status]
                    existingLeadSfdc.contacts = {}
                    existingLeadSfdc.contacts["sfdc"] = newContact
                    existingLeadSfdc.save()
                    # remove below comments after figuring out how Mkto stored SFDC contact ID
                else:
                    existingLeadMkto = Lead.objects(
                        Q(company_id=company_id) & Q(leads__mkto__sfdcContactId=sfdc_contact_Id)
                    ).first()
                    if (
                        existingLeadMkto is not None
                    ):  # we found a Mkto lead record which is matched to this new Sfdc contact
                        print "found mkto lead " + existingLeadMkto.mkto_id
                        existingLeadMkto.sfdc_contact_id = sfdc_contact_Id
                        # existingLeadMkto.contacts = {}
                        existingLeadMkto.contacts["sfdc"] = newContact
                        existingLeadMkto.save()
                    else:
                        existingLeadHspt = Lead.objects(
                            Q(company_id=company_id) & Q(leads__hspt__properties__salesforcecontactid=sfdc_contact_Id)
                        ).first()
                        if (
                            existingLeadHspt is not None
                        ):  # we found a Hspt lead record which is matched to this new Sfdc contact
                            existingLeadHspt.sfdc_contact_id = sfdc_contact_Id
                            existingLeadHspt.contacts = {}
                            existingLeadHspt.contacts["sfdc"] = newContact
                            existingLeadHspt.save()
            if (
                existingLeadSfdc is None
                and existingLeadMkto is None
                and existingLeadHspt is None
                and existingContact is None
            ):  # no matches found so save new record
                lead = Lead()
                lead.sfdc_contact_id = sfdc_contact_Id
                lead.company_id = company_id
                lead.source_first_name = newContact["FirstName"]
                lead.source_last_name = newContact["LastName"]
                lead.source_email = newContact["Email"]
                lead.source_created_date = str(newContact["CreatedDate"])
                lead.source_source = newContact["LeadSource"]
                if contact_status is not None and contact_status in newContact:
                    lead.source_status = newContact[contact_status]
                lead.contacts = {}
                lead.contacts["sfdc"] = newContact
                lead.save()
            # lead = Lead()
    #             company_id = request.user.company_id
    #             derived_id = 'sfdc_' + str(newLead['Id'])
    #             Lead.objects(derived_id = derived_id).modify(upsert=True, new=True, set__leads__sfdc = newLead, set_on_insert__derived_id = derived_id, set_on_insert__company_id = company_id)
    #
    #             oldLead = Lead.objects(derived_id = lead.derived_id)
    #             if oldLead.count() == 0:
    #                 lead.leads["sfdc"] = newLead
    #                 lead.save()
    #             else:
    #                 oldLead.leads["sfdc"] = newLead
    #                 Lead.objects(derived_id = lead.derived_id).update(oldLead)
    except Exception as e:
        print "exception while saving SFDC contact " + str(e)
        send_notification(dict(type="error", success=False, message=str(e)))
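saveSfdcContactsToMaster matches an incoming SFDC contact against existing leads in a fixed precedence before creating a new record. A minimal sketch of that lookup order; the helper name is hypothetical and it assumes the same Lead document and the mongoengine Q import used above (the original's .hint() calls are omitted):

from mongoengine import Q

def find_lead_for_sfdc_contact(company_id, sfdc_contact_id):
    # hypothetical helper: try the match order used above - existing contact, converted SFDC lead,
    # Marketo lead carrying the SFDC contact id, then Hubspot lead carrying the SFDC contact id
    queries = [
        Q(sfdc_contact_id=sfdc_contact_id),
        Q(leads__sfdc__ConvertedContactId=sfdc_contact_id),
        Q(leads__mkto__sfdcContactId=sfdc_contact_id),
        Q(leads__hspt__properties__salesforcecontactid=sfdc_contact_id),
    ]
    for q in queries:
        match = Lead.objects(Q(company_id=company_id) & q).first()
        if match is not None:
            return match
    return None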