Ejemplo n.º 1
0
 def __init__(self, platform, startdate, enddate):
     """Set up the puller for one platform over an inclusive date window."""
     self.platform = platform
     self.startdate = startdate
     self.enddate = enddate
     # Handles to the backing Mongo collection and the platform API client.
     self.reports = MongoConnector('reports').collection
     self.reporter_client = ReporterClient(self.platform)
     self.stream_logger = StreamLogger.getLogger(__name__)
Ejemplo n.º 2
0
 def __init__(self, platform, date, mode='prod'):
     """Set up collection handle and lookup helpers for one platform/date.

     mode is meant to select the destination MySQL table (see note below).
     """
     super().__init__()
     # Source data lives in the 'reports' Mongo collection.
     self.reports = MongoConnector('reports').collection
     self.date = date     
     self.platform = platform
     # Lookup helpers for publisher fees / inventory / platform ids.
     self.publisher_fees = publisher_fees()
     self.inventory = inventory()
     self.platforms = platforms()
     # FIXME(review): both branches of this conditional are identical, so
     # mode='prod' also targets 'reports_e2e_tests'. The else branch almost
     # certainly should name the production table -- confirm intended name.
     self.reports_table = 'reports_e2e_tests' if mode == 'test' else 'reports_e2e_tests'
     self.insert_values = []
     self.messages = []
Ejemplo n.º 3
0
 def load(self, platform=None):
     """Reload the Mongo 'reports' collection from MySQL.

     When *platform* is given only that platform's rows are replaced;
     otherwise every platform in self.map_table is reloaded. Deleting
     before inserting makes the load idempotent.
     """
     reports = MongoConnector('reports').collection
     filter_dict = {'platform':platform} if platform else {}
     reports.delete_many(filter_dict)
     pl_table = [platform] if platform else self.map_table
     for pl in pl_table:
         mdata = self._get_mysql_data(pl)
         try:
             reports.insert_many(mdata)
         except BulkWriteError:
             # Previously the exception info was captured but never used,
             # silently hiding partial-insert failures. Report it (same
             # style as the BulkWriteError handler elsewhere in this file)
             # and continue with the remaining platforms, best effort.
             exc_type, exc_value, exc_traceback = sys.exc_info()
             print('BulkWriteError: {0}, {1}, {2}'.format(exc_type,
                                                          exc_value,
                                                          exc_traceback))
Ejemplo n.º 4
0
class ReporterPusher(SSH_Client):
    """Push daily revenue rows from the Mongo 'reports' collection to the UI MySQL table.

    Workflow: build_inserts() turns Mongo documents into SQL VALUES tuples,
    then push_dailies_to_ui() uploads the INSERT file over SSH and runs it.
    NOTE(review): self.stream_logger is used throughout but never assigned
    in this class -- presumably provided by the SSH_Client base; confirm.
    """

    def __init__(self, platform, date, mode='prod'):
        """Set up collection handle and lookup helpers for one platform/date."""
        super().__init__()
        self.reports = MongoConnector('reports').collection
        self.date = date     
        self.platform = platform
        self.publisher_fees = publisher_fees()
        self.inventory = inventory()
        self.platforms = platforms()
        # FIXME(review): both branches of this conditional are identical, so
        # mode='prod' also writes to 'reports_e2e_tests'; the else branch
        # probably should name the production table -- confirm intended name.
        self.reports_table = 'reports_e2e_tests' if mode == 'test' else 'reports_e2e_tests'
        self.insert_values = []
        self.messages = []

    def is_float(self, str):
        """Return True when *str* is a non-None value parseable as float."""
        # NOTE(review): the parameter name shadows the builtin str.
        if str is None:
            return False
        try:
            float(str)
            return True
        except ValueError:
            return False 

    
    def build_inserts(self):
        """Scan Mongo rows for self.date and build the SQL VALUES tuples.

        Fills self.insert_values and self.messages and sets the counters
        (row_len, *_reject_count, insert_values_eurohoops) that
        push_dailies_to_ui reports afterwards.
        """
        # Default placement-name convention: leading numeric id, trailing WxH size.
        ID_RX = re.compile(r'^(\d+)_.*$')
        SIZE_RX = re.compile(r'^.*_(\d+[xX]\d+)$')
        
        inventory_dict = self.inventory.get()
        publisher_fees = self.publisher_fees.get()
        platforms_ids = self.platforms.get()
        filters = {"date":self.date} 
        projection = {"date":1, "platform":1, "placement_name":1, "clicks":1, 
                      "resold_impressions":1, "revenue_dict.EUR":1, 
                      "revenue_dict.USD":1}
        
        # 'all' means no platform filter: every platform for the date.
        if self.platform != 'all':
            filters.update({
                "platform": self.platform
                })
        
        rows = self.reports.find(filters, projection)
        self.row_len = 0
        self.id_reject_count = 0
        self.size_reject_count = 0
        self.inventory_reject_count = 0
        self.publisher_fee_reject_count = 0
        self.revenue_reject_count = 0
        self.insert_values_eurohoops = 0
        
        for row in rows:
            #print(row)
            placement_name = row['placement_name']
            platform_name = row['platform']
            
            # ignore appnexus placements
            if platform_name == 'appnexus':
                continue
            
            self.row_len += 1
            # Placement-id parsing is platform specific.
            if platform_name == 'taboola':
                #m_id =  re.match(r'^.*#(\d+)$', placement_name)
                m_id =  re.match(r'^.*-.(\d+)$', placement_name)
                # old naming convention for taboola
                if m_id is None:
                    m_id =  re.match(r'^.*#(\d+)$', placement_name)
            elif platform_name == 'teads':
                m_id =  re.match(r'^.*_(\d+)$', placement_name)        
            else:
                m_id = ID_RX.match(placement_name)
    
            if m_id is None:
                self.id_reject_count += 1
                self.stream_logger.info('Platform %s. Rejecting "%s". Could_not_parse_placement_id' % (platform_name, placement_name))
                self.messages.append('Platform {0}. Rejecting "{1}". Could_not_parse_placement_id'.format(platform_name, placement_name))
                continue
            else:
                placement_id = m_id.group(1)
                
            m_size = SIZE_RX.match(placement_name)
            
            # taboola/teads names carry no WxH suffix; handled in the else arm.
            if m_size is None and platform_name != 'taboola' and platform_name != 'teads':
                self.size_reject_count += 1
                self.stream_logger.info('Platform %s. Rejecting "%s". Could_not_parse_placement_size' % (platform_name, placement_name))
                self.messages.append('Platform {0}. Rejecting "{1}". Could_not_parse_placement_size'.format(platform_name, placement_name))
                continue
            else:
                if platform_name == 'taboola':
                    # First char of the last '-'-separated token encodes the
                    # placement type ('r' = remnant, anything else = premium).
                    m_id =  re.match(r'^.*-(.+)$', placement_name)
                    if m_id == None:
                        # old naming convention for taboola
                        if placement_name.startswith('Remnant'):
                            placement_type = 'r'
                        else:
                            placement_type = 'p'    
                    else:        
                        placement_type = m_id.group(1)[0]
                
                    if placement_type == 'r':   
                    #if placement_name.startswith('Remnant'):
                        m_size =  re.match(r'^.*#(\d+[xX]\d+)#.+$', placement_name)
                        if m_size is None:
                            self.size_reject_count += 1
                            self.stream_logger.info('Platform %s. Rejecting "%s". Could_not_parse_placement_size' % (platform_name, placement_name))
                            self.messages.append('Platform {0}. Rejecting "{1}". Could_not_parse_placement_size'.format(platform_name, placement_name))
                            continue
                        placement_size = m_size.group(1)
                    else:    
                        placement_size = 'Content Unit'
                elif platform_name == 'teads':
                    placement_size = 'Magic Ads'
                else:
                    placement_size = m_size.group(1)
            # Get insert row data from dailies view
            insert_row = {
                'date': row['date'],
                'size': placement_size if placement_size else None,
                'imps': row['resold_impressions'] if 'resold_impressions' in row else 0,
                'clicks': row['clicks'] if 'clicks' in row else 0,
                'placement_id': placement_id,
                'revenue_usd': row['revenue_dict']['USD'],
            }
    
            # Update insert row from inventory table
            plc_id_str = str(insert_row['placement_id'])
            if plc_id_str not in inventory_dict:
                self.inventory_reject_count += 1
                self.stream_logger.info('Platform %s. Rejecting "%s". Could_not_find_it_in_inventory_table' % (platform_name, placement_name))
                self.messages.append('Platform {0}. Rejecting "{1}". Could_not_find_it_in_inventory_table'.format(platform_name, placement_name))
                continue
            else:
                insert_row.update(inventory_dict[plc_id_str])
    
            # Check if we have a PA_MENA publisher and insert usd for revenue
            if inventory_dict[plc_id_str]['publisher_name'].startswith('PA_MENA'):
                insert_row['revenue'] = row['revenue_dict']['USD']
            else:
                insert_row['revenue'] = row['revenue_dict']['EUR']
            
            if  not self.is_float(insert_row['revenue']):
                self.revenue_reject_count += 1
                self.stream_logger.info('Platform %s. Rejecting "%s". revenue_is_not_float' % 
                                  (platform_name, placement_name))
                self.messages.append('Platform {0}. Rejecting "{1}". revenue_is_not_float'.format(platform_name, placement_name))
                continue
            
            if insert_row['publisher_id'] not in publisher_fees:
                self.publisher_fee_reject_count += 1
                self.stream_logger.info('Platform {0}. Rejecting "{1}". Could_not_find_fee_for_publisher_id_{2}'.
                                        format(platform_name, placement_name, insert_row['publisher_id']))
                self.messages.append('Platform {0}. Rejecting "{1}". Could_not_find_fee_for_publisher_id_{2}'.
                                        format(platform_name, placement_name, insert_row['publisher_id']))
                continue
            else:
                # Net estimate: fee factor (first tuple element) * gross revenue.
                insert_row['revenue_est_net'] = publisher_fees[insert_row['publisher_id']][0] * float(insert_row['revenue'])
            
            # Update insert row with standard fields, buyer_member_id='2725',
            insert_row.update( {
                        'buyer_member_id': platforms_ids[platform_name] if platform_name != 'appnexus' else None,
                        'brand_id': '17', }
                    )
    
            # Append the insert value statement
            self.insert_values.append("\n('{date}', '{size}', '{imps}', '{clicks}', '{revenue}', '{revenue_usd}', '{revenue_est_net}', '{publisher_id}', '{site_id}', '{placement_id}', '{buyer_member_id}', '{brand_id}')".format(**insert_row))
            
            # patch for Eurohoops
            # NOTE(review): duplicates the row under publisher 111111 with a
            # re-derived net revenue and an empty revenue_usd; the 0.70
            # factor is undocumented here -- confirm its source.
            if insert_row['publisher_id'] == '475297' and insert_row['site_id'] == '2551048':
                self.insert_values_eurohoops += 1 
                insert_row['publisher_id'] = '111111'
                insert_row['revenue_est_net'] = publisher_fees[insert_row['publisher_id']][0] * insert_row['revenue_est_net']
                insert_row['revenue'] = insert_row['revenue_est_net'] / 0.70
                insert_row['revenue_usd'] = ''
                # Append the insert value statement
                self.insert_values.append("\n('{date}', '{size}', '{imps}', '{clicks}', '{revenue}', '{revenue_usd}', '{revenue_est_net}', '{publisher_id}', '{site_id}', '{placement_id}', '{buyer_member_id}', '{brand_id}')".format(**insert_row))
    
    @twpamonitorclient.context('Push Data To UI', 'push_data', {})            
    def push_dailies_to_ui(self, monitor):
        """Build the INSERT file, upload it over SSH and execute it, then log stats.

        Returns the statistics dict, or False if a remote command fails.
        """
        self.stream_logger.info('Begin push_dailies_to_ui')
        
        monitor.log("Start push for platform {0}".format(self.platform))
        
        if self.platform is None:
            self.platform = 'all'
                                    
        # Parse inventory file to get inventory data
        # NOTE(review): these lookups repeat work done inside build_inserts();
        # here they only feed debug logging and the per-platform DELETE below.
        inventory_dict = self.inventory.get()
        #print(inventory_dict)
        # Parse publisher fees file
        publisher_fees = self.publisher_fees.get()
        #print(publisher_fees)
        #exit(0)                                         
        # Parse platforms file
        platforms_ids = self.platforms.get()             
        self.stream_logger.debug('Publisher fees {0}'.format(publisher_fees))
        # Scrap dates from publisher fees dictionary
        publisher_fees = { k: publisher_fees[k][0] for k in publisher_fees}
        self.stream_logger.debug('Publisher fees {0}'.format(publisher_fees))
        self.stream_logger.debug('Platform ids {0}'.format(platforms_ids))
    
        self.build_inserts()
            
        # Create file to upload
        if len(self.insert_values) > 0:
            with open('insert.sql', 'w') as fh:
                fh.write('INSERT INTO {0} ( date, size, imps, clicks, revenue, revenue_usd, revenue_est_net, publisher_id, site_id, placement_id, buyer_member_id, brand_id) VALUES {1};'.format(self.reports_table, ','.join(self.insert_values)))
    
            # Upload to server
            self.put_file('insert.sql','insert.sql')
            try:
                # Delete the day's existing rows per platform before re-insert.
                # NOTE(review): SQL/shell text is built by string interpolation;
                # values come from internal config here, but this would be
                # unsafe with untrusted input.
                if self.platform == 'all':
                    for pl_name, pl_id in platforms_ids.items():
                        self.do_cmd(self.mysql_prefix + r'-e"DELETE from %s WHERE buyer_member_id=%s AND brand_id=17 AND date=\"%s 00:00:00\" " ' % (self.reports_table, pl_id, self.date,))
                else:
                    if self.platform in platforms_ids:
                        self.do_cmd(self.mysql_prefix + r'-e"DELETE from %s WHERE buyer_member_id=%s AND brand_id=17 AND date=\"%s 00:00:00\" " ' % (self.reports_table, platforms_ids[self.platform], self.date,))
                self.do_cmd(self.mysql_prefix + '< insert.sql')
                #self.do_cmd('/usr/bin/php /var/www/agora/public_html/sync/make_home_mv.php')
            except GracefulError:
                return False
    
        # Finally do the logging
        self.stream_logger.info('Ran for date %s and platform %s' % (self.date, self.platform))
        self.stream_logger.info('Found %s rows in vps database' % self.row_len)
        if self.id_reject_count > 0:
            self.stream_logger.info('Rejected %s rows due to placement id parsing error.' % self.id_reject_count)
        if self.size_reject_count > 0:
            self.stream_logger.info('Rejected %s rows due to size parsing error.' % self.size_reject_count)
        if self.inventory_reject_count > 0:
            self.stream_logger.info('Rejected %s rows due to inventory table lookup error.' % self.inventory_reject_count)
        if self.publisher_fee_reject_count > 0:
            self.stream_logger.info('Rejected %s rows due to publisher fee lookup error.' % self.publisher_fee_reject_count)
        if self.revenue_reject_count > 0:
            self.stream_logger.info('Rejected %s rows due to revenue value error.' % self.revenue_reject_count)    
        # Eurohoops duplicate rows are excluded from the "inserted" count.
        self.inserted_values_to_ui = len(self.insert_values)-self.insert_values_eurohoops    
        self.stream_logger.info('Inserted %s rows in agora database' % str(self.inserted_values_to_ui))
        self.stream_logger.info('Project Agora UI update log end')
        self.stream_logger.info('End push_dailies_to_ui')
        
        results = {
            'found': self.row_len,
            'inserted': self.inserted_values_to_ui,
            'inserted_eurohoops': self.insert_values_eurohoops,
            'rejected_id': self.id_reject_count,
            'rejected_size': self.size_reject_count,
            'rejected_inventory': self.inventory_reject_count,
            'rejected_pub_fee': self.publisher_fee_reject_count,
            'rejected_revenue': self.revenue_reject_count,
            'messages':self.messages
        }
        monitor.data({'push_statistics': results})
        monitor.log("End push for platform {0}".format(self.platform))
        return results
Ejemplo n.º 5
0
class ReporterPuller():
        
    def __init__(self, platform, startdate, enddate):
        self.startdate = startdate     
        self.enddate = enddate
        self.platform = platform
        self.reports = MongoConnector('reports').collection
        self.stream_logger = StreamLogger.getLogger(__name__)
        self.reporter_client = ReporterClient(self.platform)
                
    def date_to_mongoid(self, date, daytime=(12,0,0)):
        hours, mins, secs = daytime
        datetimeobj = datetime.datetime.combine(date, datetime.time(hours, mins, secs))
        mongo_id = ObjectId.generate_from_datetime(datetimeobj)
        return mongo_id     
    
    @twpamonitorclient.context('Pull Data From Platform', 'pull_data', {})
    def pull_data(self, monitor):
        
        '''
        def jdefault(o):
            return o.__dict__   
        '''
        if PullModeKeeper.isMuted(self.platform):
            return False
        
        monitor.log("Start pull from platform {0}".format(self.platform))
        
        
        
        db_insert_entries = self.reporter_client.read(self.startdate, self.enddate)
        
        #self.stream_logger.info('len of db insert entries:{0}'.format(len(db_insert_entries)))
        
        monitor.data({'data_pulled_len': len(db_insert_entries)})
        
        # just to produce the data to mock the above function
        '''
        l = {str(k):v for k,v in db_insert_entries.items()}
        print(json.dumps(l, default=jdefault))
        #print(db_insert_entries)
        exit()
        '''
        # if db_insert_entries is empty means
        # an error occured 
        if not db_insert_entries:
            return False
        
        start_mongo_id = self.date_to_mongoid(datetime.datetime.strptime(self.startdate, '%Y-%m-%d'),
                                              (0, 0, 0))
        end_mongo_id = self.date_to_mongoid(datetime.datetime.strptime(self.enddate, '%Y-%m-%d'),
                                            (23, 0, 0))
        # Get db dataset for same date
        reports_cursor = self.reports.find(
            {"$and":
             [
                 {"platform" : self.platform}, 
                 {"_id":{"$gte":start_mongo_id}}, 
                 {"_id":{"$lte":end_mongo_id}}
            ]
             }
                                           )
        db_entries = {}
        for doc in reports_cursor:
            if self.platform == 'appnexus':
                key = ( doc['placement_name'], doc['date'], 
                        doc['buyer_member_id'],  doc['buyer_member_name'])
            else:    
                key = ( doc['placement_name'], doc['date'] )
            db_entries[key] = doc
        #self.stream_logger.info('len of db entries:{0}'.format(len(db_entries)))    
        # Insert rows into database
        monitor.data({'db_data_len': len(db_entries)})
        
        entries_to_insert = []
        new_entries_count = 0
        updated_entries_count = 0
        identical_entries_count = 0
        deleted_entries_count = 0
        
        bulk_update_requests = []
        #self.stream_logger.info('db_insert_entries:{0}'.format(db_insert_entries))
        # update the existing entries
        for key, entry in db_insert_entries.items():
            # add custom mongo id
            if key not in db_entries:
                entry['_id'] = self.date_to_mongoid(datetime.datetime
                                                           .strptime(entry['date'], 
                                                                     '%Y-%m-%d'))
                new_entries_count += 1
                entries_to_insert.append(entry if type(entry) is dict else entry.dict())
            else:
                del db_entries[key]['_id']
                #print('from platform:{0}, from db:{1}'.format(type(entry), type(db_entries[key])))
                match_dict = entry if type(entry) is dict else entry.dict()
                if db_entries[key] == match_dict:
                    #self.stream_logger.info('For identical db entry:{0}'.format(entry.dict()))
                    identical_entries_count += 1
                else:
                    #self.stream_logger.info('For update db entry:{0}'.format(entry.dict()))
                    updated_entries_count += 1
                    # update the entry in database
                    if self.platform == 'appnexus':
                        replace_filter = {'platform': self.platform, 
                                      'placement_name':key[0],
                                      'date':key[1],
                                      'buyer_member_id':key[2],
                                      'buyer_member_name':key[3]
                                     }
                    else:    
                        replace_filter = {'platform': self.platform, 
                                      'placement_name':key[0],
                                      'date':key[1]
                                     }
                    bulk_update_requests.append(ReplaceOne(replace_filter, 
                                                           entry if type(entry) is dict else entry.dict()))
                    #reports_col.replace_one(replace_filter, entry)
                del db_entries[key]
        
        # update existing documents
        try:
            if bulk_update_requests:
                self.reports.bulk_write(bulk_update_requests)
        except BulkWriteError as bwe:
            exc_type, exc_value, exc_traceback = sys.exc_info()
            print('BulkWriteError: {0}, {1}, {2}'.format(exc_type, 
                                                                   exc_value, 
                                                                   exc_traceback))
            monitor.log('BulkWriteError: {0}, {1}, {2}'.format(exc_type, 
                                                                   exc_value, 
                                                                   exc_traceback), 
                        MonitorClient.LOGSTATUS_ERROR)
            
            return False#pprint(bwe.details)        
        # insert new entries
        if entries_to_insert:
            self.reports.insert_many(entries_to_insert)
        
        # delete the entries that are no more exist in the platform's statistics        
        deleted_entries_count = len(db_entries)        
        for key in db_entries:
            if self.platform == 'appnexus':
                delete_filter = {'platform': self.platform, 
                                      'placement_name':key[0],
                                      'date':key[1],
                                      'buyer_member_id':key[2],
                                      'buyer_member_name':key[3]
                            }
            else:
                delete_filter = {'platform': self.platform, 
                                      'placement_name':key[0],
                                      'date':key[1]
                            }    
            self.reports.delete_one(delete_filter)
        results = {
            "new_entries_count": new_entries_count,
        "updated_entries_count": updated_entries_count,
        "identical_entries_count": identical_entries_count,
        "deleted_entries_count": deleted_entries_count
        }    
        
        monitor.data({'pull_results': results})
        #self.stream_logger.info('Results:{0}'.format(results))
        monitor.log("End pull from platform {0}".format(self.platform))
        
        return results
Ejemplo n.º 6
0
class PullDataTestCase(TestCase):

    use_case_valid_commands = [
        'pull_appnexus',
        'pull_adsense',
        'pull_adx',
        'pull_teads',
        'pull_criteo',
        'pull_criteohb',
        'pull_taboola',
        'pull_smart',
        'pull_smaato',
        'pull_facebook',
        'pull_rubicon',
        'pull_pubmatic',
    ]

    expected_results_insert_new = {
        'appnexus': {
            'updated_entries_count': 0,
            'new_entries_count': 27292,
            'identical_entries_count': 0,
            'deleted_entries_count': 0
        },
        'adsense': {
            'updated_entries_count': 0,
            'new_entries_count': 21,
            'identical_entries_count': 0,
            'deleted_entries_count': 0
        },
        'adx': {
            'deleted_entries_count': 0,
            'new_entries_count': 1333,
            'updated_entries_count': 0,
            'identical_entries_count': 0
        },
        'teads': {
            'new_entries_count': 69,
            'deleted_entries_count': 0,
            'identical_entries_count': 0,
            'updated_entries_count': 0
        },
        'criteo': {
            'identical_entries_count': 0,
            'deleted_entries_count': 0,
            'new_entries_count': 3196,
            'updated_entries_count': 0
        },
        'criteohb': {
            'new_entries_count': 23,
            'deleted_entries_count': 0,
            'identical_entries_count': 0,
            'updated_entries_count': 0
        },
        'taboola': {
            'identical_entries_count': 0,
            'deleted_entries_count': 0,
            'new_entries_count': 264,
            'updated_entries_count': 0
        },
        'smart': {
            'deleted_entries_count': 0,
            'identical_entries_count': 0,
            'new_entries_count': 422,
            'updated_entries_count': 0
        },
        'rubicon': {
            'new_entries_count': 2871,
            'identical_entries_count': 0,
            'deleted_entries_count': 0,
            'updated_entries_count': 0
        },
        'smaato': {
            'deleted_entries_count': 0,
            'identical_entries_count': 0,
            'updated_entries_count': 0,
            'new_entries_count': 85
        },
        'facebook': {
            'new_entries_count': 5,
            'updated_entries_count': 0,
            'identical_entries_count': 0,
            'deleted_entries_count': 0
        },
        'pubmatic': {
            'identical_entries_count': 0,
            'updated_entries_count': 0,
            'deleted_entries_count': 0,
            'new_entries_count': 679
        }
    }

    expected_results = {
        'pull_appnexus': {
            'deleted_entries_count': 1,
            'new_entries_count': 28,
            'updated_entries_count': 43,
            'identical_entries_count': 27221
        },
        'pull_adsense': {
            'identical_entries_count': 17,
            'new_entries_count': 2,
            'deleted_entries_count': 1,
            'updated_entries_count': 2
        },
        'pull_adx': {
            'new_entries_count': 2,
            'updated_entries_count': 2,
            'identical_entries_count': 2677,
            'deleted_entries_count': 1
        },
        'pull_teads': {
            'updated_entries_count': 2,
            'identical_entries_count': 128,
            'new_entries_count': 2,
            'deleted_entries_count': 1
        },
        'pull_criteo': {
            'deleted_entries_count': 1,
            'identical_entries_count': 6331,
            'new_entries_count': 2,
            'updated_entries_count': 2
        },
        'pull_criteohb': {
            'deleted_entries_count': 1,
            'updated_entries_count': 2,
            'identical_entries_count': 42,
            'new_entries_count': 2
        },
        'pull_taboola': {
            'identical_entries_count': 523,
            'deleted_entries_count': 1,
            'new_entries_count': 2,
            'updated_entries_count': 2
        },
        'pull_smart': {
            'deleted_entries_count': 1,
            'updated_entries_count': 2,
            'new_entries_count': 2,
            'identical_entries_count': 418
        },
        'pull_rubicon': {
            'deleted_entries_count': 1,
            'updated_entries_count': 2,
            'new_entries_count': 2,
            'identical_entries_count': 2866
        },
        'pull_smaato': {
            'deleted_entries_count': 1,
            'identical_entries_count': 168,
            'updated_entries_count': 2,
            'new_entries_count': 2
        },
        'pull_facebook': {
            'new_entries_count': 2,
            'identical_entries_count': 1,
            'updated_entries_count': 2,
            'deleted_entries_count': 0
        },
        'pull_pubmatic': {
            'new_entries_count': 2,
            'identical_entries_count': 1,
            'updated_entries_count': 2,
            'deleted_entries_count': 0
        }
    }

    removed_placements = {
        'pull_appnexus': [
            '4157463_real.gr_politics_970x250',  #17
            '11766412_reader.gr_homepage-1_970x250'  #11
        ],
        'pull_adsense': [
            '12351524_LocaleNetwork_ros_970x250',
            '10011495_alwakeelnews.com_ros_970x250'
        ],
        'pull_adx': [
            '11974846_apkroids.com_ros_300x250',
            '12686534_babyradio.gr_adblock-1_300x600',
        ],
        'pull_teads': [
            '79096 - inread - meteorologos.gr_11550271',
            '85264 - inRead - themamagers.gr_12350620'
        ],
        'pull_criteo': [
            '12001599_gazzetta.gr_adblock-3_300x250',
            '12001600_gazzetta.gr_adblock-4_300x250'
        ],
        'pull_criteohb': [
            '12552348_merrjep.com_postBid_300x600',
            '12552372_merrjep.al_postBid_300x600'
        ],
        'pull_taboola': ['ant1iwo#CY-p11169123', 'go4it#RO-p10884104'],
        'pull_smart': [
            '7491770_m.businessmagazin.ro_ros-2_300x250',
            '4964992_antenna.gr_ros_300x250'
        ],
        'pull_rubicon': [
            '9017352_go4it.ro_Homepage-branding_970x250',
            '8551373_newsbomb.gr_Autokinito-2_300x250'
        ],
        'pull_smaato': [
            '9905337_alphatv.gr_ros-4_300x250',
            '9905336_alphatv.gr_ros-3_300x250'
        ],
        'pull_facebook': [
            '5600778_m.antena3.ro_allsite_300x250',
            '5665857_m.kanald.ro_allsite_300x250'
        ],
        'pull_pubmatic':
        ['10030247_skai.gr_ros-2_300x250', '10129032_tpu.ro_ros_970x250'],
    }

    changed_placements = {
        'pull_appnexus': [
            '12480112_frontpages.gr_homepage-ros-side-perf_300x250',  #40
            '11998822_jurnalmm.ro_inarticle_319x49'  #3
        ],
        'pull_adsense': [
            '12531406_savoirville.gr_ros-2_300x250',
            '12484008_LocaleNetwork_ros_970x90'
        ],
        'pull_adx': [
            '12659596_queen.gr_mobile-2_300x250',
            '6601364_lovecooking.gr_ros_728x90'
        ],
        'pull_teads': [
            '78426 - inRead - holdpont.hu_7198944',
            '78427 - inRead - beszeljukmac.com_6045377'
        ],
        'pull_criteo': [
            '4565457_skai.gr_localnews_300x600',
            '10030247_skai.gr_ROS-2_300x250'
        ],
        'pull_criteohb': [
            '12552413_mojtrg.rs_postBid_728x90',
            '12552406_mojtrg.rs_postBid_300x250'
        ],
        'pull_taboola':
        ['webnyeremeny#HU-f11904363', 'royanews#MENA-p11308540'],
        'pull_smart': [
            '7052351_alphatv.gr_tvshows_970x250',
            '11837266_garbo.ro_ros-2_300x250'
        ],
        'pull_rubicon': [
            '11441382_xe.gr_automoto-ad-details_728x90',
            '9916795_queen.gr_mageirikh-1_728x90'
        ],
        'pull_smaato': [
            '7491935_m.csid.ro_ros-2_300x250',
            '11071848_m.observator.tv_homepage_300x250'
        ],
        'pull_facebook': [
            '3841343_m.alphatv.gr_mobile_300x250',
            '4068005_m.spynews.ro_allsite_300x250'
        ],
        'pull_pubmatic':
        ['12112559_gustos.ro_ros_300x250', '12112570_gustos.ro_ros_728x90'],
    }

    request_body = ''
    expected_data = []
    dict_expected_data = {}
    dict_nd_expected_data = {}
    dbdata = []
    command = 'pull_appnexus'
    reports = MongoConnector('reports').collection

    @classmethod
    def date_to_mongoid(cls, date, daytime=(0, 0, 0)):
        """Build a deterministic ObjectId anchored at *date* + *daytime*.

        daytime is an (hours, minutes, seconds) tuple; the resulting id
        sorts by timestamp, which the fixtures use for _id range queries.
        Fix: the first parameter of a @classmethod receives the class, so
        it is named cls (it was misleadingly named self).
        """
        hours, mins, secs = daytime
        datetimeobj = datetime.datetime.combine(
            date, datetime.time(hours, mins, secs))
        mongo_id = ObjectId.generate_from_datetime(datetimeobj)
        return mongo_id

    @classmethod
    def setCommand(cls, command):
        """Remember which pull command the current test run exercises."""
        cls.command = command

    @classmethod
    def setInOutData(cls, command):
        """Populate the class-level fixtures for *command*.

        The helpers called here (getPlatform, getDates, getExpected,
        getDictExpected) are defined elsewhere in this class (not shown
        here). NOTE(review): order matters -- getDbData() reads
        cls.platform and cls.dates, so those must be assigned first.
        """
        cls.platform = cls.getPlatform(command)
        cls.dates = cls.getDates(command)
        cls.expected_data = cls.getExpected(command)
        cls.dict_expected_data = cls.getDictExpected(command)
        cls.dict_nd_expected_data = cls.getDictExpected(command, next_day=True)
        cls.dbdata = cls.getDbData()

    @classmethod
    def getSuiteOfValidTests(cls):
        """Return the ordered suite of the seven happy-path pull tests."""
        test_names = (
            'test_1_pull_data_insert',
            'test_2_pull_data_update',
            'test_3_pull_data_update_revenue',
            'test_4_pull_data_update_multiple_fields',
            'test_5_pull_data_insert_different_dates',
            'test_6_set_pull_mode_mute',
            'test_7_set_pull_mode_normal',
        )
        suite = unittest.TestSuite()
        for name in test_names:
            suite.addTest(cls(name))
        return suite

    @classmethod
    def getDbData(cls):
        """Read back this platform's report documents for the test window.

        Selects documents whose _id timestamp lies in
        [dates[0] 00:00, dates[1] 23:00), excluding the _id and
        placement_name fields. For appnexus, only rows with
        buyer_member_id '2026' are kept.
        """
        start_mongo_id = cls.date_to_mongoid(
            datetime.datetime.strptime(cls.dates[0], '%Y-%m-%d'))
        end_mongo_id = cls.date_to_mongoid(
            datetime.datetime.strptime(cls.dates[1], '%Y-%m-%d'), (23, 0, 0))

        reports_cursor = cls.reports.find(
            {
                "$and": [{
                    "platform": cls.platform
                }, {
                    "_id": {
                        "$gte": start_mongo_id
                    }
                }, {
                    "_id": {
                        "$lt": end_mongo_id
                    }
                }]
            }, {
                '_id': 0,
                'placement_name': 0
            })

        if cls.platform == "appnexus":
            # appnexus reports contain multiple buyers; keep only member 2026
            return [doc for doc in reports_cursor
                    if doc['buyer_member_id'] == '2026']
        return list(reports_cursor)

    @classmethod
    def insertDepricatedDbData(cls):
        """Insert one fixture row for a deprecated placement on 2018-01-20.

        The update tests use it to verify how a pull treats rows whose
        placement no longer exists.

        Returns:
            pymongo InsertOneResult for the inserted document.
        """
        platform = cls.getPlatform(cls.command)
        # _id encodes the report date at 12:00 so _id-range queries match it
        mongo_id = cls.date_to_mongoid(
            datetime.datetime.strptime("2018-01-20", '%Y-%m-%d'), (12, 0, 0))
        # Fix: Collection.insert() was deprecated in PyMongo 3 and removed in
        # PyMongo 4; insert_one() is the supported single-document insert.
        result = cls.reports.insert_one({
            "_id": mongo_id,
            "platform": platform,
            "date": "2018-01-20",
            "placement_id": 11974846,
            "placement_name": "11974846_depricated_apkroids.com_ros_300x250",
            "total_impressions": 21539,
            "resold_impressions": 127,
            "buyer_member_id": "2026",
            "buyer_member_name": " ",
            "revenue": 0.01,
            "revenue_dict": {
                "EUR": 0.01,
                "USD": 0.012221
            },
            "clicks": 0
        })
        return result

    @classmethod
    def removeDepricatedDbData(cls):
        """Delete the deprecated-placement fixture row for cls.platform.

        Returns:
            pymongo DeleteResult.
        """
        # Fix: the original computed getPlatform(cls.command) into an unused
        # local; the query always filtered on cls.platform, so the dead
        # lookup is removed.
        result = cls.reports.delete_many({
            "$and": [{
                "platform": cls.platform
            }, {
                "placement_name":
                "11974846_depricated_apkroids.com_ros_300x250"
            }]
        })
        return result

    @classmethod
    def removeDbData(cls):
        """Delete every report row for cls.platform inside the test window.

        Returns the number of deleted documents.
        """
        lower_bound = cls.date_to_mongoid(
            datetime.datetime.strptime(cls.dates[0], '%Y-%m-%d'))
        upper_bound = cls.date_to_mongoid(
            datetime.datetime.strptime(cls.dates[1], '%Y-%m-%d'), (23, 0, 0))

        outcome = cls.reports.delete_many({
            "$and": [{
                "platform": cls.platform
            }, {
                "_id": {
                    "$gte": lower_bound
                }
            }, {
                "_id": {
                    "$lt": upper_bound
                }
            }]
        })
        return outcome.deleted_count

    @classmethod
    def removeSelectedDbData(cls):
        """Delete only the placements listed for the current command.

        Returns the number of deleted documents.
        """
        targets = cls.removed_placements[cls.command]
        lower_bound = cls.date_to_mongoid(
            datetime.datetime.strptime(cls.dates[0], '%Y-%m-%d'))
        upper_bound = cls.date_to_mongoid(
            datetime.datetime.strptime(cls.dates[1], '%Y-%m-%d'), (23, 0, 0))

        outcome = cls.reports.delete_many({
            "$and": [{
                "platform": cls.platform
            }, {
                "_id": {
                    "$gte": lower_bound
                }
            }, {
                "_id": {
                    "$lt": upper_bound
                }
            }, {
                "placement_name": {
                    "$in": targets
                }
            }]
        })
        return outcome.deleted_count

    @classmethod
    def updateSelectedDbDataRevenueField(cls):
        """Corrupt the revenue fields of selected placements to 100000.0.

        The subsequent pull is expected to restore the real values.
        Returns the number of modified documents.
        """
        targets = cls.changed_placements[cls.command]
        lower_bound = cls.date_to_mongoid(
            datetime.datetime.strptime(cls.dates[0], '%Y-%m-%d'))
        upper_bound = cls.date_to_mongoid(
            datetime.datetime.strptime(cls.dates[1], '%Y-%m-%d'), (23, 0, 0))

        outcome = cls.reports.update_many(
            {
                "$and": [{
                    "platform": cls.platform
                }, {
                    "_id": {
                        "$gte": lower_bound
                    }
                }, {
                    "_id": {
                        "$lt": upper_bound
                    }
                }, {
                    "placement_name": {
                        "$in": targets
                    }
                }]
            }, {
                "$set": {
                    "revenue": 100000.0,
                    "revenue_dict.EUR": 100000.0,
                    "revenue_dict.USD": 100000.0
                }
            })
        return outcome.modified_count

    @classmethod
    def updateSelectedDbDataMultipleFields(cls):
        """Zero out revenue and impression fields of selected placements.

        The subsequent pull is expected to restore the real values.
        Returns the number of modified documents.
        """
        targets = cls.changed_placements[cls.command]
        lower_bound = cls.date_to_mongoid(
            datetime.datetime.strptime(cls.dates[0], '%Y-%m-%d'))
        upper_bound = cls.date_to_mongoid(
            datetime.datetime.strptime(cls.dates[1], '%Y-%m-%d'), (23, 0, 0))

        outcome = cls.reports.update_many(
            {
                "$and": [{
                    "platform": cls.platform
                }, {
                    "_id": {
                        "$gte": lower_bound
                    }
                }, {
                    "_id": {
                        "$lt": upper_bound
                    }
                }, {
                    "placement_name": {
                        "$in": targets
                    }
                }]
            }, {
                "$set": {
                    "revenue": 0.0,
                    "revenue_dict.EUR": 0.0,
                    "revenue_dict.USD": 0.0,
                    "total_impressions": 0,
                    "resold_impressions": 0
                }
            })
        return outcome.modified_count

    def setUp(self):
        """Show full diffs on assertion failures.

        Fix: removed a dead ``pass`` statement that preceded the real body.
        """
        self.maxDiff = None

    def tearDown(self):
        """No per-test cleanup is needed; each test prepares its own data."""

    #@unittest.skip('just skip')
    def test_1_pull_data_insert(self):
        # Pull into an emptied window and verify the inserted documents.
        # NOTE(review): `in ('teads')` is a substring test, not a tuple
        # membership test ('('teads')' is just the string) — confirm intent.
        # for teads platform wait before the next request
        if self.platform in ('teads'):
            time.sleep(5)
        #self.maxDiff = None
        # delete data from database
        self.removeDbData()

        # set env variable for mongo host
        #os.environ['MONGO_HOST'] = '127.0.0.1'
        #os.environ['MONGO_HOST'] = 'mongodb'
        # NOTE(review): the block below is corrupted — a scraper redacted
        # credentials ('tcp://*****:*****') and fused this method's tail with
        # test_2's skip-comment; recover the original from version control.
        """
        if self.platform == 'appnexus':
            puller = ReporterPuller(self.platform, self.dates[0], self.dates[1])
        
            puller.reporter_client.read = MagicMock(
                                            return_value=self.dict_expected_data)
            ret_data = puller.pull_data()
        else:    
            ret_data = JsonRPCClient("tcp://*****:*****@unittest.skip('just skip')
    def test_2_pull_data_update(self):
        """Re-pull the same window and verify the stored documents match."""
        puller = ReporterPuller(self.platform, self.dates[0], self.dates[1])
        puller.reporter_client.read = MagicMock(
            return_value=self.dict_expected_data)
        puller.pull_data()

        # read the window back and compare against the expected dataset,
        # ignoring ordering
        self.dbdata = self.getDbData()
        self.assertCountEqual(self.dbdata, self.expected_data,
                              'correct pulled data')

    #@unittest.skip('just skip')
    def test_3_pull_data_update_revenue(self):
        """Tamper with revenue fields, re-pull, verify values are restored."""
        # Fix: `in ('teads')` was a substring test ('('teads')' is a plain
        # string), so e.g. 'ads' would also match; use equality instead.
        # teads rate-limits; back off before issuing another report request.
        if self.platform == 'teads':
            time.sleep(5)

        # corrupt / remove rows so the pull has something to repair
        self.updateSelectedDbDataRevenueField()
        self.removeSelectedDbData()
        self.removeDepricatedDbData()
        self.insertDepricatedDbData()

        puller = ReporterPuller(self.platform, self.dates[0], self.dates[1])
        puller.reporter_client.read = MagicMock(
            return_value=self.dict_expected_data)
        puller.pull_data()

        self.dbdata = self.getDbData()
        self.assertCountEqual(self.dbdata, self.expected_data,
                              'pulled data are correct')

    #@unittest.skip('just skip')
    def test_4_pull_data_update_multiple_fields(self):
        """Zero out several fields, re-pull, verify values are restored."""
        # Fix: `in ('teads')` was a substring test ('('teads')' is a plain
        # string); use equality instead.
        # teads rate-limits; back off before issuing another report request.
        if self.platform == 'teads':
            time.sleep(5)
        self.maxDiff = None

        # corrupt / remove rows so the pull has something to repair
        self.updateSelectedDbDataMultipleFields()
        self.removeSelectedDbData()
        self.removeDepricatedDbData()
        self.insertDepricatedDbData()

        puller = ReporterPuller(self.platform, self.dates[0], self.dates[1])
        puller.reporter_client.read = MagicMock(
            return_value=self.dict_expected_data)
        puller.pull_data()

        self.dbdata = self.getDbData()
        # Fix: assertion message was 'correct puremoveDepricatedDbDatalled
        # data' — a paste accident inside the word 'pulled'.
        self.assertCountEqual(self.dbdata, self.expected_data,
                              'correct pulled data')

    #@unittest.skip('just skip')
    def test_5_pull_data_insert_different_dates(self):
        """Pull day 1, then day 2, then day 1 again; day-1 data must match."""
        # Fix: `in ('teads')` was a substring test ('('teads')' is a plain
        # string); use equality instead.
        # teads rate-limits; back off before issuing another report request.
        if self.platform == 'teads':
            time.sleep(5)
        # start from an empty window
        self.removeDbData()

        # pull the primary date range
        puller = ReporterPuller(self.platform, self.dates[0], self.dates[1])
        puller.reporter_client.read = MagicMock(
            return_value=self.dict_expected_data)
        puller.pull_data()

        # pull the following day's range
        puller = ReporterPuller(self.platform, self.dates[2], self.dates[3])
        puller.reporter_client.read = MagicMock(
            return_value=self.dict_nd_expected_data)
        puller.pull_data()

        # pull the primary range once more; it must not duplicate rows
        puller = ReporterPuller(self.platform, self.dates[0], self.dates[1])
        puller.reporter_client.read = MagicMock(
            return_value=self.dict_expected_data)
        puller.pull_data()

        self.dbdata = self.getDbData()
        self.assertCountEqual(self.dbdata, self.expected_data,
                              'correct pulled data')

    #@unittest.skip('just skip')
    def test_6_set_pull_mode_mute(self):
        # Switch pulling to mute mode via RPC, presumably so the following
        # pull is suppressed — TODO confirm once the corrupted tail is restored.
        # NOTE(review): `in ('teads')` is a substring test, not a tuple
        # membership test — confirm intent.
        # for teads platform wait before the next request
        if self.platform in ('teads'):
            time.sleep(5)
        # set env variable for mongo host
        #os.environ['MONGO_HOST'] = '127.0.0.1'
        #os.environ['MONGO_HOST'] = 'mongodb'

        # NOTE(review): the line below is corrupted — a scraper redacted the
        # endpoint ('tcp://*****:*****') and fused this method's tail with
        # test_7's skip-comment; recover the original from version control.
        JsonRPCClient("tcp://*****:*****@unittest.skip('just skip')
    def test_7_set_pull_mode_normal(self):
        """Set pull mode back to normal and verify pulls succeed twice."""
        # Fix: `in ('teads')` was a substring test ('('teads')' is a plain
        # string); use equality instead.
        # teads rate-limits; back off before issuing another report request.
        if self.platform == 'teads':
            time.sleep(5)

        # NOTE(review): both rpc calls use 'pull_mode_normal'; given test_6
        # mutes first, a mute/normal pair may have been intended — confirm.
        JsonRPCClient("tcp://localhost:5552").rpc_call('pull_mode_normal',
                                                       platform=self.platform)

        puller = ReporterPuller(self.platform, self.dates[0], self.dates[1])
        puller.reporter_client.read = MagicMock(
            return_value=self.dict_expected_data)
        ret_data = puller.pull_data()
        self.assertNotEqual(ret_data, False, 'correct pulled data')

        JsonRPCClient("tcp://localhost:5552").rpc_call('pull_mode_normal',
                                                       platform=self.platform)

        puller = ReporterPuller(self.platform, self.dates[0], self.dates[1])
        puller.reporter_client.read = MagicMock(
            return_value=self.dict_expected_data)
        ret_data = puller.pull_data()
        self.assertNotEqual(ret_data, False, 'correct pulled data')
Ejemplo n.º 7
0
 def setUp(self):
     # Fresh reporter client and 'reports' collection handle per test.
     # NOTE(review): fragment from another scraped example — enclosing class
     # and its one-space indentation convention are not visible here.
     self.reporter_client = ReporterClient()
     self.reports = MongoConnector('reports').collection
Ejemplo n.º 8
0
class DataPusherTestCase(unittest.TestCase):
    def setUp(self):
        # Fresh reporter client and a handle to the 'reports' Mongo
        # collection for every test.
        self.reporter_client = ReporterClient()
        self.reports = MongoConnector('reports').collection

    def tearDown(self):
        """Nothing to clean up; fixtures are rebuilt in setUp."""

    def updateReports(self):
        """Corrupt a handful of report rows so the push step must reject them.

        Each update below breaks one validation rule (unparseable placement
        id or size, non-float revenue); the push tests then assert the
        matching rejection messages. Always returns True.
        """

        # break the teads placement id: drop the '_' before the numeric id
        self.reports.update_many(
            {
                "$and": [{
                    "platform": "teads"
                }, {
                    "placement_name":
                    "77422 - inRead -  sportfm.gr_5687603"
                }]
            }, {
                "$set": {
                    "placement_name": "77422 - inRead -  sportfm.gr5687603"
                }
            })

        # break the adx placement size: drop the '_' before '300x250'
        self.reports.update_many(
            {
                "$and": [{
                    "platform": "adx"
                }, {
                    "placement_name": "11974846_apkroids.com_ros_300x250"
                }]
            },
            {"$set": {
                "placement_name": "11974846_apkroids.com_ros300x250"
            }})

        # break the taboola size token: '300x250' -> '300250'
        self.reports.update_many(
            {
                "$and": [{
                    "platform": "taboola"
                }, {
                    "placement_name": "insider#300x250#GR-r11409755"
                }]
            }, {"$set": {
                "placement_name": "insider#300250#GR-r11409755"
            }})

        # make criteo USD revenue non-numeric (comma decimal separator)
        # (the original comment here said 'taboola' — the query is criteo)
        self.reports.update_many(
            {
                "$and": [{
                    "platform": "criteo"
                }, {
                    "placement_name": "11777559_ma7room.com_ros_970x250"
                }]
            }, {"$set": {
                "revenue_dict.USD": "10,34"
            }})

        # make criteo EUR revenue non-numeric (comma decimal separator)
        self.reports.update_many(
            {
                "$and": [{
                    "platform": "criteo"
                }, {
                    "placement_name":
                    "12001599_gazzetta.gr_adblock-3_300x250"
                }]
            }, {"$set": {
                "revenue_dict.EUR": "10,35"
            }})

        return True

    #@unittest.skip('just skip')
    def test_1_push_all(self):
        # Push every platform's data and compare the full result summary
        # (counters + sorted rejection messages) against this expectation.
        self.maxDiff = None
        expected = {
            'found':
            8285,
            'inserted':
            8267,
            'inserted_eurohoops':
            528,
            'rejected_id':
            8,
            'rejected_size':
            2,
            'rejected_inventory':
            6,
            'rejected_pub_fee':
            0,
            'rejected_revenue':
            2,
            'messages': [
                'Platform adsense. Rejecting "TheFrog_300x250". Could_not_parse_placement_id',
                'Platform adsense. Rejecting "Test_MSN_Remnant_300x250". Could_not_parse_placement_id',
                'Platform adsense. Rejecting "Test_MSN_Remnant_728x90". Could_not_parse_placement_id',
                'Platform adsense. Rejecting "Houselife Adsense 300x250". Could_not_parse_placement_id',
                'Platform adsense. Rejecting "YL_300x250". Could_not_parse_placement_id',
                'Platform adx. Rejecting "11974846_apkroids.com_ros300x250". Could_not_parse_placement_size',
                'Platform adx. Rejecting "(No tag)". Could_not_parse_placement_id',
                'Platform teads. Rejecting "77422 - inRead -  sportfm.gr5687603". Could_not_parse_placement_id',
                'Platform criteo. Rejecting "12001599_gazzetta.gr_adblock-3_300x250". revenue_is_not_float',
                'Platform criteo. Rejecting "12341051_missbloom.bg_homepage-ros-b-perf_300x600". Could_not_find_it_in_inventory_table',
                'Platform criteo. Rejecting "12341048_missbloom.bg_homepage-ros-b-perf_300x250". Could_not_find_it_in_inventory_table',
                'Platform criteo. Rejecting "11777559_ma7room.com_ros_970x250". revenue_is_not_float',
                'Platform criteo. Rejecting "12341055_missbloom.bg_homepage-ros-perf_970x250". Could_not_find_it_in_inventory_table',
                'Platform taboola. Rejecting "mother#GR-p9964478_not_used - DEACTIVATED". Could_not_parse_placement_id',
                'Platform taboola. Rejecting "insider#300250#GR-r11409755". Could_not_parse_placement_size',
                'Platform rubicon. Rejecting "12341050_missbloom.bg_homepage-ros-a-perf_300x600". Could_not_find_it_in_inventory_table',
                'Platform rubicon. Rejecting "12341056_missbloom.bg_homepage-ros-perf_728x90". Could_not_find_it_in_inventory_table',
                'Platform rubicon. Rejecting "12341055_missbloom.bg_homepage-ros-perf_970x250". Could_not_find_it_in_inventory_table'
            ]
        }

        # corrupt selected rows so the push produces the rejections above
        self.updateReports()

        # NOTE(review): the line below is corrupted — a scraper redacted the
        # endpoint ('tcp://*****:*****') and fused this method's tail with
        # test_2's skip-comment; recover the original from version control.
        #rs = self.reporter_client.push_to_ui('2018-01-20', 'test')
        ret_data = JsonRPCClient("tcp://*****:*****@unittest.skip('just skip')
    def test_2_push_adsense(self):
        """Push only adsense data and check counters + rejection messages."""
        self.maxDiff = None
        expected = {
            'found': 21,
            'inserted': 16,
            'inserted_eurohoops': 1,
            'rejected_id': 5,
            'rejected_size': 0,
            'rejected_inventory': 0,
            'rejected_pub_fee': 0,
            'rejected_revenue': 0,
            'messages': [
                'Platform adsense. Rejecting "Houselife Adsense 300x250". Could_not_parse_placement_id',
                'Platform adsense. Rejecting "Test_MSN_Remnant_300x250". Could_not_parse_placement_id',
                'Platform adsense. Rejecting "Test_MSN_Remnant_728x90". Could_not_parse_placement_id',
                'Platform adsense. Rejecting "TheFrog_300x250". Could_not_parse_placement_id',
                'Platform adsense. Rejecting "YL_300x250". Could_not_parse_placement_id'
            ]
        }

        # corrupt selected rows so the push produces the rejections above
        self.updateReports()

        ret_data = JsonRPCClient("tcp://localhost:5552").rpc_call(
            'push_data', platform='adsense', date='2018-01-20', mode='test')

        # sort messages on both sides so ordering differences don't fail it
        if 'messages' in expected:
            expected['messages'] = sorted(expected['messages'])
        if 'messages' in ret_data['result']:
            ret_data['result']['messages'] = sorted(
                ret_data['result']['messages'])

        print(ret_data['result'])

        self.assertDictEqual(ret_data['result'], expected,
                             'Push to UI is correct')