def test_total_is_defined(self):
    """Cursor.total() returns the cached total count once it has been set."""
    cursor = api.Cursor(
        adaccount.AdAccount(fbid='123'),
        ad.Ad,
    )
    cursor._total_count = 32
    self.assertEqual(cursor.total(), 32)
def test_total_is_none(self):
    """Cursor.total() raises while no total count has been fetched yet."""
    cursor = api.Cursor(
        adaccount.AdAccount(fbid='123'),
        ad.Ad,
    )
    self.assertRaises(
        exceptions.FacebookUnavailablePropertyException,
        cursor.total,
    )
def test_builds_from_array(self):
    """
    Sometimes the response returns an array inside the data key. This
    asserts that we successfully build objects using the objects in that
    array.
    """
    response = {"data": [{"id": "6019579"}, {"id": "6018402"}]}
    cursor = api.Cursor(
        adaccount.AdAccount(fbid='123'),
        ad.Ad,
    )
    built = cursor.build_objects_from_response(response)
    assert len(built) == 2
def test_builds_from_object(self):
    """
    Sometimes the response returns a single JSON object. This asserts
    that we're not looking for the data key and that we correctly build
    the object without relying on the data key.
    """
    response = {
        "id": "601957/targetingsentencelines",
        "targetingsentencelines": [{
            "content": "Location - Living In:",
            "children": ["United States"]
        }, {
            "content": "Age:",
            "children": ["18 - 65+"]
        }]
    }
    cursor = api.Cursor(
        adaccount.AdAccount(fbid='123'),
        ad.Ad,
    )
    built = cursor.build_objects_from_response(response)
    # A single JSON object must be wrapped into exactly one result object.
    assert len(built) == 1
    assert built[0]['id'] == "601957/targetingsentencelines"
def test_delitem_changes_history(self):
    """Deleting a field also removes it from the account's pending changes."""
    account = adaccount.AdAccount()
    account['name'] = 'foo'
    # Setting a field must record a pending change...
    assert len(account._changes) > 0
    del account['name']
    # ...and deleting it must clear that pending change again.
    assert len(account._changes) == 0
def _process_job(args: ThreadArgs, job: JobQueueItem, api: FacebookAdsApi) -> None:
    """Download one day of Facebook ad performance for one ad account.

    On success the insights are upserted into the job's SQLite database and
    the shared jobs-left counter is decremented (notifying the coordinator
    when it reaches zero).  On a FacebookRequestError the job is pushed onto
    the shared retry queue with exponential backoff while fewer than 8
    attempts have been made; a rate-limit error additionally puts this
    worker to sleep for the backoff duration.  Any other exception, or too
    many failed attempts, sets the shared error/done flags and signals the
    coordinating thread.

    :param args: shared worker state (mutexes, condition variables, queues,
                 counters) — NOTE(review): assumed mutated under its own
                 locks by multiple worker threads; confirm against caller.
    :param job: the (ad account id, date) work item to process
    :param api: the FacebookAdsApi session used for the request
    """
    account_id: str = job.ad_account_id
    date_str: str = job.date.strftime('%Y-%m-%d')
    job.try_count += 1
    # Human-readable job identifier used in every log message below.
    job_info_str: str = 'act_{ad_account_id} on {single_date}'.format(
        ad_account_id=account_id, single_date=date_str)
    _log(logging.info, args.logging_mutex, [
        'download Facebook ad performance of {job}'
        ' - attempt #{attempt}'.format(job=job_info_str, attempt=job.try_count)
    ])
    # platform specific timer
    start = timeit.default_timer()
    request_error_occured: bool = False
    request_error_is_rate_limit: bool = False
    error_occured: bool = False
    error_msg: typing.List[str] = list()
    ad_insights: adsinsights.AdsInsights
    try:
        ad_account = adaccount.AdAccount('act_' + account_id, api=api)
        ad_insights = get_account_ad_performance_for_single_day(
            ad_account, job.date)
        with sqlite3.connect(job.db_name) as con:
            _upsert_ad_performance(ad_insights, con)
        end = timeit.default_timer()
        _log(logging.info, args.logging_mutex, [
            'finished download Facebook ad performance of {job}'
            ' in {time}s - attempt #{attempt}'.format(job=job_info_str,
                                                      time=round(
                                                          end - start, 2),
                                                      attempt=job.try_count)
        ])
        with args.state_changed_cv:
            args.jobs_left -= 1
            if args.jobs_left == 0:
                # Last job finished: wake the coordinating thread.
                args.state_changed_cv.notify()
    except FacebookRequestError as e:
        request_error_occured = True
        # This is the error details of a rate limiting error.
        # The message is "User request limit reached"
        request_error_is_rate_limit = e.api_error_type(
        ) == 'OAuthException' and e.api_error_code() == 17
        error_msg.append(e.get_message())
        error_msg.append(e.api_error_message())
    except Exception as e:
        error_occured = True
        error_msg.append(traceback.format_exc())
    if request_error_occured:
        if job.try_count < 8:
            # Exponential backoff: 60s, 120s, 240s, ... doubling per attempt.
            duration: int = 60 * 2**(job.try_count - 1)
            retry_at: datetime.datetime = datetime.datetime.now(
            ) + datetime.timedelta(seconds=duration)
            retry_msg: str = 'retrying {job} in {duration} seconds - attempt #{attempt}'.format(
                job=job_info_str, attempt=job.try_count, duration=duration)
            error_msg.append(retry_msg)
            with args.retry_queue_cv:
                # Retry queue is a heap ordered by retry time; wake any
                # thread waiting for new retry work.
                heapq.heappush(args.retry_queue, RetryQueueItem(retry_at, job))
                args.retry_queue_cv.notify_all()
            _log(logging.warning, args.logging_mutex, error_msg)
            if request_error_is_rate_limit:
                # If the error was caused by rate limiting, sleep here to block the worker.
                # Otherwise FB will keep being bombarded by uninterrupted requests constantly hitting the rate limit.
                # Don't block execution otherwise (if it's not this particular error).
                time.sleep(duration)
            return
        else:
            # Retried too often — treat as a fatal error below.
            error_occured = True
            error_msg.append('download of {job} failed too many times'.format(
                job=job_info_str))
    if error_occured:
        _log(logging.error, args.logging_mutex, error_msg)
        with args.state_changed_cv:
            # technically does not require locking but it is needed for the notify to work
            # so might as well put this in scope
            args.error_occured = True
            args.done = True
            args.state_changed_cv.notify()
    return
def test_inherits_account_id(self):
    """A newly created AdAccount inherits the API's default account id
    as its parent id.

    The default account id is process-global state; reset it in a
    ``finally`` block so a failing assertion cannot leak it into other
    tests (the original only reset it after a successful assert).
    """
    parent_id = 'act_19tg0j239g023jg9230j932'
    api.FacebookAdsApi.set_default_account_id(parent_id)
    try:
        ac = adaccount.AdAccount()
        assert ac.get_parent_id() == parent_id
    finally:
        # Always restore the global default, even when the assert fails.
        api.FacebookAdsApi._default_account_id = None
def _process_job(args: ThreadArgs, job: JobQueueItem, api: FacebookAdsApi) -> None:
    """Download one day of Facebook ad performance for one ad account.

    On success the insights are upserted into the job's SQLite database and
    the shared jobs-left counter is decremented (notifying the coordinator
    when it reaches zero).  On a FacebookRequestError the job is pushed onto
    the shared retry queue with exponential backoff while fewer than 8
    attempts have been made.  Any other exception, or too many failed
    attempts, sets the shared error/done flags and signals the coordinating
    thread.

    :param args: shared worker state (mutexes, condition variables, queues,
                 counters) — NOTE(review): assumed mutated under its own
                 locks by multiple worker threads; confirm against caller.
    :param job: the (ad account id, date) work item to process
    :param api: the FacebookAdsApi session used for the request
    """
    account_id: str = job.ad_account_id
    date_str: str = job.date.strftime('%Y-%m-%d')
    job.try_count += 1
    # Human-readable job identifier used in every log message below.
    job_info_str: str = 'act_{ad_account_id} on {single_date}'.format(
        ad_account_id=account_id, single_date=date_str)
    _log(logging.info, args.logging_mutex, [
        'download Facebook ad performance of {job}'
        ' - attempt #{attempt}'.format(job=job_info_str, attempt=job.try_count)
    ])
    # platform specific timer
    start = timeit.default_timer()
    request_error_occured: bool = False
    error_occured: bool = False
    error_msg: typing.List[str] = list()
    ad_insights: adsinsights.AdsInsights
    try:
        ad_account = adaccount.AdAccount('act_' + account_id, api=api)
        ad_insights = get_account_ad_performance_for_single_day(
            ad_account, job.date)
        with sqlite3.connect(job.db_name) as con:
            _upsert_ad_performance(ad_insights, con)
        end = timeit.default_timer()
        _log(logging.info, args.logging_mutex, [
            'finished download Facebook ad performance of {job}'
            ' in {time}s - attempt #{attempt}'.format(job=job_info_str,
                                                      time=round(
                                                          end - start, 2),
                                                      attempt=job.try_count)
        ])
        with args.state_changed_cv:
            args.jobs_left -= 1
            if args.jobs_left == 0:
                # Last job finished: wake the coordinating thread.
                args.state_changed_cv.notify()
    except FacebookRequestError as e:
        request_error_occured = True
        error_msg.append(e.get_message())
        error_msg.append(e.api_error_message())
    except Exception as e:
        error_occured = True
        error_msg.append(str(e))
    if request_error_occured:
        if job.try_count < 8:
            # Exponential backoff: 60s, 120s, 240s, ... doubling per attempt.
            duration: int = 60 * 2**(job.try_count - 1)
            retry_at: datetime.datetime = datetime.datetime.now(
            ) + datetime.timedelta(seconds=duration)
            retry_msg: str = 'retrying {job} in {duration} seconds - attempt #{attempt}'.format(
                job=job_info_str, attempt=job.try_count, duration=duration)
            error_msg.append(retry_msg)
            with args.retry_queue_cv:
                # Retry queue is a heap ordered by retry time; wake any
                # thread waiting for new retry work.
                heapq.heappush(args.retry_queue, RetryQueueItem(retry_at, job))
                args.retry_queue_cv.notify_all()
            _log(logging.warning, args.logging_mutex, error_msg)
            return
        else:
            # Retried too often — treat as a fatal error below.
            error_occured = True
            error_msg.append('download of {job} failed too many times'.format(
                job=job_info_str))
    if error_occured:
        _log(logging.error, args.logging_mutex, error_msg)
        with args.state_changed_cv:
            # technically does not require locking but it is needed for the notify to work
            # so might as well put this in scope
            args.error_occured = True
            args.done = True
            args.state_changed_cv.notify()
    return
def reports(self, date_start, date_end, ad_accounts, extracted, reports=None):
    """Recursively download ad-level Facebook insights for every account.

    Picks a not-yet-extracted ad account at random, starts an async
    insights job for it, polls until the job completes, appends the
    resulting rows to ``reports`` and recurses until every account in
    ``ad_accounts`` has been extracted.

    Fixes vs. the original:
    - ``async=True`` is a SyntaxError on Python 3.7+ (``async`` is a
      reserved keyword); the facebook-business SDK parameter is
      ``is_async``.
    - ``reports=pandas.DataFrame()`` was a mutable default argument;
      default is now ``None`` (backward compatible for callers that pass
      their own frame).
    - The ``if async_job['async_status'] == 'Job Completed'`` re-check was
      always true once the polling loop exited and has been removed.

    :param date_start: inclusive start date string for the time range
    :param date_end: inclusive end date string for the time range
    :param ad_accounts: list of ad account ids to extract
    :param extracted: list of account ids already extracted (mutated here)
    :param reports: accumulator DataFrame; a fresh empty one when None
    :return: DataFrame with one row per (date, campaign, adset, ad)
    """
    if reports is None:
        reports = pandas.DataFrame()
    # Choose among the accounts not yet extracted instead of rejection-
    # sampling random.choice over the full list.
    remaining = [acc for acc in ad_accounts if acc not in extracted]
    ad_account = random.choice(remaining)
    print("[Facebook] - Extracting data for ad_account=%s" % (ad_account))
    account = adaccount.AdAccount(ad_account)
    insights = account.get_insights(fields=[
        adsinsights.AdsInsights.Field.date_start,
        adsinsights.AdsInsights.Field.campaign_name,
        adsinsights.AdsInsights.Field.adset_name,
        adsinsights.AdsInsights.Field.ad_name,
        adsinsights.AdsInsights.Field.impressions,
        adsinsights.AdsInsights.Field.clicks,
        adsinsights.AdsInsights.Field.spend,
    ], params={
        'level': adsinsights.AdsInsights.Level.ad,
        'time_increment': '1',
        'time_range': {
            'since': date_start,
            'until': date_end
        },
    }, is_async=True)
    # Poll the async job every 20s until Facebook reports completion.
    time.sleep(20)
    async_job = insights.remote_read()
    while async_job['async_status'] != 'Job Completed':
        print('[Facebook] - Percent completed from async run=' +
              str(async_job['async_percent_completion']))
        time.sleep(20)
        async_job = insights.remote_read()
    print('[Facebook] - Percent completed from async run=' +
          str(async_job['async_percent_completion']))
    time.sleep(20)
    results = [x for x in insights.get_result()]
    facebook = pandas.DataFrame(results, columns=[
        'ad_name', 'adset_name', 'campaign_name', 'clicks',
        'date_start', 'date_stop', 'impressions', 'spend'
    ])
    if results:
        # Use a comma as decimal separator for spend — presumably for a
        # European spreadsheet locale downstream; confirm with consumers.
        facebook['spend'] = facebook['spend'].apply(
            lambda x: str(x.replace('.', ',')))
        facebook = facebook[[
            'date_start', 'date_stop', 'campaign_name', 'adset_name',
            'ad_name', 'impressions', 'clicks', 'spend'
        ]]
        facebook.drop(['date_stop'], inplace=True, axis=1)
        facebook['date_start'] = pandas.to_datetime(
            pandas.Series(facebook['date_start']), format="%Y-%m-%d")
        facebook = facebook.sort_values(by='date_start')
    # An empty frame is concatenated as-is (keeps the raw columns), matching
    # the original behavior.
    reports = pandas.concat([reports, facebook])
    extracted.append(ad_account)
    if sorted(extracted) != sorted(ad_accounts):
        # Recurse until every account has been extracted.
        return self.reports(date_start, date_end, ad_accounts, extracted,
                            reports)
    else:
        return reports