def getStrategies(self):
    """Fetch every bidding strategy for this account, one page at a time.

    Pages through the BiddingStrategyService selector until the reported
    ``totalNumEntries`` is exhausted.

    Returns:
        list: the raw strategy entries returned by the service (one
        dict-like object per strategy); empty if the account has none.
    """
    google_service_type = "BiddingStrategyService"
    # BUG FIX: the original referenced the bare name `account_id`, which is
    # undefined in this scope; the account id lives on the instance.
    client = Update(self.account_id, google_service_type).getClient()
    service = client.GetService(google_service_type, version='v201809')

    PAGE_SIZE = 100
    offset = 0
    selector = {
        'fields': ['BiddingScheme', 'Name', 'Id', 'Status', 'Type'],
        'paging': {
            'startIndex': str(offset),
            'numberResults': str(PAGE_SIZE),
        },
    }

    results = []
    more_pages = True
    while more_pages:
        page = service.get(selector)
        if 'entries' in page:
            results.extend(page['entries'])
        else:
            print('No results were found.')
        # Advance the paging window for the next request.
        offset += PAGE_SIZE
        selector['paging']['startIndex'] = str(offset)
        more_pages = offset < int(page['totalNumEntries'])

    # BUG FIX: the original accumulated `results` but never returned it,
    # making the whole method a no-op for callers.
    return results
def __init__(self, account_id, new_ads):
    """Capture the inputs, build the ADD operations, and fire the request.

    Args:
        account_id: Google Ads account the new ads belong to.
        new_ads: dict of new-ad specs keyed by a unique id.
    """
    self.account_id, self.new_ads = account_id, new_ads
    self.service_type = "AdGroupAdService"
    # Operations are derived from the stored inputs, then dispatched
    # immediately on construction.
    self.operations = self.getOperations()
    self.update = Update(account_id, self.service_type, self.operations)
    self.sendRequest()
def __init__(self, account_id, bid_updates):
    """Capture the inputs, build the SET operations, and fire the request.

    Args:
        account_id: Google Ads account the keywords belong to.
        bid_updates: dict of bid-update specs keyed by a unique id.
    """
    self.account_id, self.bid_updates = account_id, bid_updates
    self.service_type = "AdGroupCriterionService"
    # Operations are derived from the stored inputs, then dispatched
    # immediately on construction.
    self.operations = self.getOperations()
    self.update = Update(account_id, self.service_type, self.operations)
    self.sendRequest()
class CreateAds(Update):
    """Create new expanded text ads via the AdGroupAdService.

    Args:
        account_id: the Google Ads account the ads belong to.
        new_ads: dict keyed by a unique id; each value is a dict holding
            the ad group id plus the expanded-text-ad fields (headline
            parts, descriptions, paths, final URLs).
    """

    def __init__(self, account_id, new_ads):
        self.account_id = account_id
        self.new_ads = new_ads
        self.service_type = "AdGroupAdService"
        self.operations = self.getOperations()
        self.update = Update(self.account_id, self.service_type, self.operations)
        self.sendRequest()

    def getOperations(self):
        """Build one ADD operation per entry in ``self.new_ads``."""
        # TODO Chunk into 2000 or fewer changes
        operations = [
            {
                'operator': 'ADD',
                'operand': {
                    'xsi_type': 'AdGroupAd',
                    'adGroupId': self.new_ads[unique_id]["ad_group_id"],
                    'ad': {
                        'xsi_type': 'ExpandedTextAd',
                        'headlinePart1': self.new_ads[unique_id]["headlinePart1"],
                        'headlinePart2': self.new_ads[unique_id]["headlinePart2"],
                        'headlinePart3': self.new_ads[unique_id]["headlinePart3"],
                        'description': self.new_ads[unique_id]["description"],
                        'description2': self.new_ads[unique_id]["description2"],
                        'path1': self.new_ads[unique_id]["path1"],
                        'path2': self.new_ads[unique_id]["path2"],
                        'finalUrls': self.new_ads[unique_id]["finalUrls"],
                    },
                    # Optional fields.
                    'status': 'ENABLED',
                },
            }
            for unique_id in self.new_ads
        ]
        return operations

    def checkOperations(self):
        # ad-specific checks here
        pass

    def convertToMicros(self, bid):
        """Convert a currency bid to micros, truncated to 0.1-unit steps.

        BUG FIX: ``int(bid * 10)`` truncated raw float noise, so values
        such as 0.7 (0.7 * 10 == 6.999…) or 8.2 lost a whole 0.1 unit.
        Rounding to 6 decimals first removes representation noise while
        preserving the original truncate-to-one-decimal behaviour.
        """
        return int(round(bid * 10, 6)) * 100000

    def sendRequest(self):
        """Dispatch the built operations and return the service response.

        (Previously the response was assigned and discarded; returning it
        is backward-compatible and lets callers inspect the result.)
        """
        return self.update.send_request()
class UpdateKeywordBids(Update):
    """Update keyword CPC bids via the AdGroupCriterionService.

    Args:
        account_id: the Google Ads account the keywords belong to.
        bid_updates: dict keyed by a unique id; each value is a dict with
            an ``ad_group_id``, ``criterion_id`` (keyword id) and ``bid``
            expressed in currency units.
    """

    def __init__(self, account_id, bid_updates):
        self.account_id = account_id
        self.bid_updates = bid_updates
        self.service_type = "AdGroupCriterionService"
        self.operations = self.getOperations()
        self.update = Update(self.account_id, self.service_type, self.operations)
        self.sendRequest()

    def getOperations(self):
        """Build one SET operation (new CpcBid) per entry in ``self.bid_updates``."""
        operations = [
            {
                'operator': 'SET',
                'operand': {
                    'xsi_type': 'BiddableAdGroupCriterion',
                    'adGroupId': self.bid_updates[unique_id]["ad_group_id"],
                    'criterion': {
                        'id': self.bid_updates[unique_id]["criterion_id"],
                    },
                    'biddingStrategyConfiguration': {
                        'bids': [{
                            'xsi_type': 'CpcBid',
                            'bid': {
                                'microAmount': self.convertToMicros(
                                    self.bid_updates[unique_id]["bid"])
                            },
                        }]
                    },
                },
            }
            for unique_id in self.bid_updates
        ]
        return operations

    def checkOperations(self):
        # keyword specific checks here
        pass

    def convertToMicros(self, bid):
        """Convert a currency bid to micros, truncated to 0.1-unit steps.

        BUG FIX: ``int(bid * 10)`` truncated raw float noise, so values
        such as 0.7 (0.7 * 10 == 6.999…) or 8.2 lost a whole 0.1 unit.
        Rounding to 6 decimals first removes representation noise while
        preserving the original truncate-to-one-decimal behaviour.
        """
        return int(round(bid * 10, 6)) * 100000

    def sendRequest(self):
        """Dispatch the built operations and return the service response.

        (Previously the response was assigned and discarded; returning it
        is backward-compatible and lets callers inspect the result.)
        """
        return self.update.send_request()
def check(self):
    """
    1. Get batch jobs with status != Done
    2. Check status of batch job
    3. If status = Done, Get mutations with batch job id = current batch job id (order by created)
    4. Update mutations with results
    """
    batch_jobs_query = self.batch_jobs_query()
    # Only the first chunk (up to 10 batch jobs) is processed per call;
    # subsequent jobs are picked up on later invocations.
    batch_jobs_chunks = pd.read_sql(batch_jobs_query, Database().createEngine(), chunksize=10)
    # Iterate over chunks
    try:
        batch_jobs_data_frame = next(batch_jobs_chunks)
    except StopIteration:
        # No pending batch jobs — nothing to do.
        return
    # Mark the batch jobs data as is_checking equal to True
    # (claims the rows before the slow per-job polling below).
    self.set_batch_jobs_to_processing(batch_jobs_data_frame)
    for index, batch_job_data_frame in batch_jobs_data_frame.iterrows():
        # Poll Google for this batch job's result; falsy response means
        # the job is not finished yet, so it is left for the next run.
        response = Update(
            batch_job_data_frame['account_id'], ''
        ).check_batch_job_result(batch_job_data_frame['google_id'])
        if response:
            mutations_query = self.mutations_query_by_batch_job(batch_job_data_frame['id'])
            # Up to 2000 mutation rows per job are reconciled here
            # (only the first chunk is consumed — presumably jobs never
            # exceed 2000 mutations; TODO confirm).
            mutations_data_frame_chunks = pd.read_sql(mutations_query,
                                                      Database().createEngine(), chunksize=2000)
            # Iterate over chunks
            try:
                mutations_data_frame = next(mutations_data_frame_chunks)
            except StopIteration:
                continue
            # Release the mutation rows and write the job results back.
            mutations_data_frame["is_processing"] = 0
            self.update_batch_job_mutations(mutations_data_frame, response)
            # TODO: support all status for batch job
            batch_jobs_data_frame.at[index, "status"] = 'DONE'
    # Release all claimed batch-job rows, then persist their new state.
    batch_jobs_data_frame["is_checking"] = 0
    self.update_batch_jobs(batch_jobs_data_frame)
def process_account_mutations(self, account_data_frame, service_type):
    """
    Process mutations for up to 2000 rows.
    One account_id and one type at a time.

    Args:
        account_data_frame: rows for a single account (the account id is
            taken from the first row) and a single mutation type.
        service_type: internal type key, mapped to a Google service name
            via ``self.services_map``.

    Returns:
        tuple: ``(True, result)`` on success,
        ``(False, error_message)`` on failure.
    """
    google_service_type = self.services_map[service_type]
    account_id = account_data_frame.account_id.values[0]

    # Build one Google Ads operation per mutation row.
    operations = [
        Operation(row, service_type, row["action"], row["attribute"]).get()
        for _, row in account_data_frame.iterrows()
    ]

    try:
        print("attempting {0} mutation...".format(google_service_type))
        result = Update(account_id, google_service_type, operations).send_request()
    except Exception as exception:
        print("An exception has occurred: {0}".format(exception))
        # CONSISTENCY FIX: return a tuple here, matching the success
        # branch (the original returned a list on this path only).
        return False, str(exception)
    return True, result