Example #1
def createNotification(account_id):

    # only when running for the first time...
    def isFirstRun(account_id):
        query = "select ad_performance_report_processed_at from accounts where id = '%s'" % (
            account_id)
        results = Database().executeQuery(query)
        for result in results:
            return result[0] is None

    if not isFirstRun(account_id):
        Log('info',
            "the notification won't be created as this isn't the first run",
            '', account_id)
        return

    Log('info', "creating successful sync notification", '', account_id)

    user_id = Helpers().getUserIdFromAccountId(account_id)
    account_name = Helpers().getAccountNameFromId(account_id)
    account_google_id = Helpers().getAccountGoogleIdFromId(account_id)
    username = Helpers().getUsername(account_id)
    query = r"""
    insert into notifications 
    (id, type, notifiable_id, notifiable_type, data, created_at, updated_at)
    values
    ('%s', 'App\\Notifications\\AccountSynced', '%s','App\\User',
    '{"message":"%s was synced successfully! Refesh the page to access the account."}', now(), now())
    """ % (str(uuid.uuid4()), user_id, account_name)
    Database().executeQuery(query)

    sendEmail(account_name, account_google_id, username, account_id)
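A note on the raw string above: the r-prefix keeps each doubled backslash intact, so the SQL text contains 'App\\Notifications\\AccountSynced' and MySQL's default escape handling stores the single-backslash class path the application expects. A minimal sketch of the escaping alone:

# the raw literal keeps both backslashes; MySQL collapses them to one on insert
fragment = r"'App\\Notifications\\AccountSynced'"
print(fragment)  # 'App\\Notifications\\AccountSynced'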
Example #2
    def __init__(self):
        self.services_map = {
            "campaign": "CampaignService",
            "keyword": "AdGroupCriterionService",
            "advert": "AdGroupAdService"
        }
        self.helpers = Helpers()

    def __init__(self, account_id, service_type, operations=None):
        self.settings = Settings()
        self.helpers = Helpers()
        self.debug_mode = self.settings.envvars["APP_DEBUG"] == "true"
        self.account_id = account_id
        self.service_type = service_type
        self.operations = operations
        self.refresh_token = self.helpers.getRefreshToken(account_id)
        self.client_customer_id = self.helpers.getClientCustomerID(
            self.settings, account_id)
def main():
    """Download data from the api
    Process the data ready for the app"""

    Log("info", "process_account running", "from process_account.py")

    parser = argparse.ArgumentParser()
    parser.add_argument("-a")
    args = parser.parse_args()

    try:
        account_id = args.a
        if not account_id:
            Log('info', 'Please specify an account id with -a')
            return
        if not Helpers().isActiveAccount(account_id):
            Log("info", "this account isn't active. Exiting", '', account_id)
            return
        download.main(account_id)
    except Exception:
        Log("error", "error starting every night from command line",
            traceback.format_exc())
        raise

    Log("info", "firing run_budget_commander command", '', account_id)
    myProcess().main("run_budget_commander.py", account_id)
def main():
    """Run each morning after data processing has occured
     - This is triggered to run after every_night.py
     """

    parser = argparse.ArgumentParser()
    parser.add_argument("-a")
    args = parser.parse_args()

    try:
        account_id = args.a
        if not account_id:
            print('Please specify an account id with -a')
            return
        if not Helpers().isActiveAccount(account_id):
            Log("info", "this account isn't active. Exiting", '', account_id)
            return
        NotifyViaEmail(account_id).main()
        MonthlyStop(account_id)

        ControlSpend(account_id).main()

    # TODO: catch proper exception
    except Exception:
        Log("error",
            "error starting run_budget_commander.py from command line",
            traceback.format_exc())
        # TODO: return proper exception
        raise
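Both entry points read the account id from the -a flag; a typical invocation (script names taken from the log messages above) looks like:

python process_account.py -a <account_id>
python run_budget_commander.py -a <account_id>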
Example #6
    def addUserAccounts(self, user_id, first_run):

        if not Helpers().isActiveUser(user_id):
            Log("info", "this user isn't active. Exiting",
                'user_id: %s' % (user_id))
            return

        try:

            Log("info", "adding accounts for user id '%s'" % user_id)
            self.user_id = user_id
            accounts_df = self.getAccountsDf()
            if functions.dfIsEmpty(accounts_df):
                return
            accounts_df = accounts_df.drop_duplicates('google_id')
            accounts_df = self.dropDuplicates(accounts_df, first_run)
            if accounts_df.shape[0] == 0 and first_run:
                Log('warning',
                    "no unique google accounts were found for this user",
                    "user_id (%s)" % (user_id), "")
            accounts_df.to_sql("accounts",
                               Database().createEngine(),
                               index=False,
                               if_exists="append")
        except Exception as exception:
            Log("error",
                str(exception) + " (User id: %s)" % (user_id),
                traceback.format_exc())

        Log("info", "finished adding account meta data")
    def __init__(self, account_id, date_range_string, options):
        self.options = options
        self.account_id = account_id
        self.date_range_string = date_range_string
        self.save_report_as_name = self.date_range_string + ".csv"
        self.report_name = options["report_name"]
        self.performance_table_name = options["performance_table_name"]
        self.entity_table_name = options["entity_table_name"]
        self.entity_id_name = options["entity_id_name"]
        self.where_string = options["where_string"]
        self.queryColumnsToTableColumns = options["queryColumnsToTableColumns"]
        self.queryColumnsToDownloadColumns = options[
            "queryColumnsToDownloadColumns"]
        self.settings = Settings()
        self.helpers = Helpers()
        self.moneyFields = ["cost"]
        self.rate_errors = 0
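The options dict keys are fixed by the reads above; a hypothetical instantiation, with report, table, and column names invented purely for illustration:

options = {
    "report_name": "KEYWORDS_PERFORMANCE_REPORT",        # hypothetical report
    "performance_table_name": "keyword_performance",     # hypothetical table
    "entity_table_name": "keywords",                     # hypothetical table
    "entity_id_name": "keyword_id",                      # hypothetical column
    "where_string": "where AdGroupStatus = 'ENABLED'",   # hypothetical filter
    "queryColumnsToTableColumns": {"Cost": "cost"},
    "queryColumnsToDownloadColumns": {"Cost": "Cost"},
}
report = Report(account_id, "30", options)  # date-range string format is an assumption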
Example #8
def sendEmail(account_name, account_google_id, username, account_id):
    subject = "Account '%s' (%s) | Was Synced Successfully" % (
        account_name, account_google_id)

    html_content = getHtmlContent(username, account_name)

    email_address = Helpers().getUserEmail(account_id)

    Log("info", "Sending account success email",
        "%s - send to: %s" % (subject, email_address), account_id)

    Email.send(("*****@*****.**", "AdEvolver"), str(email_address), subject,
               html_content)
Example #9
    @staticmethod
    def getClient(user_id=None, account_id=None, client_customer_id=None):
        """Returns the adwords client
        Manager level if a user_id is provided
        Client (account) level if account_id and client_customer_id are provided
        """
        helpers = Helpers()
        settings = Settings()

        if user_id is not None:
            refresh_token = helpers.getRefreshTokenFromUserId(user_id)
        else:
            refresh_token = helpers.getRefreshToken(account_id)

        if not refresh_token:
            Log("info",
                "Can't determine refresh_token for user %s " % (user_id), '',
                account_id)
            return

        yaml_data = helpers.getYamlData()["adwords"]
        client_id = yaml_data["client_id"]
        client_secret = yaml_data["client_secret"]
        developer_token = yaml_data["developer_token"]
        oauth_client = oauth2.GoogleRefreshTokenClient(
            client_id=client_id,
            client_secret=client_secret,
            refresh_token=refresh_token)

        if client_customer_id is not None:
            adwords_client = adwords.AdWordsClient(
                developer_token,
                oauth_client,
                client_customer_id=client_customer_id)
        else:
            adwords_client = adwords.AdWordsClient(developer_token,
                                                   oauth_client)

        return adwords_client
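Per the docstring, callers pick the access level by which ids they pass; for example:

# manager (MCC) level, given a user_id
client = GoogleAdsApi.getClient(user_id=user_id)
# client (account) level, as Update.getClient() does below
client = GoogleAdsApi.getClient(None, account_id, client_customer_id)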
def runProcessAccountAndBudgetCommander(account_id):
    """Run the account once the time is after 3am (local time) but only run once per day
    Run Budget Commander features after processing because they use the data from the download
    """

    if not Schedule().accountShouldRun(account_id):
        return

    if Helpers().isActiveAccount(account_id):
        try:
            download.main(account_id)
            NotifyViaEmail(account_id).main()
            MonthlyStop(account_id)
            ControlSpend(account_id).main()
        except Exception as exception:
            Log("error", str(exception), traceback.format_exc(), account_id)
def main():
    """Runs emergency stop"""
    Log("info", "run_emergency_stop running", "Output to run_emergency_stop table")

    parser = argparse.ArgumentParser()
    parser.add_argument("-a")
    args = parser.parse_args()

    try:
        account_id = args.a
        if not account_id:
            print('Please specify an account id with -a')
            return
        if not Helpers().isActiveAccount(account_id):
            Log("info", "this account isn't active. Exiting", '', account_id)
            return
        EmergencyStop(account_id)
    except Exception as exception:
        Log("error", str(exception), traceback.format_exc(), account_id)
        raise
Example #12
    def update_batch_job_mutations(self, account_data_frame, response):
        batch_job_response = Helpers.normalize_batch_job_response(response)

        if batch_job_response:
            is_success = False
            message = ''
            query_string = "update mutations set response = '{0}', executed_at = now(), is_success = {1}, is_processing={2}  where id = '{3}'"

            for data in batch_job_response:
                index = int(data['index'])

                if 'result' in data:
                    is_success = True
                    message = ''
                elif 'errorList' in data:
                    is_success = False
                    message = self.helpers.errors_to_message(data['errorList'])

                # Query to DB
                query = query_string.format(
                    message, is_success, account_data_frame.iloc[index, :]['is_processing'],
                    account_data_frame.iloc[index, :]['id'],
                )
                Database().executeQuery(query)
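The index-keyed loop implies normalize_batch_job_response returns one entry per uploaded operation, in upload order; a plausible shape (an assumption, not confirmed here):

# [{'index': 0, 'result': {...}},      -> row 0 succeeded
#  {'index': 1, 'errorList': [...]}]   -> row 1 failed; errors_to_message flattens the list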
Example #13
class Worker(object):
    """Read from the mutations queue and make API requests
    Designed with multi-processing in mind."""

    def __init__(self):
        self.services_map = {
            "campaign": "CampaignService",
            "keyword": "AdGroupCriterionService",
            "advert": "AdGroupAdService"
        }
        self.helpers = Helpers()

    def work(self):
        for service_type in self.services_map:
            self.process_service_type(service_type, "set", "bid")
            self.process_service_type(service_type, "set", "status")
            self.process_service_type(service_type, "add")

    def process_service_type(self, service_type, action, attribute=None):
        """ Grab rows from the queue filtered by service type, action and attribute
        Process the rows (apply mutations to the API)
        Run each chunk once. The next process will pick up the next chunk.
        """
        # Query data from mutations table
        query = self.mutations_query(service_type, action, attribute)
        mutations_data_frame_chunks = pd.read_sql(query, Database().createEngine(), chunksize=2000)

        # Iterate over chunks
        while True:
            try:
                mutations_data_frame = next(mutations_data_frame_chunks)
            except StopIteration:
                return

            # Mark the chunk data as is_processing equal to True
            self.set_data_frame_to_processing(mutations_data_frame)

            account_ids = mutations_data_frame.account_id.drop_duplicates().values

            for account_id in account_ids:
                account_data_frame = mutations_data_frame[mutations_data_frame.account_id == account_id].drop_duplicates()
                # Send mutations to google ads API
                result = self.process_account_mutations(account_data_frame, service_type)

                # Write data to DB
                self.handle_mutations_result(result, account_data_frame)

    def check(self):
        """
        1. Get batch jobs with status != Done
        2. Check status of batch job
        3. If status = Done, Get mutations with batch job id = current batch job id (order by created)
        4. Update mutations with results
        """
        batch_jobs_query = self.batch_jobs_query()
        batch_jobs_chunks = pd.read_sql(batch_jobs_query, Database().createEngine(), chunksize=10)

        # Iterate over chunks
        try:
            batch_jobs_data_frame = next(batch_jobs_chunks)
        except StopIteration:
            return

        # Mark the batch jobs data as is_checking equal to True
        self.set_batch_jobs_to_processing(batch_jobs_data_frame)

        for index, batch_job_data_frame in batch_jobs_data_frame.iterrows():
            response = Update(
                batch_job_data_frame['account_id'], ''
            ).check_batch_job_result(batch_job_data_frame['google_id'])

            if response:
                mutations_query = self.mutations_query_by_batch_job(batch_job_data_frame['id'])
                mutations_data_frame_chunks = pd.read_sql(mutations_query, Database().createEngine(), chunksize=2000)

                # Iterate over chunks
                try:
                    mutations_data_frame = next(mutations_data_frame_chunks)
                except StopIteration:
                    continue

                mutations_data_frame["is_processing"] = 0
                self.update_batch_job_mutations(mutations_data_frame, response)

                # TODO: support all status for batch job
                batch_jobs_data_frame.at[index, "status"] = 'DONE'

        batch_jobs_data_frame["is_checking"] = 0
        self.update_batch_jobs(batch_jobs_data_frame)

    def set_data_frame_to_processing(self, mutations_data_frame):
        """ Set is_processing to true so that other processes won't work on the same rows
        """
        mutations_data_frame["is_processing"] = 1
        self.update_mutations(mutations_data_frame, "", 0)

    def set_batch_jobs_to_processing(self, batch_jobs_data_frame):
        """ Set is_processing to true so that other processes won't work on the same rows
        """
        batch_jobs_data_frame["is_checking"] = 1
        self.update_batch_jobs(batch_jobs_data_frame)

    def process_account_mutations(self, account_data_frame, service_type):
        """ Process mutations for up to 2000 rows. One account_id and one type at a time.
        """
        google_service_type = self.services_map[service_type]
        account_id = account_data_frame.account_id.values[0]

        operations = []
        for index, row in account_data_frame.iterrows():
            operation = Operation(
                row, service_type, row["action"], row["attribute"]
            ).get()
            operations.append(operation)

        try:
            print("attempting {0} mutation...".format(google_service_type))
            result = Update(account_id, google_service_type, operations).send_request()
        except Exception as exception:
            print("An exception has occurred: {0}".format(exception))
            return False, str(exception)

        return True, result

    def handle_mutations_result(self, result, account_data_frame):
        account_data_frame["is_processing"] = 0

        if result[1] == "debug mode":
            self.update_mutations(account_data_frame, result[1], 1)
            return

        if result[0] is False:  # handle error
            msg = '{0}'.format(result[1])
            is_success = 0
            self.update_mutations(account_data_frame, msg, is_success)
            return

        if settings.is_batch_job_processing:
            if result[0] is True:
                if settings.is_processing_batch_job_async:
                    batch_job_uuid_string = self.insert_batch_job(
                        result[1]['batch_job_status'], result[1]['batch_job_id']
                    )

                    account_data_frame["is_processing"] = 1
                    account_data_frame["batch_job_id"] = batch_job_uuid_string

                    self.update_mutations(account_data_frame, '', False)
                else:
                    self.update_batch_job_mutations(account_data_frame, result[1])

        else:
            if result[0] is True:
                message = ''
                is_success = 1
                self.update_mutations(account_data_frame, message, is_success)
                return

            if result[1]["partialFailureErrors"]:
                message = '{0}'.format(result[1]["partialFailureErrors"])
                is_success = 0
                self.update_mutations(account_data_frame, message, is_success)
                return

    def mutations_query(self, service_type, action, attribute=None):
        """ Return a query from request data from DB

        :param service_type:
        :param action:
        :param attribute:
        :return:
        """
        query = """
            select * from mutations
            where (is_success != 1 or isnull(is_success))
            and (is_processing != 1 or isnull(is_processing))
            and type = "{0}"
            and action = "{1}"
        """.format(
            service_type, action
        )

        if attribute:
            query += " and attribute = '{0}'".format(attribute)

        query += " order by account_id, created_at"

        return query
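    # For illustration, mutations_query("keyword", "set", "bid") renders
    # (whitespace aside) as:
    #   select * from mutations
    #   where (is_success != 1 or isnull(is_success))
    #   and (is_processing != 1 or isnull(is_processing))
    #   and type = "keyword"
    #   and action = "set"
    #   and attribute = 'bid' order by account_id, created_at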

    def mutations_query_by_batch_job(self, batch_job_id):
        query = """
            select * from mutations
            where (is_success != 1 or isnull(is_success))
            and batch_job_id = '{0}'
        """.format(
            batch_job_id
        )

        query += " order by account_id, created_at"

        return query

    def batch_jobs_query(self):
        query = """
            SELECT DISTINCT(batch_job.id), batch_job.is_checking, batch_job.google_id,
            status, mut.account_id
            FROM batch_job INNER JOIN mutations as mut on batch_job.id = mut.batch_job_id
            WHERE batch_job.status != 'DONE' and batch_job.is_checking is False;
        """

        return query

    def insert_batch_job(self, batch_job_status, batch_job_google_id):
        # Create batch job
        uuid_string = uuid.uuid1().__str__()
        query = "INSERT INTO batch_job (`id`, `status`, `google_id`) VALUES ('{0}','{1}', '{2}')".format(
            uuid_string, batch_job_status, batch_job_google_id
        )
        Database().createEngine().execute(query)

        return uuid_string

    def update_batch_jobs(self, batch_jobs_data_frame):
        for index, row in batch_jobs_data_frame.iterrows():
            query = "update batch_job set is_checking={0}, status='{1}' where id='{2}'".format(
                row["is_checking"], row["status"], row["id"]
            )

            Database().executeQuery(query)

    def update_mutations(self, account_data_frame, message, is_success):
        for i, row in account_data_frame.iterrows():
            if row['batch_job_id'] is None:
                row['batch_job_id'] = 'null'
            else:
                row['batch_job_id'] = "'%s'" %(row['batch_job_id'])
            query = "update mutations set response = '{0}', executed_at=now(), is_success={1}, is_processing={2}, batch_job_id={3} where id = '{4}'".format(
                message, is_success, row["is_processing"],row['batch_job_id'] , row["id"],
            )

            Database().executeQuery(query)

    def update_batch_job_mutations(self, account_data_frame, response):
        batch_job_response = Helpers.normalize_batch_job_response(response)

        if batch_job_response:
            is_success = False
            message = ''
            query_string = "update mutations set response = '{0}', executed_at = now(), is_success = {1}, is_processing={2}  where id = '{3}'"

            for data in batch_job_response:
                index = int(data['index'])

                if 'result' in data:
                    is_success = True
                    message = ''
                elif 'errorList' in data:
                    is_success = False
                    message = self.helpers.errors_to_message(data['errorList'])

                # Query to DB
                query = query_string.format(
                    message, is_success, account_data_frame.iloc[index, :]['is_processing'],
                    account_data_frame.iloc[index, :]['id'],
                )
                Database().executeQuery(query)
class Update:
    def __init__(self, account_id, service_type, operations=None):
        self.settings = Settings()
        self.helpers = Helpers()
        self.debug_mode = self.settings.envvars["APP_DEBUG"] == "true"
        self.account_id = account_id
        self.service_type = service_type
        self.operations = operations
        self.refresh_token = self.helpers.getRefreshToken(account_id)
        self.client_customer_id = self.helpers.getClientCustomerID(
            self.settings, account_id)

    def getClient(self):
        return GoogleAdsApi.getClient(None, self.account_id,
                                      self.client_customer_id)

    def getService(self):
        client = self.getClient()
        service = client.GetService(self.service_type,
                                    version=Settings.api_version())

        return service

    def send_request(self):
        if self.debug_mode:
            Log(
                "debug", "Mutations will not run in debug mode.",
                "There are {0} attempted operations".format(
                    len(self.operations)))

            return "debug mode"  # IMPORTANT: needs to be this exact text

        if self.settings.is_batch_job_processing:
            if self.settings.is_processing_batch_job_async:
                return self.send_batch_job_request_async()
            else:
                return self.send_batch_job_request_sync()

        return self.send_mutation_request()

    def check_batch_job_result(self, batch_job_id):
        download_url = self.check_batch_job(batch_job_id)

        response = None
        if download_url:
            batch_job_helper = self.getClient().GetBatchJobHelper(
                version=Settings.api_version())
            response = batch_job_helper.ParseResponse(
                urlopen(download_url).read())

        return response

    def send_mutation_request(self):
        criteria = self.getService().mutate(self.operations)

        return criteria

    def send_batch_job_request_sync(self):
        batch_job_helper = self.getClient().GetBatchJobHelper(
            version=Settings.api_version())
        batch_job = self.add_batch_job()

        print('Created BatchJob with ID "{0}", status "{1}"'.format(
            batch_job['id'], batch_job['status']))

        upload_url = batch_job['uploadUrl']['url']
        batch_job_helper.UploadOperations(
            upload_url,
            self.operations,
        )

        download_url = self.get_batch_job_download_url_when_ready(
            batch_job['id'])
        response = batch_job_helper.ParseResponse(urlopen(download_url).read())

        self.helpers.print_batch_job_response(response)

        return response

    def send_batch_job_request_async(self):
        batch_job_helper = self.getClient().GetBatchJobHelper(
            version=Settings.api_version())
        batch_job = self.add_batch_job()

        print('Created BatchJob with ID "{0}", status "{1}"'.format(
            batch_job['id'], batch_job['status']))

        upload_url = batch_job['uploadUrl']['url']
        batch_job_helper.UploadOperations(
            upload_url,
            self.operations,
        )

        return {
            'batch_job_id': batch_job['id'],
            'batch_job_status': batch_job['status']
        }

    def add_batch_job(self):
        """ Add a new BatchJob to upload operations to.

        :return: The new BatchJob created by the request.
        """
        # Initialize appropriate service.
        client = self.getClient()
        batch_job_service = client.GetService('BatchJobService',
                                              version=Settings.api_version())

        # Create a BatchJob.
        batch_job_operations = [{'operand': {}, 'operator': 'ADD'}]

        return batch_job_service.mutate(batch_job_operations)['value'][0]

    def get_batch_job(self, client, batch_job_id):
        """ Retrieves the BatchJob with the given id.

        :param client: an instantiated AdWordsClient used to retrieve the BatchJob.
        :param batch_job_id: a long identifying the BatchJob to be retrieved.
        :return: The BatchJob associated with the given id.
        """
        batch_job_service = client.GetService('BatchJobService',
                                              Settings.api_version())

        selector = {
            'fields': ['Id', 'Status', 'DownloadUrl'],
            'predicates': [{
                'field': 'Id',
                'operator': 'EQUALS',
                'values': [batch_job_id]
            }]
        }

        return batch_job_service.get(selector)['entries'][0]

    def check_batch_job(self, batch_job_id):
        batch_job = self.get_batch_job(self.getClient(), batch_job_id)

        if batch_job['status'] in PENDING_STATUSES:
            return None

        elif 'downloadUrl' in batch_job and batch_job[
                'downloadUrl'] is not None:
            return batch_job['downloadUrl']['url']

    def get_batch_job_download_url_when_ready(
            self, batch_job_id, max_poll_attempts=MAX_POLL_ATTEMPTS):
        """ Retrieves the downloadUrl when the BatchJob is complete.

        :param batch_job_id: a long identifying the BatchJob to be polled.
        :param max_poll_attempts: an int defining the number of times the BatchJob will be
            checked to determine whether it has completed.
        :return: A str containing the downloadUrl of the completed BatchJob.
        :rtype: str
        :raise: Exception: If the BatchJob hasn't finished after the maximum poll attempts
            have been made.
        """
        batch_job = self.get_batch_job(self.getClient(), batch_job_id)
        poll_attempt = 0

        while (poll_attempt < max_poll_attempts
               and batch_job['status'] in PENDING_STATUSES):

            sleep_interval = (30 * (2**poll_attempt) +
                              (random.randint(0, 10000) / 1000))
            print('Batch Job not ready, sleeping for %s seconds.' %
                  sleep_interval)
            time.sleep(sleep_interval)

            batch_job = self.get_batch_job(self.getClient(), batch_job_id)
            poll_attempt += 1

            if 'downloadUrl' in batch_job and batch_job[
                    'downloadUrl'] is not None:
                url = batch_job['downloadUrl']['url']
                print('Batch Job with Id "{0}", Status "{1}" ready.'.format(
                    batch_job['id'], batch_job['status']))

                return url

        raise Exception(
            'Batch Job not finished downloading. Try checking later.')
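# Worked example of the polling backoff in get_batch_job_download_url_when_ready:
# sleep_interval = 30 * 2**poll_attempt + uniform(0, 10) seconds, i.e. roughly
# 30 s, 60 s, 120 s, 240 s, ... doubling per attempt until max_poll_attempts is spent.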
class Report:
    def __init__(self, account_id, date_range_string, options):
        self.options = options
        self.account_id = account_id
        self.date_range_string = date_range_string
        self.save_report_as_name = self.date_range_string + ".csv"
        self.report_name = options["report_name"]
        self.performance_table_name = options["performance_table_name"]
        self.entity_table_name = options["entity_table_name"]
        self.entity_id_name = options["entity_id_name"]
        self.where_string = options["where_string"]
        self.queryColumnsToTableColumns = options["queryColumnsToTableColumns"]
        self.queryColumnsToDownloadColumns = options[
            "queryColumnsToDownloadColumns"]
        self.settings = Settings()
        self.helpers = Helpers()
        self.moneyFields = ["cost"]
        self.rate_errors = 0

    def createAccountDirectory(self):
        write_directory = os.path.abspath(
            os.path.join(self.settings.storage_dir, self.account_id))
        if not os.path.exists(write_directory):
            os.makedirs(write_directory)

    def createReportDirectory(self):
        report_directory = os.path.abspath(
            os.path.join(self.settings.storage_dir, self.account_id,
                         self.report_name))
        if not os.path.exists(report_directory):
            os.makedirs(report_directory)

    def downloadReport(self, account_id, where_string):

        settings = Settings()
        if settings.envvars["SKIP_DOWNLOADS"] == "true":
            print(
                "SKIP_DOWNLOADS set to true in the env, skipping the download")
            return

        columns = ",".join(self.queryColumnsToTableColumns.keys())

        client_customer_id = self.helpers.getClientCustomerID(
            self.settings, account_id)

        client = GoogleAdsApi.getClient(None, account_id, client_customer_id)

        if not client:
            Log(
                'warning',
                "tried running Report but there's no client. Exiting",
                "This is probably due to a missing data such as refresh token",
                account_id)
            return

        # Initialize appropriate service.
        report_downloader = client.GetReportDownloader(version='v201809')

        report_query = "select %s from %s" % (columns, self.report_name)
        report_query += " " + where_string
        report_query += " during %s" % (functions.dateRangeFromDays(
            self.date_range_string, account_id))

        write_path = os.path.abspath(
            os.path.join(self.settings.storage_dir, self.account_id,
                         self.report_name, self.save_report_as_name))

        with open(write_path, 'w', encoding='utf-8') as output_file:
            try:
                report_downloader.DownloadReportWithAwql(
                    report_query,
                    'CSV',
                    output_file,
                    skip_report_header=True,
                    skip_column_header=False,
                    skip_report_summary=True,
                    include_zero_impressions=True,
                    client_customer_id=client_customer_id)

            except Exception as exception:
                if str(exception).find(
                        "ZERO_IMPRESSIONS_REQUEST_NOT_SUPPORTED") > -1:
                    report_downloader.DownloadReportWithAwql(
                        report_query,
                        'CSV',
                        output_file,
                        skip_report_header=True,
                        skip_column_header=False,
                        skip_report_summary=True,
                        client_customer_id=client_customer_id)
                elif str(exception).lower().find("rate") > -1:
                    # rate exceeded error backoff - the 10th error will be 55 minutes
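                    # the n-th rate error sleeps n*(n+1)*30 seconds:
                    # 60 s, 180 s, 360 s, ..., 10*11*30 = 3300 s (55 min)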
                    Log("error", str(exception),
                        "tries: %s" % (self.rate_errors), self.account_id)

                    self.rate_errors += 1
                    time.sleep(
                        (self.rate_errors * (self.rate_errors + 1)) * 30)
                    self.downloadReport(account_id, where_string)

                else:
                    raise

    def convertCsvToDataframe(self):

        write_directory = os.path.abspath(
            os.path.join(self.settings.storage_dir, self.account_id,
                         self.report_name))
        write_path = os.path.abspath(
            os.path.join(write_directory, self.save_report_as_name))

        df = pd.read_csv(write_path)

        return df

    def basicProcessing(self, df):
        downloadColumnsToTableColumns = {}
        for col in self.queryColumnsToDownloadColumns:
            downloadColumn = self.queryColumnsToDownloadColumns[col]
            tableColumn = self.queryColumnsToTableColumns[col]
            downloadColumnsToTableColumns[downloadColumn] = tableColumn

        df.reset_index(inplace=True)
        df["created_at"] = datetime.now()
        df["updated_at"] = datetime.now()
        df["account_id"] = self.account_id

        df = df.rename(columns=downloadColumnsToTableColumns)

        for field in self.moneyFields:
            if field in list(df.columns):
                df[field] = df[field] / 1000000

        # add the calculated metrics (ctr, etc)
        settings = Settings()
        for metric in settings.calculatedMetrics:
            operator = settings.calculatedMetrics[metric][1]
            first_metric = settings.calculatedMetrics[metric][0]
            second_metric = settings.calculatedMetrics[metric][2]
            if first_metric not in df.columns:
                continue
            if second_metric not in df.columns:
                continue

            if operator == "/":
                df[metric] = df[first_metric] / \
                             df[second_metric]
                continue
            if settings.calculatedMetrics[metric][3]:
                df[metric] = df[metric] * 100

        return df
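    # The loop above implies settings.calculatedMetrics maps a metric name to a
    # (numerator, operator, denominator, as_percentage) tuple, e.g. (assumed,
    # not confirmed here): {"ctr": ("clicks", "/", "impressions", True)}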

    # entity tables such as adverts and keywords, i.e. not performance data
    def writeToEntitiesTable(self, df, report, account_id):
        settings = Settings()

        delete_query = "delete from %s where account_id = '%s'" % (
            self.entity_table_name, account_id)
        Database().executeQuery(delete_query)

        # we only need to write the keyword data in once
        # for the longest range to cover all keywords
        final_date_range = settings.date_ranges[-1]
        df = df[df.date_range == final_date_range]
        df['account_id'] = account_id

        df = df.reset_index(drop=True).drop_duplicates()

        report.writeDataframeToTable(df, self.entity_table_name)

    def writeToPerformanceTable(self, df, report, account_id):
        delete_query = "delete from %s where account_id = '%s' " % (
            self.performance_table_name, account_id)
        Database().executeQuery(delete_query)
        df["id"] = pd.Series([uuid.uuid1()
                              for i in range(len(df))]).astype(str)
        report.writeDataframeToTable(df, self.performance_table_name)

    def writeDataframeToTable(self, df, database_table_name):
        df = functions.trimDfToTableColumns(df, database_table_name)
        functions.append_df_to_sql_table(df, database_table_name)

    def stringifyIds(self, df):
        for column in df.columns:
            if "id" in column.lower():
                df[column] = df[column].astype(str)

        return df

    def createDfWithAllDateRanges(self, account_id):
        all_df = None

        settings = Settings()
        for date_range in settings.date_ranges:

            if date_range == "THIS_MONTH" and LocalDates(
                    account_id).is_first_of_month:
                continue

            report = Report(account_id, date_range, self.options)
            report.createAccountDirectory()
            report.createReportDirectory()
            report.downloadReport(account_id, report.where_string)

            df = report.convertCsvToDataframe()

            df["date_range"] = date_range

            if df.shape[0] == 0:
                print("%s df is empty" % (date_range))
                continue

            if functions.dfIsEmpty(all_df):
                all_df = df.copy()
            else:
                all_df = pd.concat([all_df, df.copy()])

        if functions.dfIsEmpty(all_df):
            Log("info", "%s report is empty" % (self.report_name), "",
                self.account_id)
            return

        all_df = report.stringifyIds(all_df)

        return all_df.reset_index()
def runEmergencyStop(account_id):
    if Helpers().isActiveAccount(account_id):
        try:
            EmergencyStop(account_id)
        except Exception as exception:
            Log("error", str(exception), traceback.format_exc(), account_id)