def main():
    """Scan recent BigQuery jobs for failures and email an alert when any are found.

    Reads service-account credentials and notifier settings from the JSON
    config file ``config/bqmonitor.conf`` located next to this script, pages
    through the project's job list up to a duration-derived limit, collects
    failed jobs into a PrettyTable, and mails the table as HTML when it is
    non-empty.
    """
    # Config directory/file resolved relative to this script's location.
    configpath = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config')
    configfile = os.path.join(configpath, 'bqmonitor.conf')

    # Enable the command line interface.
    args = commandLineParser()

    # Guard clause: bail out early on invalid arguments.
    # (Original used exit(); a plain return is equivalent inside main().)
    if not validated(args.duration, args.unit):
        logger.debug('Exit.')
        return

    # Load the config file ('rb' lets json.load autodetect the encoding).
    with open(configfile, 'rb') as f:
        conf = json.load(f)

    # Define the final table columns and settings.
    failed_jobs = PrettyTable(['No.', 'Job Id', 'Creation Time', 'Dataset', 'Table', 'SourceURI', 'Failure Reason'])
    failed_jobs.sortby = 'No.'
    failed_jobs.align  = 'l'
    failed_jobs.format = True

    # Store the service account information.
    api = SetGoogleAPIFromServiceAccount(conf['api']['projectNumber'],
                                         conf['api']['serviceEmail'],
                                         os.path.join(configpath, conf['api']['keyFilename']))

    # Authenticate with the Google BigQuery API.
    bq = GetGoogleBigQueryClient(api)

    # Initial page token and job counter for pagination below.
    page_token = None
    numOfJobs  = 0

    logger.info('Searching failed jobs in project id ' + str(conf['api']['projectNumber']) + '...')

    # Maximum number of jobs to inspect, scaled by the requested time unit:
    # D(ays) -> x100, H(ours) -> x10, M(inutes) -> x1 of jobsPerMinute per
    # duration unit; any other unit falls back to the bare default limit.
    jobs_per_minute = conf['api']['jobsPerMinute']
    unit_factor = {'D': 100, 'H': 10, 'M': 1}
    unit = str(args.unit).upper()
    if unit in unit_factor:
        limit = unit_factor[unit] * args.duration * jobs_per_minute
    else:
        limit = jobs_per_minute

    # Page through the job list until the limit is reached or pages run out.
    while numOfJobs < limit:
        response = bq.getListOfJobs(page_token)

        # The jobs.list response omits the 'jobs' key entirely when there are
        # no jobs; the original response['jobs'] raised KeyError in that case.
        for job in response.get('jobs') or []:
            # Check whether the current job is failed or not.
            isFailedJob(failed_jobs, job, args.duration, args.unit)
            numOfJobs += 1

        # Pagination: advance to the next page, or stop when the token is
        # missing or empty (the original kept a stale token on an empty value,
        # which could re-fetch the same page indefinitely).
        page_token = response.get('nextPageToken')
        if not page_token:
            break

    # Send an alert email if one or more jobs were detected as failed.
    # NOTE(review): _rows is a PrettyTable internal; newer versions expose a
    # public .rows property — confirm against the pinned prettytable version.
    if len(failed_jobs._rows) > 0:
        notification = SendMail(**dict(conf['notifier']))
        notification.send(failed_jobs.get_html_string())
        logger.info('One or more jobs failed. A notification mail has been sent.')
    else:
        logger.info('All jobs completed successfully.')
        logger.debug('Exit.')