Example No. 1
    def lead_content_read(self):

        try:
            logger.info(
                "Start Reading:-  NDP - Sage NA Lead Gen - Content Synd Tracker.xlsx at "
                + str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))

            read_lead_content = pd.read_excel(
                self.section_value[12] +
                "NDP - Sage NA Lead Gen - Content Synd Tracker.xlsx")

            logger.info(
                "Done Reading:-  NDP - Sage NA Lead Gen - Content Synd Tracker.xlsx at "
                + str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))

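            # Rename the tracker columns to the standard reporting names used downstream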
            read_lead_content.rename(columns={
                "Region": "Market",
                "Media Type": "Channel",
                "Vendor": "Site",
                "Leads": "Conversions",
                "Spend": "Spend Local"
            }, inplace=True)

            self.read_lead_content = read_lead_content

        except IOError as e:
            logger.error(str(e))
Example No. 2
    def main(self):
        self.subject_line()
        self.login(self.section_value[5], 'Password5')
        self.inbox()
        logger.info('Start downloading emails at ' + str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
        self.email_check(self.section_value[12])
        self.close_connection()
        logger.info('Emails downloaded at ' + str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
Example No. 3
    def writing_publisher_internal(self):
        logger.info("Writing Internal Publisher Data")
        self.internal_publisher_data_new.to_excel(self.writer_file, index=False,
                                                  sheet_name="Publisher Provided Data",
                                                  startrow=1, startcol=1)

        logger.info("Done")
Example No. 4
    def writing_social_internal(self):
        logger.info("Writing Social Data")
        self.social_internal_pivot_reset.to_excel(self.writer_file, index=False,
                                                  sheet_name="Social Performance",
                                                  startrow=1, startcol=1)

        logger.info("Done")
Example No. 5
    def save_and_close_writer(self):
        """
        Save and close the output file.
        :return: Nothing
        """
        self.writer_file.save()
        self.writer_file.close()
        logger.info("File has been created at {} at {}".format(
            self.path, datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
Example No. 6
    def writing_publisher_uk(self):

        logger.info("Writing UK Publisher Data")
        self.uk_pub_data_reset.to_excel(self.writer_file, index=False,
                                        sheet_name="Publisher Provided Data",
                                        startrow=self.internal_publisher_data_new.shape[0] + 4,
                                        startcol=self.uk_pub_data_reset.shape[1] + 4)

        logger.info("Done")
Example No. 7
    def data_reader_ndp_raw(self):

        logger.info("Start Reading:- NdpRawDataFile.csv at " +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
        read_ndp_data = pd.read_csv(self.section_value[9] +
                                    "NdpRawDataFile.csv")
        logger.info("Done Reading:- NdpRawDataFile.csv at " +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
        self.read_ndp_data = read_ndp_data
Example No. 8
    def writing_social_twitter(self):
        logger.info("Writing twitter Data")

        writing_twitter_social_data = self.twitter_data_player_reset.to_excel(self.writer_file, index=False,
                                                                              sheet_name="Social Performance",
                                                                              startrow=1,
                                                                              startcol=
                                                                              self.social_internal_pivot_reset.
                                                                              shape[1]+2)
        logger.info("Done")
Example No. 9
    def writing_social_facebook(self):
        logger.info("Writing facebook Data")

        writting_facbook_social_data = self.facebook_social_data.to_excel(self.writer_file, index=False,
                                                                          sheet_name="Social Performance",
                                                                          startrow=
                                                                          self.twitter_data_player_reset.shape[0]+4,
                                                                          startcol=
                                                                          self.social_internal_pivot_reset.shape[1]+2)
        logger.info("Done")
Example No. 10
    def __init__(self):
        super(NdpData, self).__init__()
        now = datetime.datetime.now()
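        # Work out the previous month and the year it belongs to (January rolls back to December of the previous year)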
        last_month = now.month-1 if now.month > 1 else 12
        last_year = now.year if now.month > 1 else now.year - 1

        self.path = self.section_value[10] + "Data Audit_GDS_All_Markets-{}-{:02d}.xlsx".format(last_year, last_month)

        logger.info("Start creating NDPFile at " + str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))

        self.writer_file = pd.ExcelWriter(self.path, engine="xlsxwriter", datetime_format="YYYY-MM-DD")
Example No. 11
    def writing_publisher_market_other_us(self):
        logger.info("Writing Other than US/CA Publisher Data")

        self.market_publisher_other_than_us.to_excel(self.writer_file, index=False,
                                                     sheet_name="Publisher Provided Data",
                                                     startrow=1,
                                                     startcol=self.internal_publisher_data_new.shape[1] + 2)
        logger.info("Done")
Example No. 12
    def writing_publisher_market_us(self):
        logger.info("Writing US/CA Publisher Data")

        self.final_us_ca_publisher_data.to_excel(self.writer_file, index=False,
                                                 sheet_name="Publisher Provided Data",
                                                 startrow=self.internal_publisher_data_new.shape[0] + 4,
                                                 startcol=1)

        logger.info("Done")
Example No. 13
    def internal_data(self):
        # Remove unwanted channel rows from the NDP Tableau raw data
        logger.info("Start removing channel from tableau data")
        self.read_ndp_data_platform = self.read_ndp_data

        remove_row_channel = self.read_ndp_data_platform[self.read_ndp_data_platform['Channel'].isin([
            'CONTENT SYNDICATION', 'OTHER', 'LEAD AGGREGATOR'])]
        self.read_ndp_data_platform = self.read_ndp_data_platform.drop(remove_row_channel.index, axis=0)

        logger.info("Start removing BR Market from tableau data")
        remove_row_market = self.read_ndp_data_platform[self.read_ndp_data_platform['Market'].isin(['BR'])]

        self.read_ndp_data_platform = self.read_ndp_data_platform.drop(remove_row_market.index, axis=0)
Example No. 14
    def main(self, file_path):
        body, names, emails, subjects, attachments, cc = self.get_contacts('/home/groupm/datadump/billingdata/'
                                                                           'outlookReciepientsList.csv')  # read contacts

        # set up the SMTP server
        logger.info('Setting up server with: {} '.format(self.username))
        s = smtplib.SMTP(host='smtp-mail.outlook.com', port=587)
        s.ehlo()
        s.starttls()
        s.ehlo()
        s.login(self.username, self.password)
        # For each contact, send the email:
        attachment = ''
        try:
            for msg_body, name, email, subject, attachment, cc_addr in zip(body, names, emails, subjects,
                                                                           attachments, cc):
                msg = MIMEMultipart()  # create a message

                # setup the parameters of the message
                msg['From'] = self.username
                msg['To'] = email
                msg['Subject'] = subject
                msg['Cc'] = cc_addr

                part = MIMEBase('application', 'octet-stream')
                with open(file_path + attachment, 'rb') as attachment_file:
                    part.set_payload(attachment_file.read())
                encoders.encode_base64(part)
                part.add_header('Content-Disposition', 'attachment', filename=attachment)

                msg.attach(part)

                # add in the message body
                msg.attach(MIMEText(msg_body, 'plain'))

                # send the message via the server set up earlier
                logger.info('Sending email with Subject: {} to {}'.format(subject, email))
                s.sendmail(msg['From'], (email, cc_addr), msg.as_string())
                logger.info('Email sent to {}'.format(name))
        except OSError as e:
            logger.error(str(e) + ' ' + attachment)
        # Terminate the SMTP session and close the connection
        s.quit()
Example No. 15
    def internal_publisher_data(self):
        self.read_ndp_data_publisher = self.read_ndp_data
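        # Drop DISPLAY, SEARCH and SOCIAL rows so only the publisher-side channels remain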
        remove_row_channel_publisher = self.read_ndp_data_publisher[self.read_ndp_data_publisher['Channel'].isin(
            ['DISPLAY', 'SEARCH', 'SOCIAL'])]
        self.read_ndp_data_publisher = self.read_ndp_data_publisher.drop(remove_row_channel_publisher.index, axis=0)

        logger.info("Start removing BR Market from tableau data")
        remove_row_market_publisher = self.read_ndp_data_publisher[self.read_ndp_data_publisher['Market'].isin(['BR'])]

        self.read_ndp_data_publisher = self.read_ndp_data_publisher.drop(remove_row_market_publisher.index, axis=0)

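        # Enrich the publisher rows with the platform mapping (on Channel) and the advertiser mapping (on Market)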
        publisher_data_merge_platform = [self.read_ndp_data_publisher, self.read_tableau_platform_mapping]
        merged_internal_data_publisher = reduce(lambda left, right: pd.merge(left, right, on='Channel'),
                                                publisher_data_merge_platform)

        publisher_data_merge_market = [merged_internal_data_publisher, self.read_tableau_advertiser_mapping]
        merged_publisher_data_advertiser = reduce(lambda left, right: pd.merge(left, right, on='Market'),
                                                  publisher_data_merge_market)

        self.merged_publisher_data_advertiser = merged_publisher_data_advertiser
Example No. 16
    def dcm_data_reader(self):

        logger.info("Start Reading:- DMC_Report.zip at " +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
        read_dmc_data_zf = zipfile.ZipFile(self.section_value[12] +
                                           "DMC_Report.zip")
        read_dmc_data = pd.read_csv(read_dmc_data_zf.open(read_dmc_data_zf.namelist()[0]),
                                    skiprows=9,
                                    skipfooter=1,
                                    engine='python',
                                    encoding="utf-8",
                                    parse_dates=['Date'])

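        # Keep only rows that fall in the previous reporting month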
        read_dmc_data = read_dmc_data[
            (read_dmc_data['Date'].dt.year == self.last_year)
            & (read_dmc_data['Date'].dt.month == self.last_month)]

        logger.info("Done Reading:- DMC_Report.zip at " +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
        self.read_dmc_data = read_dmc_data
Example No. 17
    def ndp_static_conversion_reader(self):
        logger.info("Start Reading:- DMC_Static_Conversions.csv at " +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))

        read_dmc_static_zf = zipfile.ZipFile(self.section_value[12] +
                                             "DMC_Static_Conversions.zip")
        read_dmc_static = pd.read_csv(read_dmc_static_zf.open(
            zipfile.ZipFile.namelist(read_dmc_static_zf)[0]),
                                      skiprows=9,
                                      skipfooter=1,
                                      engine='python',
                                      encoding="utf-8",
                                      parse_dates=['Date'])

        # read_conversion_raw_file = pd.read_csv(self.section_value[12] + "DMC_Static_Conversions.csv",
        #                                        encoding="ISO-8859-1")

        logger.info("Done Reading:- DMC_Static_Conversions.csv at " +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))

        self.read_conversion_raw_file = read_dmc_static
Example No. 18
    def writing_performance(self):

        logger.info("Writing Internal Performance Data")
        self.internal_data_performance.to_excel(self.writer_file, index=False,
                                                sheet_name='Display Performance',
                                                startrow=1, startcol=1)

        logger.info("Done")

        logger.info("Writing DBM Performance Data")
        self.dbm_dcm_data.to_excel(self.writer_file, index=False,
                                   sheet_name='Display Performance', startrow=1,
                                   startcol=self.internal_data_performance.shape[1] + 2)

        logger.info("Done")
Example No. 19
    def dbm_data_reader(self):

        logger.info("Start Reading:- DBM_Report.zip at " +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))

        read_dbm_data_zf = zipfile.ZipFile(self.section_value[12] +
                                           "DBM_Report.zip")
        read_dbm_data = pd.read_csv(read_dbm_data_zf.open(read_dbm_data_zf.namelist()[0]),
                                    engine='python',
                                    encoding="utf-8",
                                    error_bad_lines=False)

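        # Keep only rows up to the first blank Date, then restrict to the previous reporting month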
        read_dbm_data = read_dbm_data[:read_dbm_data['Date'].isnull().idxmax()]
        read_dbm_data['Date'] = pd.to_datetime(read_dbm_data['Date'])

        read_dbm_data = read_dbm_data[
            (read_dbm_data['Date'].dt.year == self.last_year)
            & (read_dbm_data['Date'].dt.month == self.last_month)]

        logger.info("Done Reading:- DBM_Report.zip at " +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))

        self.read_dbm_data = read_dbm_data
Example No. 20
    def publisher_data_read(self):

        try:
            logger.info(
                "Start Reading:- Sage Global - Publisher Data - Daily.xlsx at "
                + str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))

            read_publisher_data = pd.read_excel(
                self.section_value[12] +
                "Sage Global - Publisher Data - Daily.xlsx",
                sheet_name='Publisher Provided Data Sheet')

            read_publisher_data = read_publisher_data[:read_publisher_data[
                'Date'].isnull().idxmax()]

            logger.info(
                "Done Reading:- Sage Global - Publisher Data - Daily.xlsx at "
                + str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))

            self.read_publisher_data = read_publisher_data

        except IOError as e:
            logger.error(str(e))
Example No. 21
    def uk_publisher_data(self):

        logger.info("Start Reading:-  UK Publisher files at " +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
        path = self.section_value[14]
        files = os.listdir(path)

        files_xlsx = [f for f in files if f[-4:] == 'xlsx']

        df = pd.DataFrame()
        for f in files_xlsx:
            logger.info('Start Reading filename: - ' + f +
                        ' Sheet Name -  Sheet1 ')
            try:
                data = pd.read_excel(self.section_value[14] + f, 'Sheet1')
                data['WorkbookName'] = f
                df = df.append(data, ignore_index=True, sort=True)
            except xlrd.biffh.XLRDError as e:
                logger.error('Sheet1 not available in file ' + f + ': ' + str(e))

            logger.info(
                "Done Reading:-  UK Publisher file " + f +
                ' Sheet Name -  Sheet1 ' +
                str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))

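        # Extract the numeric part of the 'Week Number' label as a float (NewWeek)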
        df['Week'] = df.loc[:, 'Week Number'].astype(str)
        df['NewWeek'] = df['Week'].str.extract(r'(\d+)').astype(float)
        df = df.dropna(subset=['Market'], how='all')
        # df['Year'] = df['Year'].astype(datetime)
        # df = df[(df['Year'].dt.year == self.last_year)]
        df.rename(columns={
            "Inquiries/Leads Delivered": "Conversions",
            "Inquiries/Leads ACCEPTED": "Leads ACCEPTED",
            "Inquiries/Leads Booked": "Leads Booked"
        },
                  inplace=True)

        df['NConversions'] = df['Conversions'].str.replace('["/" " "]', '-')
        df['NConversions'].fillna((df['Conversions']), inplace=True)
        df.fillna(0, inplace=True)
        df = df[(df['Year'] == self.last_year)]
        self.publisher_data_uk = df.loc[:, [
            'NewWeek', 'Market', 'WorkbookName', 'Delivered Budget',
            'NConversions'
        ]]
        self.publisher_data_uk.to_excel(self.writer_file,
                                        sheet_name='UKPublisherData')
        self.writer_file.save()
        exit()
Example No. 22
    def main(self):
        self.get_projects()
        logger.info('Start reading issue description at ' +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
        self.issue_description()
        logger.info('Done reading issue description at ' +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))

        logger.info('Start creating issue for DE at ' +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
        self.de()
        logger.info('Done creating issue for DE at ' +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))

        logger.info('Start creating issue for FR at ' +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
        self.fr()
        logger.info('Done creating issue for FR at ' +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))

        logger.info('Start creating issue for ES at ' +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
        self.es()
        logger.info('Done creating issue for ES at ' +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))

        logger.info('Start creating issue for US at ' +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
        self.us()
        logger.info('Done creating issue for US at ' +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))

        logger.info('Start creating issue for UK at ' +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
        self.uk()
        logger.info('Done creating issue for UK at ' +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))

        logger.info('Start creating issue for MEA at ' +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
        self.mea()
        logger.info('Done creating issue for MEA at ' +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))

        logger.info('Start creating issue for AUS at ' +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
        self.aus()
        logger.info('Done creating issue for AUS at ' +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))

        logger.info('Start creating issue for APAC at ' +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
        self.apac()
        logger.info('Done creating issue for APAC at ' +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
Example No. 23
    def ndp_mapping_reader(self):

        logger.info("Start Reading:- staticActivityConversionMapping.csv at " +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
        read_static_site_conversion = pd.read_csv(
            self.section_value[9] + "staticActivityConversionMapping.csv")

        logger.info("Done Reading:- staticActivityConversionMapping.csv at " +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))

        logger.info("Start Reading:- socialplatformtableumapping.csv at " +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))

        read_social_site_platform = pd.read_csv(
            self.section_value[9] + "socialplatformtableumapping.csv")

        logger.info("Done Reading:- socialplatformtableumapping.csv at " +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))

        logger.info("Start Reading:- siteStaticConversionMapping.csv at " +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
        read_site_dcm_platform_mapping = pd.read_csv(
            self.section_value[9] + "siteStaticConversionMapping.csv")

        logger.info("Done Reading:- siteStaticConversionMapping.csv at " +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))

        logger.info("Start Reading:- advertiserMarketMapping.csv at " +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))

        read_advertiser_mapping = pd.read_csv(self.section_value[9] +
                                              "advertiserMarketMapping.csv",
                                              encoding="utf-8")

        logger.info("Done Reading:- advertiserMarketMapping.csv at " +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))

        logger.info("Start Reading:- advertiserMappingTableau.csv at " +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
        read_tableau_advertiser_mapping = pd.read_csv(
            self.section_value[9] + "advertiserMappingTableau.csv")

        logger.info("Done Reading:- advertiserMappingTableau.csv at " +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))

        logger.info("Start Reading:- tableauPlatformMapping.csv at " +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
        read_tableau_platform_mapping = pd.read_csv(
            self.section_value[9] + "tableauPlatformMapping.csv")

        logger.info("Done Reading:- tableauPlatformMapping.csv at " +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))

        logger.info("Start Reading:- siteDisplayPerformanceMapping.csv at " +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
        read_site_display_mapping = pd.read_csv(
            self.section_value[9] + "siteDisplayPerformanceMapping.csv")

        logger.info("Done Reading:- siteDisplayPerformanceMapping.csv at " +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))

        self.read_static_site_conversion = read_static_site_conversion
        self.read_site_dcm_platform_mapping = read_site_dcm_platform_mapping
        self.read_advertiser_mapping = read_advertiser_mapping
        self.read_tableau_advertiser_mapping = read_tableau_advertiser_mapping
        self.read_tableau_platform_mapping = read_tableau_platform_mapping
        self.read_site_display_mapping = read_site_display_mapping
        self.read_social_site_platform = read_social_site_platform
Example No. 24
    def writing_conversion(self):
        logger.info("Start Writing Internal Conversions")
        self.internal_conversions_new.to_excel(self.writer_file, sheet_name='Conversions',
                                               index=False, startrow=1, startcol=1)
        logger.info("Done")

        logger.info("Start Writing Static Conversion")
        self.data_static_conversion_new.to_excel(self.writer_file, sheet_name='Conversions',
                                                 index=False, startrow=1,
                                                 startcol=self.internal_conversions_new.shape[1] + 2)

        logger.info("Done")

        logger.info("Start Writing Dynamic Conversion")
        self.dynamic_conversion.to_excel(self.writer_file, sheet_name='Conversions',
                                         index=False, startrow=1,
                                         startcol=self.internal_conversions_new.shape[1] + 2 +
                                         self.data_static_conversion_new.shape[1] + 1)
        logger.info("Done")
Example No. 25
    def file_reader(self):
        logger.info("Start Reading:-  Adwords files at " +
                    str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
        path = self.section_value[35]
        report_path = self.section_value[37]
        files = os.listdir(path)

        files_csv = [f for f in files if f[-3:] == 'csv']

        for f in files_csv:
            logger.info('Start Reading filename: - ' + f)
            try:
                data = pd.read_csv(path + f,
                                   skiprows=1,
                                   skipfooter=1,
                                   engine='python',
                                   encoding="utf-8")
                data['WorkbookName'] = f
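                # Aggregate Clicks and Cost per Campaign and drop campaigns with zero clicks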
                data_new = pd.pivot_table(data,
                                          index=['Campaign'],
                                          values=['Clicks', 'Cost'],
                                          aggfunc='sum')
                adwords_data = data_new.reset_index()
                adwords_data = adwords_data[(adwords_data[['Clicks']] !=
                                             0).all(axis=1)]
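                # AdWords reports cost in micros; convert to standard currency units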
                adwords_data['Cost'] = adwords_data['Cost'] / 1000000
                adwords_file_creation = pd.ExcelWriter(
                    report_path + os.path.splitext(f)[0] + ".xlsx",
                    engine="xlsxwriter",
                    datetime_format="YYYY-MM-DD")
                adwords_data.to_excel(adwords_file_creation,
                                      index=False,
                                      startrow=6)

                data_info = pd.read_csv(self.section_value[34] +
                                        'Client_id_mcc.csv',
                                        engine='python',
                                        encoding="utf-8")

                data_info = data_info.drop('MCCID', axis=1)
                data_info_new = data_info.set_index('Client').T
                data_info_new.to_excel(adwords_file_creation,
                                       index=True,
                                       startrow=0,
                                       startcol=0)
                workbook = adwords_file_creation.book
                worksheet = adwords_file_creation.sheets['Sheet1']
                worksheet.hide_gridlines(2)
                worksheet.set_zoom(75)
                format_header = workbook.add_format({
                    "bold": True,
                    "bg_color": "#00B0F0",
                    "border": 1
                })
                worksheet.conditional_format(6, 0, 6, adwords_data.shape[1], {
                    "type": "no_blanks",
                    "format": format_header
                })

                border_row = workbook.add_format({"border": 1})
                bold_format = workbook.add_format({
                    "bold": True,
                    "bg_color": "#00B0F0",
                    "border": 1,
                    "num_format": "#,##0"
                })
                worksheet.conditional_format(7, 0, adwords_data.shape[0] + 7,
                                             adwords_data.shape[1], {
                                                 "type": "no_blanks",
                                                 "format": border_row
                                             })

                number_format = workbook.add_format({"num_format": "#,##0"})
                dollar_format = workbook.add_format({"num_format": "#,##0.00"})
                worksheet.conditional_format(7, 1, adwords_data.shape[0] + 7,
                                             1, {
                                                 "type": "no_blanks",
                                                 "format": number_format
                                             })

                worksheet.conditional_format(7, 2, adwords_data.shape[0] + 7,
                                             2, {
                                                 "type": "no_blanks",
                                                 "format": dollar_format
                                             })

                worksheet.write_string(adwords_data.shape[0] + 7, 0, 'Total',
                                       bold_format)
                worksheet.write_string(0, 0, 'Client')
                worksheet.write_formula(
                    adwords_data.shape[0] + 7, 1,
                    '=SUM(B{}:B{})'.format(8, adwords_data.shape[0] + 7),
                    bold_format)
                worksheet.write_formula(
                    adwords_data.shape[0] + 7, 2,
                    '=SUM(C{}:C{})'.format(8, adwords_data.shape[0] + 7),
                    bold_format)

                adwords_file_creation.save()
                adwords_file_creation.close()
            except (pd.errors.EmptyDataError, KeyError):
                logger.error(f + " is empty")

            logger.info(
                "Done Reading:-  Adwords file " + f +
                str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))