def csv_from_excel():
    """Import every sheet of the 5/22/15 external-links workbook into MySQL.

    Each sheet maps to a table of the same name; spreadsheet rows (row 0 is
    assumed to be the header) become inserts tagged with week_no = 3.

    Returns:
        'Done' once all rows are inserted and committed.
    """
    fname = '../External_Links_Report_Module_Breakdown_with_Summary_5_22_15.xlsx'
    rcDict = getExcelData.getFileData(fname)
    # NOTE(review): hard-coded DB credentials; move to config/environment.
    conn = MySQLdb.connect(host='localhost', user='******', passwd='tas123',
                           db='Lexis_Link_Monitoring_New')
    cursor = conn.cursor()
    try:
        for sheet in sorted(rcDict):
            sheetdata = rcDict[sheet]
            nrows = sheetdata['nrows']
            ncols = sheetdata['ncols']
            sheet_name = sheetdata['sheet_name']
            cell_dict = sheetdata['cell_dict']
            for row in range(1, nrows):  # skip header row 0
                rowwiseData = []
                for col in range(0, ncols):
                    data = cell_dict.get((row, col), None)
                    if data:
                        dt = data.get('data', None)
                        try:
                            dt = convert(dt)
                        except Exception:  # was a bare except; keep best-effort
                            pass
                        if dt is None:
                            dt = u''  # robustness: .encode on None would raise
                        rowwiseData.append(dt.encode('ascii', 'xmlcharrefreplace'))
                # The table name cannot be a bound parameter, but the cell
                # values now are -- fixes the quoting/SQL-injection bug of
                # interpolating raw strings into the statement.
                sqlst = ('insert into ' + sheet_name +
                         '(external_link_label,topic_tree_location,'
                         'external_link_address,response,response_category,'
                         'ping_date_time,redirect_url,week_no) '
                         'values(%s,%s,%s,%s,%s,%s,%s,%s)')
                cursor.execute(sqlst, (rowwiseData[0], rowwiseData[1],
                                       rowwiseData[2], '', '', '', '', 3))
        # BUG FIX: MySQLdb autocommit is off by default; without an explicit
        # commit the inserts may never persist (InnoDB rolls back on close).
        conn.commit()
    finally:
        cursor.close()
        conn.close()
    return 'Done'
def csv_from_excel():
    """Import the 5/22/15 external-links workbook into per-sheet MySQL tables.

    Duplicate of the importer above; rows after the header row are inserted
    into the table named after each sheet with a fixed week_no of 3.

    Returns:
        'Done' once all rows are inserted and committed.
    """
    fname = '../External_Links_Report_Module_Breakdown_with_Summary_5_22_15.xlsx'
    rcDict = getExcelData.getFileData(fname)
    # NOTE(review): hard-coded DB credentials; move to config/environment.
    conn = MySQLdb.connect(host='localhost', user='******', passwd='tas123',
                           db='Lexis_Link_Monitoring_New')
    cursor = conn.cursor()
    try:
        for sheet in sorted(rcDict):
            sheetdata = rcDict[sheet]
            nrows = sheetdata['nrows']
            ncols = sheetdata['ncols']
            sheet_name = sheetdata['sheet_name']
            cell_dict = sheetdata['cell_dict']
            for row in range(1, nrows):  # skip header row 0
                rowwiseData = []
                for col in range(0, ncols):
                    data = cell_dict.get((row, col), None)
                    if data:
                        dt = data.get('data', None)
                        try:
                            dt = convert(dt)
                        except Exception:  # narrowed from a bare except
                            pass
                        if dt is None:
                            dt = u''  # robustness: .encode on None would raise
                        rowwiseData.append(dt.encode('ascii', 'xmlcharrefreplace'))
                # Cell values are bound parameters now (SQL-injection/quoting
                # fix); only the table name is interpolated, since DB-API
                # placeholders cannot name a table.
                sqlst = ('insert into ' + sheet_name +
                         '(external_link_label,topic_tree_location,'
                         'external_link_address,response,response_category,'
                         'ping_date_time,redirect_url,week_no) '
                         'values(%s,%s,%s,%s,%s,%s,%s,%s)')
                cursor.execute(sqlst, (rowwiseData[0], rowwiseData[1],
                                       rowwiseData[2], '', '', '', '', 3))
        # BUG FIX: explicit commit -- MySQLdb does not autocommit.
        conn.commit()
    finally:
        cursor.close()
        conn.close()
    return 'Done'
def csv_from_excel():
    """Import the 8/14/15 module-breakdown workbook (.xls) into MySQL.

    The first sheet (summary) is skipped.  Each remaining sheet name is mapped
    through a fixed sheet->table dictionary.  Data rows start at row 3 and
    data columns at column 1; column 3 holds the URL and gets an 'http://'
    prefix when no scheme is present.

    Returns:
        'Done' once all rows are inserted and committed.
    """
    fname = '../External_Links_Report_Module_Breakdown_with_Summary_8_14_15.xls'
    rcDict = getExcelData.getFileData(fname)
    # NOTE(review): hard-coded DB credentials; move to config/environment.
    conn = MySQLdb.connect(host='localhost', user='******', passwd='tas123',
                           db='Lexis_Link_Monitoring_week14')
    cursor = conn.cursor()
    # Sheet-name -> table-name map; table names are fixed, trusted identifiers.
    table_lst = {
        'B&F': 'banking_finance',
        'Business Law': 'business_law',
        'CA Bus Entity': 'california_business_entity_selection_formation',
        'Combined Bus Entity Selection': 'combined_california_general_business_law',
        'Bankruptcy': 'commercial_bankrtupcy',
        'Corp Counsel': 'corporate_counsel',
        'IP': 'intellectual_property',
        'L&E': 'labor_employment',
        'M&A': 'm_a',
        'NY B&C': 'ny_business_commercial',
        'Real Estate': 'real_estate',
        'S&CM': 'securities_capital_markets',
        'Texas B&C': 'texas_business_commercial',
    }
    try:
        # [1:] replaces the original manual skip-counter for the summary sheet.
        for sheet in sorted(rcDict)[1:]:
            sheetdata = rcDict[sheet]
            nrows = sheetdata['nrows']
            ncols = sheetdata['ncols']
            sheet_name = table_lst[sheetdata['sheet_name']]
            cell_dict = sheetdata['cell_dict']
            for row in range(3, nrows):  # data starts at row 3
                rowwiseData = []
                for col in range(1, ncols):  # column 0 is unused
                    data = cell_dict.get((row, col), None)
                    if data:
                        dt = data.get('data', None)
                        if col == 3:
                            # BUG FIX: substring test ('http://' not in dt)
                            # skipped prefixing whenever a scheme appeared
                            # anywhere in the string; check the prefix instead.
                            if not dt.startswith(('http://', 'https://')):
                                dt = 'http://' + dt
                        try:
                            dt = convert(dt)
                        except Exception:  # narrowed from a bare except
                            pass
                        if dt is None:
                            dt = u''  # robustness: .encode on None would raise
                        rowwiseData.append(dt.encode('ascii', 'xmlcharrefreplace'))
                # Values are bound parameters (SQL-injection/quoting fix);
                # only the trusted table name is interpolated.
                sqlst = ('insert into ' + sheet_name +
                         '(external_link_label,topic_tree_location,'
                         'external_link_address,response,response_category,'
                         'ping_date_time,redirect_url) '
                         'values(%s,%s,%s,%s,%s,%s,%s)')
                cursor.execute(sqlst, (rowwiseData[0], rowwiseData[1],
                                       rowwiseData[2], '', '', '', ''))
        # BUG FIX: explicit commit -- MySQLdb does not autocommit.
        conn.commit()
    finally:
        cursor.close()
        conn.close()
    return 'Done'
def csv_from_excel(self):
    """Import ../process.xls into MySQL, advancing each table's week number.

    Skips the first (summary) sheet, maps sheet names to tables, reads the
    highest existing week_no per table and inserts new rows with week_no + 1.
    Deletes ../process.xls on success.

    Returns:
        'Done' once all rows are inserted and committed.
    """
    fname = '../process.xls'
    rcDict = getExcelData.getFileData(fname)
    # NOTE(review): hard-coded DB credentials; move to config/environment.
    conn = MySQLdb.connect(host='localhost', user='******', passwd='tas123',
                           db='Lexis_Link_Monitoring_New')
    cursor = conn.cursor()
    # Sheet-name -> table-name map; table names are fixed, trusted identifiers.
    table_lst = {
        'B&F': 'banking_finance',
        'Business Law': 'business_law',
        'CA Bus Entity': 'california_business_entity_selection_formation',
        'Combined Bus Entity Selection': 'combined_california_general_business_law',
        'Bankruptcy': 'commercial_bankrtupcy',
        'Corp Counsel': 'corporate_counsel',
        'IP': 'intellectual_property',
        'L&E': 'labor_employment',
        'M&A': 'm_a',
        'NY B&C': 'ny_business_commercial',
        'Real Estate': 'real_estate',
        'S&CM': 'securities_capital_markets',
        'Texas B&C': 'texas_business_commercial',
    }
    try:
        # [1:] replaces the original skip_var counter for the summary sheet.
        for sheet in sorted(rcDict)[1:]:
            sheetdata = rcDict[sheet]
            nrows = sheetdata['nrows']
            ncols = sheetdata['ncols']
            sheet_name = table_lst[sheetdata['sheet_name']]
            # Table name is a trusted identifier from table_lst above.
            cursor.execute('select week_no from %s order by week_no desc limit 1'
                           % sheet_name)
            weekLst = cursor.fetchall()
            last_week_no = weekLst[0][0] if weekLst else 0  # 0 for empty table
            next_week_no = int(last_week_no) + 1
            cell_dict = sheetdata['cell_dict']
            for row in range(3, nrows):  # data starts at row 3
                rowwiseData = []
                for col in range(1, ncols):  # column 0 is unused
                    data = cell_dict.get((row, col), None)
                    if data:
                        dt = data.get('data', None)
                        try:
                            dt = self.convert(dt)
                        except Exception:  # narrowed from a bare except
                            pass
                        if dt is None:
                            dt = u''  # robustness: .encode on None would raise
                        rowwiseData.append(dt.encode('ascii', 'xmlcharrefreplace'))
                # Values are bound parameters (SQL-injection/quoting fix).
                sqlst = ('insert into ' + sheet_name +
                         '(external_link_label,topic_tree_location,'
                         'external_link_address,response,response_category,'
                         'ping_date_time,redirect_url,week_no) '
                         'values(%s,%s,%s,%s,%s,%s,%s,%s)')
                cursor.execute(sqlst, (rowwiseData[0], rowwiseData[1],
                                       rowwiseData[2], '', '', '', '',
                                       next_week_no))
        # BUG FIX: explicit commit -- MySQLdb does not autocommit.
        conn.commit()
    finally:
        cursor.close()
        conn.close()
    # Remove the processed file only after a successful commit.
    os.remove('../process.xls')
    return 'Done'
def csv_from_excel():
    """Import the UK practice-area workbook into LN_UK_Practice_Area_new.

    Reads every sheet of ../LN_UK_Practice_Area.xls; rows after the header are
    inserted into the fixed table 'LN_UK_Practice_Area_new'.  Column 2 holds
    the URL and gets an 'http://' prefix when no scheme is present.

    Returns:
        'Done' once all rows are inserted and committed.
    """
    fname = '../LN_UK_Practice_Area.xls'
    rcDict = getExcelData.getFileData(fname)
    # NOTE(review): hard-coded DB credentials; move to config/environment.
    conn = MySQLdb.connect(host='localhost', user='******', passwd='tas123',
                           db='Lexis_Link_Monitoring_week5')
    cursor = conn.cursor()
    sheet_name = 'LN_UK_Practice_Area_new'  # fixed target table
    try:
        for sheet in sorted(rcDict):
            sheetdata = rcDict[sheet]
            nrows = sheetdata['nrows']
            ncols = sheetdata['ncols']
            cell_dict = sheetdata['cell_dict']
            for row in range(1, nrows):  # skip header row 0
                rowwiseData = []
                for col in range(0, ncols):
                    data = cell_dict.get((row, col), None)
                    if data:
                        dt = data.get('data', None)
                        if col == 2:
                            # BUG FIX: substring test skipped prefixing when a
                            # scheme appeared anywhere in the value; check the
                            # prefix instead.
                            if not dt.startswith(('http://', 'https://')):
                                dt = 'http://' + dt
                        try:
                            dt = str(dt)
                        except Exception:  # narrowed from a bare except
                            pass
                        try:
                            dt = convert(dt)
                        except Exception:  # narrowed from a bare except
                            pass
                        if dt is None:
                            dt = u''  # robustness: .encode on None would raise
                        rowwiseData.append(dt.encode('ascii', 'xmlcharrefreplace'))
                # Values are bound parameters (SQL-injection/quoting fix);
                # only the fixed table name is interpolated.
                sqlst = ('insert into ' + sheet_name +
                         '(excel_id,title,external_link_address,practice_area,'
                         'content_type,location,update_date,page_type,response,'
                         'response_category,ping_date_time,redirect_url) '
                         'values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)')
                cursor.execute(sqlst, (rowwiseData[0], rowwiseData[1],
                                       rowwiseData[2], rowwiseData[3],
                                       rowwiseData[4], rowwiseData[5],
                                       rowwiseData[6], rowwiseData[7],
                                       '', '', '', ''))
        # BUG FIX: explicit commit -- MySQLdb does not autocommit.
        conn.commit()
    finally:
        cursor.close()
        conn.close()
    return 'Done'
def csv_from_excel(self):
    """Import ../process.xlsx into MySQL, advancing each table's week number.

    Each sheet maps to a table of the same name; the highest existing week_no
    is read per table and new rows are inserted with week_no + 1.

    Returns:
        'Done' once all rows are inserted and committed.
    """
    fname = '../process.xlsx'
    rcDict = getExcelData.getFileData(fname)
    # NOTE(review): hard-coded DB credentials; move to config/environment.
    conn = MySQLdb.connect(host='localhost', user='******', passwd='tas123',
                           db='Lexis_Link_Monitoring_New')
    cursor = conn.cursor()
    try:
        for sheet in sorted(rcDict):
            sheetdata = rcDict[sheet]
            nrows = sheetdata['nrows']
            ncols = sheetdata['ncols']
            sheet_name = sheetdata['sheet_name']
            # NOTE(review): sheet_name comes straight from the workbook and is
            # interpolated as an identifier (DB-API placeholders cannot name a
            # table) -- validate it against a whitelist upstream.
            cursor.execute('select week_no from %s order by week_no desc limit 1'
                           % sheet_name)
            weekLst = cursor.fetchall()
            last_week_no = weekLst[0][0] if weekLst else 0  # 0 for empty table
            next_week_no = int(last_week_no) + 1
            cell_dict = sheetdata['cell_dict']
            for row in range(1, nrows):  # skip header row 0
                rowwiseData = []
                for col in range(0, ncols):
                    data = cell_dict.get((row, col), None)
                    if data:
                        dt = data.get('data', None)
                        try:
                            dt = self.convert(dt)
                        except Exception:  # narrowed from a bare except
                            pass
                        if dt is None:
                            dt = u''  # robustness: .encode on None would raise
                        rowwiseData.append(dt.encode('ascii', 'xmlcharrefreplace'))
                # Values are bound parameters (SQL-injection/quoting fix).
                sqlst = ('insert into ' + sheet_name +
                         '(external_link_label,topic_tree_location,'
                         'external_link_address,response,response_category,'
                         'ping_date_time,redirect_url,week_no) '
                         'values(%s,%s,%s,%s,%s,%s,%s,%s)')
                cursor.execute(sqlst, (rowwiseData[0], rowwiseData[1],
                                       rowwiseData[2], '', '', '', '',
                                       next_week_no))
        # BUG FIX: explicit commit -- MySQLdb does not autocommit.
        conn.commit()
    finally:
        cursor.close()
        conn.close()
    return 'Done'
def csv_from_excel():
    """Import the 8/14/15 module-breakdown workbook into MySQL (week-14 DB).

    Duplicate of the importer above: skips the summary sheet, maps sheet names
    to tables, reads data from row 3 / column 1 onward, and normalizes the URL
    in column 3 with an 'http://' prefix when no scheme is present.

    Returns:
        'Done' once all rows are inserted and committed.
    """
    fname = '../External_Links_Report_Module_Breakdown_with_Summary_8_14_15.xls'
    rcDict = getExcelData.getFileData(fname)
    # NOTE(review): hard-coded DB credentials; move to config/environment.
    conn = MySQLdb.connect(host='localhost', user='******', passwd='tas123',
                           db='Lexis_Link_Monitoring_week14')
    cursor = conn.cursor()
    # Sheet-name -> table-name map; table names are fixed, trusted identifiers.
    table_lst = {
        'B&F': 'banking_finance',
        'Business Law': 'business_law',
        'CA Bus Entity': 'california_business_entity_selection_formation',
        'Combined Bus Entity Selection': 'combined_california_general_business_law',
        'Bankruptcy': 'commercial_bankrtupcy',
        'Corp Counsel': 'corporate_counsel',
        'IP': 'intellectual_property',
        'L&E': 'labor_employment',
        'M&A': 'm_a',
        'NY B&C': 'ny_business_commercial',
        'Real Estate': 'real_estate',
        'S&CM': 'securities_capital_markets',
        'Texas B&C': 'texas_business_commercial',
    }
    try:
        # [1:] replaces the original manual skip-counter for the summary sheet.
        for sheet in sorted(rcDict)[1:]:
            sheetdata = rcDict[sheet]
            nrows = sheetdata['nrows']
            ncols = sheetdata['ncols']
            sheet_name = table_lst[sheetdata['sheet_name']]
            cell_dict = sheetdata['cell_dict']
            for row in range(3, nrows):  # data starts at row 3
                rowwiseData = []
                for col in range(1, ncols):  # column 0 is unused
                    data = cell_dict.get((row, col), None)
                    if data:
                        dt = data.get('data', None)
                        if col == 3:
                            # BUG FIX: substring test skipped prefixing when a
                            # scheme appeared anywhere in the string.
                            if not dt.startswith(('http://', 'https://')):
                                dt = 'http://' + dt
                        try:
                            dt = convert(dt)
                        except Exception:  # narrowed from a bare except
                            pass
                        if dt is None:
                            dt = u''  # robustness: .encode on None would raise
                        rowwiseData.append(dt.encode('ascii', 'xmlcharrefreplace'))
                # Values are bound parameters (SQL-injection/quoting fix).
                sqlst = ('insert into ' + sheet_name +
                         '(external_link_label,topic_tree_location,'
                         'external_link_address,response,response_category,'
                         'ping_date_time,redirect_url) '
                         'values(%s,%s,%s,%s,%s,%s,%s)')
                cursor.execute(sqlst, (rowwiseData[0], rowwiseData[1],
                                       rowwiseData[2], '', '', '', ''))
        # BUG FIX: explicit commit -- MySQLdb does not autocommit.
        conn.commit()
    finally:
        cursor.close()
        conn.close()
    return 'Done'
def csv_from_excel(self):
    """Import ../process.xls into MySQL, advancing each table's week number.

    Duplicate of the method above: skips the summary sheet, maps sheet names
    to tables via a fixed dictionary, computes next week_no per table, inserts
    rows from row 3 / column 1 onward, then deletes the processed file.

    Returns:
        'Done' once all rows are inserted and committed.
    """
    fname = '../process.xls'
    rcDict = getExcelData.getFileData(fname)
    # NOTE(review): hard-coded DB credentials; move to config/environment.
    conn = MySQLdb.connect(host='localhost', user='******', passwd='tas123',
                           db='Lexis_Link_Monitoring_New')
    cursor = conn.cursor()
    # Sheet-name -> table-name map; table names are fixed, trusted identifiers.
    table_lst = {
        'B&F': 'banking_finance',
        'Business Law': 'business_law',
        'CA Bus Entity': 'california_business_entity_selection_formation',
        'Combined Bus Entity Selection': 'combined_california_general_business_law',
        'Bankruptcy': 'commercial_bankrtupcy',
        'Corp Counsel': 'corporate_counsel',
        'IP': 'intellectual_property',
        'L&E': 'labor_employment',
        'M&A': 'm_a',
        'NY B&C': 'ny_business_commercial',
        'Real Estate': 'real_estate',
        'S&CM': 'securities_capital_markets',
        'Texas B&C': 'texas_business_commercial',
    }
    try:
        # [1:] replaces the original skip_var counter for the summary sheet.
        for sheet in sorted(rcDict)[1:]:
            sheetdata = rcDict[sheet]
            nrows = sheetdata['nrows']
            ncols = sheetdata['ncols']
            sheet_name = table_lst[sheetdata['sheet_name']]
            # Table name is a trusted identifier from table_lst above.
            cursor.execute('select week_no from %s order by week_no desc limit 1'
                           % sheet_name)
            weekLst = cursor.fetchall()
            last_week_no = weekLst[0][0] if weekLst else 0  # 0 for empty table
            next_week_no = int(last_week_no) + 1
            cell_dict = sheetdata['cell_dict']
            for row in range(3, nrows):  # data starts at row 3
                rowwiseData = []
                for col in range(1, ncols):  # column 0 is unused
                    data = cell_dict.get((row, col), None)
                    if data:
                        dt = data.get('data', None)
                        try:
                            dt = self.convert(dt)
                        except Exception:  # narrowed from a bare except
                            pass
                        if dt is None:
                            dt = u''  # robustness: .encode on None would raise
                        rowwiseData.append(dt.encode('ascii', 'xmlcharrefreplace'))
                # Values are bound parameters (SQL-injection/quoting fix).
                sqlst = ('insert into ' + sheet_name +
                         '(external_link_label,topic_tree_location,'
                         'external_link_address,response,response_category,'
                         'ping_date_time,redirect_url,week_no) '
                         'values(%s,%s,%s,%s,%s,%s,%s,%s)')
                cursor.execute(sqlst, (rowwiseData[0], rowwiseData[1],
                                       rowwiseData[2], '', '', '', '',
                                       next_week_no))
        # BUG FIX: explicit commit -- MySQLdb does not autocommit.
        conn.commit()
    finally:
        cursor.close()
        conn.close()
    # Remove the processed file only after a successful commit.
    os.remove('../process.xls')
    return 'Done'