def backup_druginfo(self, filename, columns, **renames):
    """Dump the SPB_DRUG_MST table to an Excel file.

    columns: column names to keep in the output sheet.
    renames: old_name=new_name pairs applied before selecting columns.
    """
    query = "SELECT * FROM SPB_DRUG_MST"
    self.cursor.execute(query)
    records = [row for row in self.cursor.fetchall()]
    if records:
        # column names reported by the DB-API cursor (currently unused)
        column = [row[0] for row in self.cursor.description]
        lst = Listorm(records)
        lst.rename(**renames)
        lst.to_excel(filename, selects=columns)
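# Hypothetical usage sketch (not part of the original module): assumes that
# backup_druginfo lives on a class wrapping a DB-API connection; the file
# name, column list, and rename pair below are illustrative assumptions.
def _example_backup(db):
    """Export selected columns of SPB_DRUG_MST with DRUG_CD renamed to edi_code."""
    db.backup_druginfo(
        'drug_master_backup.xlsx',
        columns=['edi_code', 'DRUG_NM'],  # columns kept in the output sheet
        DRUG_CD='edi_code',               # **renames: old_name=new_name
    )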
import os
import platform

from listorm import Listorm
from tqdm import tqdm

# DrugInfoAPI, open_record_file and create_img_html are project-local helpers
# assumed to be imported elsewhere in this module.


def query_save_to(user_id, password, keywords, public_ip, headers, _file,
                  start=True, oneByone=True, distinct=True, detail=True,
                  append=None, to_html=None):
    """Search drug info for the given keywords and save the results to Excel/HTML."""
    if isinstance(keywords, str):
        keywords = [keywords]

    # Records collected in a previous run; their ids are excluded from the new search.
    append_lst = open_record_file(append)
    excludes = append_lst.unique('id') if len(append_lst) > 0 else []

    dg = DrugInfoAPI(user_id, password, public_ip, headers)
    results = []
    if oneByone:
        results = dg.search_one_by_one(keywords, detail=detail, exclude_ids=excludes)
    else:
        # Query keywords in batches of 50, joined into a single search string.
        length, step = len(keywords), 50
        todo_range = list(range(0, length, step))
        iter_range = tqdm(todo_range, total=len(todo_range))
        for page in iter_range:
            keyword = ' '.join(keywords[page:page + step])
            results += dg.search(keyword, detail=detail)

    lst = Listorm(results) + append_lst
    if distinct:
        lst.distinct('id')

    if to_html:
        create_img_html(lst, _file=to_html, start=start)

    if _file or append:
        if start and platform.system() == 'Windows':
            try:
                lst.to_excel(append or _file)
                os.startfile(append or _file)
                return
            except Exception:
                for row in lst:
                    print('Exception Occurred')
                    # print(row)
                return
        else:
            lst.to_excel(append or _file)
    return lst
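# Hypothetical call sketch for query_save_to (illustrative only): the keyword
# list and output file name are assumptions; credentials, IP and headers are
# passed through unchanged.
def _example_query_save(user_id, password, public_ip, headers):
    """Collect detailed results for two keywords and write them to Excel."""
    return query_save_to(
        user_id, password,
        keywords=['aspirin', 'acetaminophen'],
        public_ip=public_ip,
        headers=headers,
        _file='druginfo_results.xlsx',
        start=False,      # skip os.startfile so the sketch is not Windows-only
        oneByone=True,    # query each keyword separately, fetching detail pages
    )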
def queryset_to_file(queryset, filename, selects=None, to='excel'):
    """Serialize a Django queryset into an Excel or CSV download response."""
    records = queryset.values(*selects) if selects else queryset.values()
    lst = Listorm(records, nomalize=False)
    if to == 'excel':
        content = lst.to_excel()
    else:
        content = lst.to_csv()
    return file_response(content, filename)
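# Hypothetical Django view sketch showing how queryset_to_file might be used;
# the field names in selects are assumptions about the model, not the real schema.
def _example_export_view(queryset):
    """Return the given queryset as an Excel download response."""
    return queryset_to_file(queryset, 'drugs.xlsx',
                            selects=['id', 'name', 'edi_code'], to='excel')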
from pprint import pprint

from listorm import read_excel, Listorm

try:
    from health import *
    from utils import get_edi_code_from_xl
except ImportError:
    from .health import *
    from .utils import get_edi_code_from_xl

test_file = '약품정보.xls'  # "drug information" sample workbook (not used below)

edis = ['644900310', '648900030', '641600030']
print('total edi count: ', len(edis))

records = []
for i, edi in enumerate(edis):
    print('\tgetting detail urls for {}({}/{})...'.format(edi, i + 1, len(edis)))
    soup = get_search_list_soup(edi)
    detail_urls = get_detail_urls(soup)
    for url in detail_urls:
        print('\t\tparsing detail: {}...'.format(url))
        detail_soup = get_detail_soup(url)
        record = parse_detail_soup(detail_soup)
        records.append(record)

lst = Listorm(records)
lst.to_excel('test_result.xlsx')