def cutter(fn):
    """Split *fn* into fixed-size pieces written to temp files.

    Reads the source in blocks of INT_PER_FILE 4-byte integers and
    yields the name of each temp file (created with delete=False in
    TMP_DIR) as soon as it has been written and closed.
    """
    chunk_size = INT_PER_FILE * 4  # bytes: 4 bytes per 'i'-typed integer
    with open(fn, 'rb') as src:
        while True:
            piece = src.read(chunk_size)
            if not piece:
                break
            # delete=False: the caller owns the temp file from here on
            with tmpf('wb', delete=False, dir=TMP_DIR) as out:
                out.write(piece)
            yield out.name
def merge_files(fn_list):
    '''merge files in fn_list, return filename of resulting temp file

    Performs a k-way merge of the (already sorted) integer streams
    produced by file_reader() and writes the merged integers to a new
    temp file in INT_PER_CHUNK-sized buffered batches.
    Returns the temp file's name.
    '''
    it = [file_reader(fn) for fn in fn_list]
    with tmpf('wb', delete=False, dir=TMP_DIR) as f:
        # Fixed-size array buffer filled by index: much quicker than
        # c.append() + len(c) bookkeeping per element.
        c = array.array('i', [0] * INT_PER_CHUNK)
        cidx = 0
        for i in heapq.merge(*it):
            c[cidx] = i
            cidx += 1
            if cidx == INT_PER_CHUNK:
                c.tofile(f.file)
                # No need to reallocate the buffer: every slot is
                # overwritten before the next flush, and the final
                # write below only takes the first cidx elements.
                cidx = 0
        # Flush the partial tail. Slicing an array yields an array,
        # so this avoids the per-element Python-level copy.
        c[:cidx].tofile(f.file)
    return f.name
def write_data(_app: object, mo: str, year: int, month: int, pack: str,
               check: bool, sent: bool, fresh: bool, xmldir: str,
               stom=False, nusl='') -> tuple:
    """Build (or only validate) a monthly XML pack from billing records.

    # _app: obj, current app FLASK object (unused here except history)
    # mo: string(3) head MO code
    # year: int year 4 digit
    # month: int month 2 digits
    # pack: string(2), pack number
    # check: if TRUE -> check tables recs only, don't make xml pack
    #        else make zip xml pack and fill error_pack table, dont make error_pack
    # sent: bool if true set records field talon_type = 2 else ignore
    # fresh: bool if true ignore already sent and accepted records else not make full pack
    # xmldir: string working xml directory
    # stom: bool for use stom pmu table in the prosess
    # nusl: string for AND condition of SQL SELECT statement or ''

    Returns a 4-tuple:
      check mode:      (record_count, unique_persons, error_csv_path_or_'', errors)
      no records:      (0, 0, '', errors)
      pack produced:   (record_count, unique_persons, zip_path, errors)
    """
    #qonn = _app.config.db()
    # Two cursors on the request-scoped connection: qurs drives the main
    # record loop, qurs1 runs the per-record lookups/updates inside it.
    qurs = g.qonn.cursor(cursor_factory=psycopg2.extras.NamedTupleCursor)
    qurs1 = g.qonn.cursor(cursor_factory=psycopg2.extras.NamedTupleCursor)
    # Per-section writers/accumulators for the three XML files of a pack.
    pmSluch = PmSluch(mo)
    hmZap = HmZap(mo)
    lmPers = LmPers(mo)
    #pmFile= open( f'{xmldir}pm.xml', 'r+')
    #hmFile= open( f'{xmldir}hm.xml', 'r+')
    #lmFile= open( f'{xmldir}lm.xml', 'r+')
    # NOTE(review): backslash separator assumes a Windows xmldir — confirm.
    errFname = f'{xmldir}\\error_pack_{time.time()}.csv'
    # CSV error report exists only in check mode; None otherwise.
    errorFile = open(errFname, 'w') if check else None
    qurs1.execute(_sql.truncate_errors)
    g.qonn.commit()
    # make pack anyway if not check, just ignore all errors
    # Temp files for the three XML sections are only needed when actually
    # building a pack (not in check mode).
    pmFile = tmpf(mode="r+") if not check else None
    hmFile = tmpf(mode="r+") if not check else None
    # NOTE(review): '1251' encoding for the persons file — presumably
    # required by the receiving system; verify.
    lmFile = tmpf(mode="r+", encoding='1251') if not check else None
    # Two-digit year used throughout the per-record SQL.
    ya = int(str(year)[2:])
    query = _sql.get_hpm_data % ya
    if fresh:
        # talon_type == 1 only will be included in pack
        query = f'{query}{_sql.fresh}'
    else:
        # type > 0
        query = f'{query}{_sql.all_tal}'
    query = f'{query}{_sql.month}'
    qurs.execute(query, (month, ))
    rc = errors = 0
    for rdata in qurs:
        _nmo = get_npr_mo(qurs1, rdata)
        qurs1.execute(_sql.get_usl, ( ya, ya, rdata.idcase, ))
        _usl = qurs1.fetchall()
        # special usl for posesh/obrasch (visit/consultation services)
        qurs1.execute(_sql.get_spec_usl, ( ya, rdata.idcase, ))
        _usp = qurs1.fetchone()
        _stom = list()
        if stom:
            # Optional dental (stom) services for this case.
            qurs1.execute(_sql.get_stom, (rdata.idcase, ))
            _stom = qurs1.fetchall()
        try:
            _data = DataObject(rdata, mo, _nmo)
            # Write the record into each of the three sections; any
            # validation failure raises and is recorded below.
            write_sluch(check, pmData(_data), pmFile, pmSluch, _usl, _usp,
                        _stom)
            write_zap(check, hmData(_data), hmFile, hmZap, _usl, _usp)
            write_pers(check, lmData(_data), lmFile, lmPers)
        except Exception as e:
            if errorFile:
                errorFile.write(f'{rdata.card}-{e}\n')
            print(e)
            # NOTE(review): assumes the exception message contains a '-'
            # separator; otherwise split('-')[1] raises IndexError — confirm
            # the error-message convention of the write_* helpers.
            qurs1.execute(_sql.set_error,
                          (rdata.idcase, rdata.card, str(e).split('-')[1]))
            errors += 1
            continue  # record skipped; it is not counted in rc
        # mark as sent
        if not check and sent:
            qurs1.execute(_sql.mark_as_sent, (ya, rdata.idcase))
        rc += 1
    # check data routine
    if check:  # errors > 0 and check:
        # return error_pack file
        errorFile.close()
        qurs.close()
        qurs1.close()
        g.qonn.commit()
        if not bool(errors):
            # No errors: drop the (empty) CSV and return '' as its path.
            os.remove(errFname)
            errFname = ''
        return rc, len(lmPers.uniq), errFname, errors
    # no right records found
    if not bool(rc):
        for f in (hmFile, pmFile, lmFile):
            if f:
                f.close()
        qurs.close()
        qurs1.close()
        g.qonn.commit()
        return rc, rc, '', errors
    # make zip file anyway and return it
    to_zip = []
    for f, h in ((hmFile, HmHdr), (pmFile, PmHdr), (lmFile, LmHdr)):
        # Rewind each section's temp file so write_hdr can prepend the
        # header and copy the body into the final XML file.
        f.seek(0)
        to_zip.append(
            write_hdr(h, mo, year, month, pack, xmldir, f, sd_z=rc,
                      summ='0.00'))
        f.close()
    _p = HdrMix(mo, year, month, pack)
    # chdir so the zip entries carry bare filenames, not full paths.
    os.chdir(xmldir)
    zfile = f'{_p.pack_name}'
    with zipfile.ZipFile(zfile, 'w',
                         compression=zipfile.ZIP_DEFLATED) as zipH:
        for f in to_zip:
            zipH.write(f)
    # _app.logger.debug(lmPers.dubl)
    qurs.close()
    qurs1.close()
    g.qonn.commit()
    # qonn.close()
    return rc, len(lmPers.uniq), os.path.join(xmldir, zfile), errors