def run_migration():
    migrator = SqliteMigrator(db)
    with db.atomic():
        migrate(
            migrator.drop_column('note', 'reminder_task_created'),
        )
    FTSNote.create_table(True)
    with db.atomic():
        for note in Note.select():
            FTSNote.store_note(note)

def process_from_csv(input_file):
    stream = io.StringIO(input_file.read().decode('utf-8'))
    csvfile = csv.reader(stream)
    inserted = 0
    invalid = 0
    duplicated = 0
    records = []
    for row in csvfile:
        try:
            cmp = row[0].strip().upper()
            if len(cmp) == 6:
                with db.atomic():
                    doctor = Doctor.create(id=cmp)

                    # Add to process queue
                    result = default_queue.enqueue(scrap_cmp, doctor)

                inserted += 1
            else:
                invalid += 1
        except peewee.IntegrityError:
            duplicated += 1
        except Exception as e:
            invalid += 1
    return inserted, duplicated, invalid

def process_from_csv(input_file):
    stream = io.StringIO(input_file.read().decode('utf-8'))
    csvfile = csv.reader(stream)
    inserted = 0
    invalid = 0
    duplicated = 0
    records = []
    for row in csvfile:
        try:
            document_number = row[0].strip().upper()
            if len(document_number) == 8:
                with db.atomic():
                    graduate = Graduate.create(id=document_number)

                    # Add to process queue
                    result = default_queue.enqueue(scrap_document_number, graduate)

                inserted += 1
            else:
                invalid += 1
        except peewee.IntegrityError:
            duplicated += 1
        except Exception as e:
            invalid += 1
    return inserted, duplicated, invalid

def save(self, *args, **kwargs):
    # Split out the text content and any tasks.
    self.content, tasks = self.parse_content_tasks()

    # Update the timestamp.
    self.timestamp = datetime.datetime.now()

    # Save the note.
    with db.atomic():
        ret = super(Note, self).save(*args, **kwargs)

        # Store the tasks in the database.
        if tasks:
            Task.delete().where(Task.note == self).execute()
            for idx, (finished, title) in enumerate(tasks):
                Task.create(
                    note=self,
                    finished=finished,
                    title=title,
                    order=idx)

        # Store the content for full-text search.
        FTSNote.store_note(self)

    return ret

def populate_entities(entities_list):
    with db.atomic():
        for i, entity in enumerate(entities_list):
            try:
                entity_name = "{}.{}".format(
                    entity["inchikey"].replace("InChIKey=", ""), "json")
                if i % 100 == 0:
                    print(entity_name, "populating", i)
                ClassyFireEntity.get_or_create(
                    inchikey=entity_name,
                    responsetext=json.dumps(entity),
                    status="DONE")
            except KeyboardInterrupt:
                raise
            except:
                print("ERROR", entity)

def make_diffs(self):
    from models import PayRecord, PayDiff
    from app import db

    prev_rex = self._get_previous_records()
    if not prev_rex:
        return
    for current_rec in self.records:
        qry = prev_rex.select().where(PayRecord.employee == current_rec.employee)
        if not qry.exists():
            continue
        rec_diffs = PayRecord.get_diffs(prev_rex.get(), current_rec)
        if rec_diffs:
            with db.atomic():
                PayDiff.insert_many(rec_diffs).execute()

def import_data():
    path = 'c:\\bench\\payrun\\pyvista\\fms\\tests\\run'
    runs = [
        {'ien': '559', 'fy': 13, 'pp': 15, 'cp': '015'},
        {'ien': '559', 'fy': 13, 'pp': 15, 'cp': '016'},
        {'ien': '560', 'fy': 13, 'pp': 16, 'cp': '015'},
        {'ien': '560', 'fy': 13, 'pp': 16, 'cp': '016'},
        {'ien': '616', 'fy': 15, 'pp': 20, 'cp': '015'},
        {'ien': '616', 'fy': 15, 'pp': 20, 'cp': '016'},
        {'ien': '617', 'fy': 15, 'pp': 21, 'cp': '015'},
        {'ien': '617', 'fy': 15, 'pp': 21, 'cp': '016'},
        {'ien': '618', 'fy': 15, 'pp': 22, 'cp': '015'},
        {'ien': '618', 'fy': 15, 'pp': 22, 'cp': '016'},
        {'ien': '619', 'fy': 15, 'pp': 23, 'cp': '015'},
        {'ien': '619', 'fy': 15, 'pp': 23, 'cp': '016'}
    ]
    with db.atomic():
        PayRun.insert_many(runs).execute()
    runs = [run for run in PayRun.select()]
    for run in runs:
        print('%d: %s: %s' % (run.id, run.ien, str(run)))
        filename = path + run.ien + '_' + run.cp + '.txt'
        f = open(filename)
        dta = f.readlines()
        f.close()
        for line in dta:
            if line[0].isdigit():
                emp = make_emp(line, run.cp)
                rec = make_rec(line.rstrip(), run.id, emp.id)
        prev_run_id = run.get_previous_run_id()
        if prev_run_id:
            print('getting rex...')
            rex = [rec for rec in run.records]
            print('%d rex' % len(rex))
            for rec in rex:
                print('rec: %d' % rec.id)
                rec.make_diffs(prev_run_id)

def add_match(match_id):
    """Queries a match from the Steam API and adds it to the database.

    Returns True if it succeeds or if the match already exists, or False if it
    fails.
    """
    match = api.get_match_details(match_id)["result"]
    if "error" in match:
        return False
    try:
        new_match = Match.create(**match)
    except IntegrityError:
        return True
    except PyDotaError:
        return False
    player_list = [dict(match_id=new_match.match_id, **player)
                   for player in match["players"]]
    with db.atomic():
        MatchPlayer.insert_many(player_list).execute()
    return True

def make_diffs(self, prev_run_id):
    from app import db, diff_fields
    from models import PayDiff

    prev_rec = self._get_previous_record(prev_run_id)
    if not prev_rec:
        return
    diffs = []
    for field in diff_fields[3:]:
        curval = getattr(self, field)
        preval = getattr(prev_rec, field)
        if curval != preval:
            diffs.append({
                'payrun': self.payrun,
                'record': self,
                'field_name': field,
                'current_amount': curval,
                'previous_amount': preval
            })
    if diffs:
        with db.atomic():
            PayDiff.insert_many(diffs).execute()

def process_from_csv(input_file):
    stream = io.StringIO(input_file.read().decode('utf-8'))
    csvfile = csv.reader(stream, delimiter=';')
    inserted = 0
    invalid = 0
    duplicated = 0
    records = []
    for row in csvfile:
        try:
            document_ruc = row[0].strip().upper()
            document_dni = row[1].strip().upper()
            if len(document_ruc) == 11 and (len(document_dni) > 7 and len(document_dni) < 10):
                with db.atomic():
                    rrll_claro = RRLL.create(ruc=document_ruc, dni=document_dni, provider='CLARO')
                    rrll_movistar = RRLL.create(ruc=document_ruc, dni=document_dni, provider='MOVISTAR')

                    # Add to process queue
                    result = default_queue.enqueue(scrap_claro_lines, rrll_claro)
                    result = default_queue.enqueue(scrap_movistar_lines, rrll_movistar)

                inserted += 1
            else:
                invalid += 1
        except peewee.IntegrityError:
            duplicated += 1
        except Exception as e:
            print(e)
            invalid += 1
    return inserted, duplicated, invalid

def do_import(quotes):
    lst = create_insert_list(quotes)
    with db.atomic():
        Quote.insert_many(lst).execute()

import csv
import os
import time

from models import City
from app import db

City.create_table(True)

i = 0
data_source = []
print(City.delete().execute())

with open("cp.csv", "r", encoding="utf-8") as csvfile:
    spamreader = csv.reader(csvfile, delimiter=";")
    for row in spamreader:
        data_source.append({
            "insee": row[0],
            "name": row[1],
            "postal_code": row[2],
            "label": row[3]
        })
        i += 1
        if len(data_source) > 100:
            print("+1")
            with db.atomic():
                City.insert_many(data_source).execute()
            data_source = []
            time.sleep(0.02)

# Insert any remaining rows that did not fill a complete batch.
if data_source:
    with db.atomic():
        City.insert_many(data_source).execute()