import json

from django.conf import settings


def _create_rows(ie, reader):
    """Persist the reader's rows for an import event, bulk-creating them in
    batches of settings.IMPORT_BATCH_SIZE to bound memory use."""
    RowModel = get_import_row_model(ie.import_type)
    rows = []
    idx = 0
    for row in reader:
        data = clean_row_data(row)
        # Skip rows whose values are all blank, but still advance idx so
        # stored row indices match positions in the source file
        if any(data.values()):
            rows.append(RowModel(data=json.dumps(data),
                                 import_event=ie, idx=idx))
        idx += 1
        # Flush a batch each time idx reaches a multiple of the batch size
        if idx % settings.IMPORT_BATCH_SIZE == 0:
            RowModel.objects.bulk_create(rows)
            rows = []
    if rows:
        RowModel.objects.bulk_create(rows)  # create final partial batch
def get_import_export(import_type, import_event_id):
    """Return the cleaned row dicts stored for an existing import event."""
    ie = _get_import_event(import_type, import_event_id)
    return [clean_row_data(json.loads(row.data)) for row in ie.rows()]
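
def _example_csv_import(ie, csv_path):
    """Usage sketch, not part of the original module: demonstrates the round
    trip of batch-creating rows from a file and reading the cleaned dicts
    back out. The `csv_path` parameter and the use of csv.DictReader as the
    reader are assumptions; `ie` is assumed to be a saved import event model
    with `import_type` and `pk` attributes."""
    import csv

    with open(csv_path, newline='') as f:
        # DictReader yields one dict per row, the shape clean_row_data expects
        _create_rows(ie, csv.DictReader(f))
    return get_import_export(ie.import_type, ie.pk)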