def test_good_body_patch(self, client, db_session, file, values_list, filepath):
    # The file does not exist yet, so the info endpoint must return 404.
    assert client.get(url_for('data_handler.file_info', fileid=1)).status_code == 404

    db_session.add(file)
    db_session.commit()
    assert db_session.query(Files).all() == [file]

    # Attach a batch of Data samples to the file.
    samples = []
    data_schema = DataSchema()
    for values in values_list:
        values['fileid'] = file.fileid
        sample = data_schema.load(values, session=db_session, partial=True).data
        assert isinstance(sample, Data)
        samples.append(sample)
        db_session.add(sample)
    db_session.commit()
    assert db_session.query(Data).all()
    assert db_session.query(Files).all()

    # PATCH the file contents; the handler should ingest the rows from filepath.
    patch_resp = client.patch(
        url_for('data_handler.update_file', fileid=file.fileid),
        data={'file': (filepath, os.path.basename(filepath))})
    assert patch_resp.status_code == 204
    assert db_session.query(Files).all()
    assert db_session.query(Data).all()

    get_resp = client.get(url_for('data_handler.file_info', fileid=file.fileid))
    assert get_resp.status_code == 200
    # One line of the uploaded CSV is the header, hence the "- 1".
    assert get_resp.get_json()['data_count'] == count_lines(filepath) - 1 + len(values_list)
    assert get_resp.get_json()['fileid'] == file.fileid
    assert get_resp.get_json()['filename'] == os.path.basename(filepath)
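# count_lines is used above but not defined in this excerpt; a minimal sketch of
# what the test assumes it does (hypothetical helper, only the name comes from
# the original source):
def count_lines(path):
    """Count the number of lines in a text file."""
    with open(path) as f:
        return sum(1 for _ in f)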
def test_delete(self, client, db_session, file, values_list):
    assert client.get(url_for('data_handler.file_info', fileid=1)).status_code == 404

    db_session.add(file)
    db_session.commit()
    assert db_session.query(Files).all() == [file]

    samples = []
    data_schema = DataSchema()
    for values in values_list:
        values['fileid'] = file.fileid
        sample = data_schema.load(values, session=db_session, partial=True).data
        assert isinstance(sample, Data)
        samples.append(sample)
        db_session.add(sample)
    db_session.commit()
    assert db_session.query(Data).all() == samples

    resp = client.get(url_for('data_handler.file_info', fileid=file.fileid))
    assert resp.status_code == 200

    # Deleting a nonexistent file must 404; deleting the real one must 204
    # and remove the attached Data rows as well.
    assert client.delete(
        url_for('data_handler.delete_file', fileid=file.fileid + 1)).status_code == 404
    deleter = client.delete(url_for('data_handler.delete_file', fileid=file.fileid))
    assert deleter.status_code == 204
    assert not db_session.query(Files).all()
    assert not db_session.query(Data).all()
    assert client.get(
        url_for('data_handler.file_info', fileid=file.fileid)).status_code == 404
def test_transactional_add(self, db_session, file, values_list):
    db_session.add(file)
    db_session.commit()
    assert db_session.query(Files).all() == [file]

    samples = []
    data_schema = DataSchema()
    for values in values_list:
        values['fileid'] = file.fileid
        sample = data_schema.load(values, session=db_session, partial=True).data
        assert isinstance(sample, Data)
        samples.append(sample)
        db_session.add(sample)
    db_session.commit()
    assert db_session.query(Data).all() == samples
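# The tests above assume SQLAlchemy models Files and Data linked by fileid, and a
# marshmallow-sqlalchemy schema whose load()/dump() results expose a .data
# attribute (marshmallow 2.x style). A sketch of the assumed shapes; column
# names other than fileid and filename are placeholders, not repo code:
from flask_sqlalchemy import SQLAlchemy
from marshmallow_sqlalchemy import ModelSchema

db = SQLAlchemy()

class Files(db.Model):
    fileid = db.Column(db.Integer, primary_key=True)
    filename = db.Column(db.String(255), nullable=False)

class Data(db.Model):
    dataid = db.Column(db.Integer, primary_key=True)
    fileid = db.Column(db.Integer, db.ForeignKey('files.fileid'), nullable=False)

class DataSchema(ModelSchema):
    class Meta:
        model = Data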
def test_multi_get_delete(self, client, db_session, files_list, values_list):
    test_len = min(len(files_list), len(values_list))
    data_schema = DataSchema()
    samples = []
    for i in range(test_len):
        db_session.add(files_list[i])
        db_session.commit()
        values_list[i]['fileid'] = files_list[i].fileid
        sample = data_schema.load(values_list[i], session=db_session, partial=True).data
        assert isinstance(sample, Data)
        db_session.add(sample)
        samples.append(sample)
    db_session.commit()
    assert db_session.query(Data).all() == samples

    for i in range(test_len):
        resp = client.get(url_for('data_handler.file_info', fileid=files_list[i].fileid))
        assert resp.status_code == 200
        assert resp.is_json
        assert resp.get_json()['data_count'] == 1
        assert resp.get_json()['fileid'] == files_list[i].fileid
        assert resp.get_json()['filename'] == files_list[i].filename

    # Delete files one at a time from the head of the list and check that
    # only the deleted file disappears.
    for i in range(test_len):
        deleter = client.delete(
            url_for('data_handler.delete_file', fileid=files_list[0].fileid))
        assert deleter.status_code == 204
        assert client.get(
            url_for('data_handler.file_info',
                    fileid=files_list[0].fileid)).status_code == 404
        if i != test_len - 1:
            assert client.get(
                url_for('data_handler.file_info',
                        fileid=files_list[1].fileid)).status_code == 200
        samples.pop(0)
        files_list.pop(0)
        assert db_session.query(Data).all() == samples
        assert db_session.query(Files).all() == files_list

    assert not db_session.query(Files).all()
    assert not db_session.query(Data).all()
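# A sketch of the pytest fixtures these tests rely on (assumed shapes; the real
# conftest.py is not part of this excerpt, and client/db_session are taken to be
# the usual Flask test client and a transactional SQLAlchemy session):
import pytest

@pytest.fixture
def file():
    return Files(filename='example.csv')

@pytest.fixture
def files_list():
    return [Files(filename=f'example_{i}.csv') for i in range(3)]

@pytest.fixture
def values_list():
    # Field names depend on the Data model; these are placeholders.
    return [{'value': i} for i in range(3)]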
# Relies on the MySQL-dialect insert (from sqlalchemy.dialects.mysql import
# insert), whose statements support on_duplicate_key_update() and .inserted.
def uploadToDB(df):
    def insertChunk(insert_statement):
        # Map every schema key present in the data (plus the 'updated' column)
        # onto the incoming VALUES, so duplicate keys are updated in place.
        for key in data_keys.keys():
            if key in dicts[0].keys() or key == 'updated':
                try:
                    norm_data_keys[key] = getattr(insert_statement.inserted, key)
                except AttributeError:
                    pass
        insert_statement = insert_statement.on_duplicate_key_update(norm_data_keys)
        try:
            db.session.execute(insert_statement)
        except (InternalError, DataError):
            raise RuntimeError(
                f"Data is not valid. Problem found in range "
                f"({last_border}, {min(last_border + chunkSize, len(dicts))}). "
                f"Check that the values match their column names. "
                f"If that does not reveal the problem, inspect the data "
                f"in the specified range.")

    dicts = df.to_dict('records')
    if not dicts:
        return  # nothing to insert
    data_schema = DataSchema()
    data_keys = data_schema.dump(Data()).data
    norm_data_keys = {}
    last_border = 0
    Logger.debug("Preparing to upload")
    # Insert the records in batches of chunkSize rows.
    for i in range(chunkSize, len(dicts), chunkSize):
        insert_statement = insert(Data.__table__).values(dicts[i - chunkSize:i])
        insertChunk(insert_statement)
        Logger.debug(f"Uploaded chunk. Total rows: {i}")
        last_border = i
    # Flush the tail that did not fill a whole chunk.
    if last_border != len(dicts):
        insert_statement = insert(Data.__table__).values(dicts[last_border:len(dicts)])
        insertChunk(insert_statement)
        Logger.debug(f"Uploaded chunk. Total rows: {len(dicts)}")
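# Example use of uploadToDB, assuming chunkSize, Logger, db, Data and DataSchema
# are defined at module level as the function expects (a sketch, not repo code):
def example_upload():
    from pandas import DataFrame
    # Rows that collide on the table's unique key are updated in place by
    # INSERT ... ON DUPLICATE KEY UPDATE instead of raising an IntegrityError.
    df = DataFrame([{'fileid': 1, 'value': 42},
                    {'fileid': 1, 'value': 43}])
    uploadToDB(df)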
def test_empty_request_patch(self, client, db_session, file, values_list):
    db_session.add(file)
    db_session.commit()
    assert db_session.query(Files).all() == [file]

    samples = []
    data_schema = DataSchema()
    for values in values_list:
        values['fileid'] = file.fileid
        sample = data_schema.load(values, session=db_session, partial=True).data
        assert isinstance(sample, Data)
        samples.append(sample)
        db_session.add(sample)
    db_session.commit()
    assert db_session.query(Data).all() == samples

    # A PATCH with no body is a 400; a PATCH to an unknown file is a 404.
    # Neither request may alter the database.
    assert client.patch(
        url_for('data_handler.update_file', fileid=file.fileid)).status_code == 400
    assert client.patch(
        url_for('data_handler.update_file', fileid=file.fileid + 1)).status_code == 404
    assert db_session.query(Files).all() == [file]
    assert db_session.query(Data).all() == samples
def handleFile(fileid: int, file: FileStorage):
    # The first line of the upload is the CSV header.
    line = file.stream.readline().decode("utf-8")
    if not line:
        abort(400)
    try:
        df = read_csv(StringIO(line), sep='[;,|]', engine="python", header=None)
    except errors.ParserError:
        abort(400)
    if df.empty:
        abort(400)
    index, title = next(df.iterrows())
    title = title.tolist()

    # TODO: profile and optimize this block!
    # Read the data rows one line at a time until EOF.
    values = None
    data_schema = DataSchema()
    data_keys = data_schema.dump(Data()).data.keys()
    line = file.stream.readline().decode("utf-8")
    while line:
        try:
            df = read_csv(StringIO(line), sep='[;,|]', engine="python", names=title)
        except errors.ParserError:
            abort(400)
        if df.empty:
            break
        values = parseRow(df, data_keys)
        values['fileid'] = fileid
        uploadRow(values, data_schema)
        line = file.stream.readline().decode("utf-8")

    # A file with a header but no data rows is rejected.
    if not values:
        abort(400)
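# parseRow and uploadRow are not part of this excerpt; a minimal sketch of what
# handleFile assumes they do (hypothetical bodies, only the names and call
# signatures come from the original source):
def parseRow(df, data_keys):
    # Keep only the columns of the single parsed row that the schema knows about.
    row = df.iloc[0].to_dict()
    return {key: row[key] for key in data_keys if key in row}

def uploadRow(values, data_schema):
    # Deserialize the row into a Data instance and persist it.
    sample = data_schema.load(values, session=db.session, partial=True).data
    db.session.add(sample)
    db.session.commit()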