def delete_restaurant_by_id(restaurant_id: str) -> bool:
    """Delete the restaurant row identified by *restaurant_id*.

    Returns True when exactly one row was removed.
    Raises psycopg2.DataError when zero rows matched ("Not found") or
    when an unexpected row count was affected.
    """
    with connection.cursor(cursor_factory=extras.RealDictCursor) as cursor:
        query = qb.build_delete_restaurant_by_id_query(restaurant_id)
        cursor.execute(query)
        if cursor.rowcount == 1:
            return True
        if cursor.rowcount == 0:
            raise psycopg2.DataError("Not found")
        raise psycopg2.DataError(
            f"Found {cursor.rowcount} rows while 1 was expected")
def update_restaurant(restaurant):
    """Persist changes to *restaurant* and rebuild its tag rows.

    Raises psycopg2.DataError when the restaurant row is missing
    ("Not found") or when the update affected more than one row.
    """
    with connection.cursor() as cursor:
        cursor.execute(qb.build_update_restaurant_query(restaurant))
        if cursor.rowcount == 0:
            raise psycopg2.DataError("Not found")
        if cursor.rowcount > 1:
            raise psycopg2.DataError(
                f"Found {cursor.rowcount} rows while 1 was expected")
        # Replace the tag set wholesale: drop existing rows, then re-insert.
        cursor.execute(qb.build_delete_tags_query(restaurant.id))
        if restaurant.tags:
            cursor.execute(
                qb.build_insert_tags_query(restaurant.id, restaurant.tags))
def test_getAuthorInfo_with_error(self):
    """Verify getAuthorInfo rolls back and returns None on a DB error."""
    self.mock_cursor.fetchall.side_effect = psycopg2.DataError()

    result = self.dbController.getAuthorInfo(123)

    self.mock_cursor.execute.assert_called_with("ROLLBACK")
    self.assertIsNone(result)
def update_entries(self, table_name: str, user_id: str, values_dict: dict, update_type='rewrite', separator='|'): """ Обновляет запись одним из возможных способов ================================================================================ :param table_name: название таблицы, в которой обновляем :param user_id: порядковый ID человека в БД :param values_dict: Словарь значений следующего вида: {'название поля': 'новое значение'} :param update_type: Применяемый тип обновления записи; применяется для всех переданных в values_dict значений. > rewrite - DEFAULT - заменить старое значение в ячейке на новое > add - прибавить новое значение к старому (для числовых данных) > concat - соединить существующую строку с новой, возможно с использованием разделителя :param separator: разделитель строк, используемый при обновлении типа concat """ # try: with self.connection.cursor() as cursor: exist_entry = self.get_entry( table_name, list(values_dict.keys()), where_condition={'request_id': user_id}) # !!! ЗАМЕНИТЬ user_id на where_condition !!! if not exist_entry: raise psycopg2.DataError('Запись не существует.') else: if update_type == 'rewrite': result_dict = values_dict.copy() else: # поступившим данным присваиваются ключи запрошенных exist_entry_dict = { list(values_dict.keys())[i]: exist_entry[0][i] for i in range(len(values_dict.keys())) } if update_type == 'add': # складываем словари с одинаковыми ключами result_dict = self.add_dicts(values_dict, exist_entry_dict) elif update_type == 'concat': # складываем существующую строку и новую, +- разделитель result_dict = { key: str(exist_entry_dict[key]) + separator + str(values_dict[key]) for key in values_dict.keys() } query = "UPDATE " + table_name + \ " SET " + self.convert_dict_to_string(result_dict, separator=' = ') + \ " WHERE request_id = '" + user_id + "'" # !!! ЗАМЕНИТЬ user_id на # " AND ".join(self.convert_dict_to_string(where_condition, "=").split(", ")) !!! cursor.execute(query)
def __get_wed_cond(trname, dbs):
    """Return the `cpred` condition of the WED-transition named *trname*.

    :param trname: transition name to look up in the wed_trig table
    :param dbs: psycopg2 connection string (DSN)
    :raises psycopg2.DataError: if not exactly one matching row exists
    """
    job_conn = psycopg2.connect(dbs)
    try:
        curs = job_conn.cursor()
        curs.execute('select cpred from wed_trig where trname = %s', [trname])
        if curs.rowcount != 1:
            raise psycopg2.DataError('WED-transition "%s" not found' % (trname))
        return curs.fetchone()[0]
    finally:
        # Bug fix: the connection (and its cursor) were previously never
        # closed, leaking a DB connection per call.
        job_conn.close()
def db_copy_from(buffer, table, columns, raise_exception=True):
    """Copy data from file to db.

    :param buffer: file-like object to read tab-separated rows from
    :param table: destination table name
    :param columns: column names for the COPY command
    :param raise_exception: when True, re-raise DataError; otherwise log it
    """
    connection = raw_connection()
    cursor = connection.cursor()
    try:
        cursor.copy_from(file=buffer, table=table, columns=columns, sep='\t')
        connection.commit()
    except psycopg2.DataError as error:
        if raise_exception:
            # Bug fix: re-raise the original exception instead of wrapping
            # it in a new DataError (which discarded the traceback).
            raise
        current_app.logger.error('data load error: {0}'.format(error))
    finally:
        # Bug fix: previously the connection leaked when the error was
        # re-raised, since close() was only reached on the non-raise path.
        connection.close()
def parse(self, s, curs):
    """Parse the textual composite representation *s* into the target type.

    Returns None for SQL NULL. Each token is cast through *curs* using the
    registered attribute OIDs, then passed to the type constructor.

    :raises psycopg2.DataError: when the token count does not match the
        number of expected attributes
    """
    if s is None:
        return None
    tokens = self.tokenize(s)
    if len(tokens) != len(self.atttypes):
        # Bug fix: the original passed the values tuple as a second
        # constructor argument instead of %-formatting the message, and
        # referenced the non-existent `self.tokens` instead of the local
        # `tokens` (compare the correct sibling implementation).
        raise psycopg2.DataError(
            "expecting %d components for the type %s, %d found instead"
            % (len(self.atttypes), self.name, len(tokens)))
    attrs = [curs.cast(oid, token)
             for oid, token in zip(self.atttypes, tokens)]
    return self._ctor(*attrs)
def db_copy_to(filehandle, table, columns, raise_exception=True):
    """Copy data from db to file.

    :param filehandle: file-like object to write tab-separated rows to
    :param table: source table name
    :param columns: column names for the COPY command
    :param raise_exception: when True, re-raise DataError; otherwise log it
    """
    connection = raw_connection()
    cursor = connection.cursor()
    try:
        try:
            cursor.copy_to(file=filehandle, table=table, columns=columns,
                           sep='\t')
            cursor.connection.commit()
        except psycopg2.DataError as error:
            if raise_exception:
                # Bug fix: re-raise the original exception instead of
                # wrapping it in a new DataError (which lost the traceback).
                raise
            current_app.logger.error('data load error: {0}'.format(error))
        cursor.execute('VACUUM ANALYSE {table}'.format(table=table))
    finally:
        # Bug fix: cursor/connection previously leaked when the error was
        # re-raised, since the close calls were only on the success path.
        cursor.close()
        connection.close()
def get_restaurant_by_id(restaurant_id: str) -> Restaurant:
    """Fetch a restaurant and its tag list by id.

    Raises psycopg2.DataError when no restaurant matches.
    """
    with connection.cursor(cursor_factory=extras.RealDictCursor) as cursor:
        cursor.execute(qb.build_get_restaurant_by_id_query(restaurant_id))
        row = cursor.fetchone()
        if row is None:
            raise psycopg2.DataError("Not found")
        cursor.execute(qb.build_get_tags_by_restaurant_id_query(restaurant_id))
        if cursor.rowcount > 0:
            tags = [str(tag_row['tag']) for tag_row in cursor.fetchall()]
        else:
            tags = []
        return Restaurant(tags=tags, **row)
def test_load_csv_to_postgres_via_copy(self):
    """COPY loader succeeds on clean data and reports structured errors."""
    csv_path = THIS_PATH + '/r2.csv'
    resource = {
        'path': csv_path,
        'ckan_resource_id': '6f6b1c93-21ff-47ec-a0d6-e5be7c36d082',
        'schema': {
            'fields': [{
                'name': 'FID',
                'type': 'text'
            }]
        }
    }
    with patch('psycopg2.connect') as mock_connect:
        # Happy path: COPY completes and the loader reports success.
        mock_connect.cursor.return_value.copy_expert.return_value = 'success'
        assert load_csv_to_postgres_via_copy(resource, {}, mock_connect) == {
            'success': True
        }

        # psycopg2.DataError surfaces as a "Data Error during COPY" message.
        error_str = 'missing data for column "field2"'
        expected_data_error = {
            'success': False,
            'message': 'Data Error during COPY command: {}'.format(error_str)
        }
        mock_connect.cursor.return_value.copy_expert.side_effect = \
            psycopg2.DataError(error_str)
        self.assertEqual(
            load_csv_to_postgres_via_copy(resource, {}, mock_connect),
            str(expected_data_error))

        # Any other exception surfaces as a generic COPY error message.
        expected_generic_error = {
            'success': False,
            'message': 'Generic Error during COPY: {}'.format(error_str)
        }
        mock_connect.cursor.return_value.copy_expert.side_effect = \
            Exception(format(error_str))
        self.assertEqual(
            load_csv_to_postgres_via_copy(resource, {}, mock_connect),
            str(expected_generic_error))
def parse(self, s, curs):
    """Parse composite text *s* into a dict keyed by attribute name.

    Returns None for SQL NULL. None-valued attributes are omitted from the
    result; the composite type name is stored under the 'record' key.

    :raises psycopg2.DataError: when the token count does not match the
        number of expected attributes
    """
    if s is None:
        return None
    tokens = self.tokenize(s)
    if len(tokens) != len(self.atttypes):
        raise psycopg2.DataError(
            "expecting %d components for the type %s, %d found instead"
            % (len(self.atttypes), self.name, len(tokens)))
    attrs = [curs.cast(oid, token)
             for oid, token in zip(self.atttypes, tokens)]
    # Bug fix: the original used Python-2-only xrange(); this file uses
    # f-strings elsewhere (Python 3), where xrange raises NameError.
    result = {name: value
              for name, value in zip(self.attnames, attrs)
              if value is not None}
    result['record'] = self.name
    return result
def test_delete_index(self):
    """delete_index reports success normally and an error on DataError."""
    resource = {
        'ckan_resource_id': RESOURCE_ID,
    }

    # Success: index lookup returns rows and deletion proceeds.
    with patch('psycopg2.connect') as mock_connect:
        mock_connect.cursor.return_value.execute.return_value. \
            fetchall.return_value = ['res1', 'res2']
        assert delete_index(resource, {}, mock_connect) == {'success': True}

    # Failure: execute raises DataError and its text is propagated.
    error_str = 'invalid data \xc3\xbc'
    expected_error = {
        'success': False,
        'message': 'Error during deleting indexes: {}'.format(error_str)
    }
    with patch('psycopg2.connect') as mock_connect:
        mock_connect.cursor.return_value.execute.side_effect = \
            psycopg2.DataError(error_str)
        self.assertEqual(delete_index(resource, {}, mock_connect),
                         str(expected_error))
def build_generator(plants, current_timestep):
    """Writes an intervention into the GeneratorData table

    Arguments
    ---------
    plants : list
        Plant definitions; each dict must contain the keys listed in
        ``expected_keys`` below.
    current_timestep : int
        Year written to the "Year" column.

    Raises
    ------
    ValueError
        If a plant is missing required keys or its 'type' is unusable.
    psycopg2.DataError
        Re-raised from the database when an INSERT fails.
    """
    conn = establish_connection()
    cur = conn.cursor()
    expected_keys = [
        'type', 'name', 'location', 'min_power', 'capacity', 'build_year',
        'technical_lifetime', 'sys_layer'
    ]

    # Mapping from textual plant type to its numeric GeneratorData "Type".
    # Note: some keys carry trailing spaces as found in the source data.
    plant_type_by_name = {
        'ccgt': 1,
        'coal': 2,
        'nuclear': 4,
        'hydro': 5,
        'oil': 6,
        'ocgt (flexible generation)': 7,
        'gas ccs': 8,
        'becss': 9,
        'biomass': 10,
        'interconnector': 11,
        'chp gas': 13,
        'pumped_storage': 15,
        'gas fired generation of ehs': 20,
        'efw chp of ehs ': 21,
        'biomass chp of ehs ': 22,
        'h2 fuel cell ': 30,
        'wind onshore': 3,
        'wind offshore': 12,
        'pv': 23,
        'dummygenerator': 100
    }

    def extract_value(generator, field_name):
        """Return the float value of a field that is either a plain number
        or a {'value': x} mapping."""
        if isinstance(generator[field_name], dict):
            return float(generator[field_name]['value'])
        return float(generator[field_name])

    for plant in plants:
        missing = [key for key in expected_keys if key not in plant.keys()]
        if missing:
            raise ValueError("Keys {} missing for {}".format(
                missing, plant['name']))

        try:
            plant['type'] = int(plant['type'])
        except (TypeError, ValueError):
            # Bug fix: int('ccgt') raises ValueError, not TypeError, so
            # textual type names previously crashed here instead of falling
            # through to the name lookup below.
            pass

        if isinstance(plant['type'], str):
            plant_type = plant_type_by_name[plant['type'].lower()]
        elif isinstance(plant['type'], int):
            plant_type = plant['type']
        else:
            raise ValueError("'type' field '{}' is incorrect".format(
                plant['type']))

        min_power = extract_value(plant, 'min_power')
        capacity = extract_value(plant, 'capacity')
        lifetime = extract_value(plant, 'technical_lifetime')
        retire_year = float(plant['build_year']) + lifetime

        if int(plant['sys_layer']) == 2:
            sql = """
                INSERT INTO "GeneratorData" ("Type", "GeneratorName",
                "EH_Conn_Num", "MinPower", "MaxPower", "Year", "Retire",
                "SysLayer")
                VALUES (%s, %s, %s, %s, %s, %s, %s, %s)"""
            data = (plant_type, plant['name'], plant['location'], min_power,
                    capacity, current_timestep, retire_year,
                    plant['sys_layer'])
        elif plant_type in (1, 8):
            # CCGT and Gas CCS both attach to a gas node and a bus;
            # the two original branches were byte-identical and are merged.
            sql = """
                INSERT INTO "GeneratorData" ("Type", "GeneratorName",
                "GasNode", "BusNum", "MinPower", "MaxPower", "Year",
                "Retire", "SysLayer")
                VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)"""
            data = (plant_type, plant['name'], plant['to_location'],
                    plant['location'], min_power, capacity, current_timestep,
                    retire_year, plant['sys_layer'])
        elif plant_type == 11:
            # Interconnector: destination is recorded in "Inter_conn".
            sql = """
                INSERT INTO "GeneratorData" ("Type", "GeneratorName",
                "BusNum", "MinPower", "MaxPower", "Year", "Retire",
                "SysLayer", "Inter_conn")
                VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)"""
            data = (plant_type, plant['name'], plant['location'], min_power,
                    capacity, current_timestep, retire_year,
                    plant['sys_layer'], plant['to_location'])
        elif plant_type == 15:
            # Pumped storage carries an extra capacity column.
            sql = """
                INSERT INTO "GeneratorData" ("Type", "GeneratorName",
                "BusNum", "MinPower", "MaxPower", "PumpStorageCapacity",
                "Year", "Retire", "SysLayer")
                VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)"""
            data = (plant_type, plant['name'], plant['location'], min_power,
                    capacity, extract_value(plant, 'pumpstore_capacity'),
                    current_timestep, retire_year, plant['sys_layer'])
        else:
            sql = """
                INSERT INTO "GeneratorData" ("Type", "GeneratorName",
                "BusNum", "MinPower", "MaxPower", "Year", "Retire",
                "SysLayer")
                VALUES (%s, %s, %s, %s, %s, %s, %s, %s)"""
            data = (plant_type, plant['name'], plant['location'], min_power,
                    capacity, current_timestep, retire_year,
                    plant['sys_layer'])

        try:
            cur.execute(sql, data)
        except psycopg2.DataError:
            print(sql, data)
            # Bug fix: re-raise the original exception instead of wrapping
            # it in a new DataError (which discarded the traceback).
            raise

    # Make the changes to the database persistent
    conn.commit()
    # Close communication with the database
    cur.close()
    conn.close()
def raise_data_error(self):
    """Unconditionally raise psycopg2.DataError (helper for error paths)."""
    raise psycopg2.DataError()