def _params_tests(self):
    def test_val(typ, val):
        with self.conn.cursor() as cur:
            param = Column(type=typ, value=val)
            logger.info("Testing with %s", repr(param))
            cur.execute('select %s', [param])
            self.assertTupleEqual(cur.fetchone(), (val,))
            self.assertIs(cur.fetchone(), None)

    test_val(BitType(), True)
    test_val(BitType(), False)
    test_val(BitType(), None)
    test_val(TinyIntType(), 255)
    test_val(SmallIntType(), 2**15 - 1)
    test_val(IntType(), 2**31 - 1)
    test_val(BigIntType(), 2**63 - 1)
    test_val(IntType(), None)
    test_val(RealType(), 0.25)
    test_val(FloatType(), 0.25)
    test_val(RealType(), None)

    test_val(SmallDateTimeType(), datetime(1900, 1, 1, 0, 0, 0))
    test_val(SmallDateTimeType(), datetime(2079, 6, 6, 23, 59, 0))
    test_val(DateTimeType(), datetime(1753, 1, 1, 0, 0, 0))
    test_val(DateTimeType(), datetime(9999, 12, 31, 23, 59, 59, 990000))
    test_val(DateTimeType(), None)

    if pytds.tds_base.IS_TDS73_PLUS(self.conn._conn):
        test_val(DateType(), date(1, 1, 1))
        test_val(DateType(), date(9999, 12, 31))
        test_val(DateType(), None)

        test_val(TimeType(precision=0), time(0, 0, 0))
        test_val(TimeType(precision=6), time(23, 59, 59, 999999))
        test_val(TimeType(precision=0), None)

        test_val(DateTime2Type(precision=0), datetime(1, 1, 1, 0, 0, 0))
        test_val(DateTime2Type(precision=6), datetime(9999, 12, 31, 23, 59, 59, 999999))
        test_val(DateTime2Type(precision=0), None)

        test_val(DateTimeOffsetType(precision=6), datetime(9999, 12, 31, 23, 59, 59, 999999, utc))
        test_val(DateTimeOffsetType(precision=6), datetime(9999, 12, 31, 23, 59, 59, 999999, tzoffset(14)))
        test_val(DateTimeOffsetType(precision=0), datetime(1, 1, 1, 0, 0, 0, tzinfo=tzoffset(-14)))
        #test_val(DateTimeOffsetType(precision=0), datetime(1, 1, 1, 0, 0, 0, tzinfo=tzoffset(14)))
        test_val(DateTimeOffsetType(precision=6), None)

    test_val(DecimalType(scale=6, precision=38), Decimal('123.456789'))
    test_val(DecimalType(scale=6, precision=38), None)

    test_val(SmallMoneyType(), Decimal('-214748.3648'))
    test_val(SmallMoneyType(), Decimal('214748.3647'))
    test_val(MoneyType(), Decimal('922337203685477.5807'))
    test_val(MoneyType(), Decimal('-922337203685477.5808'))
    test_val(MoneyType(), None)

    test_val(UniqueIdentifierType(), None)
    test_val(UniqueIdentifierType(), uuid.uuid4())

    if pytds.tds_base.IS_TDS71_PLUS(self.conn._conn):
        test_val(VariantType(), None)
        #test_val(self.conn._conn.type_factory.SqlVariant(10), 100)

    test_val(VarBinaryType(size=10), b'')
    test_val(VarBinaryType(size=10), b'testtest12')
    test_val(VarBinaryType(size=10), None)
    test_val(VarBinaryType(size=8000), b'x' * 8000)

    test_val(VarCharType(size=10), None)
    test_val(VarCharType(size=10), '')
    test_val(VarCharType(size=10), 'test')
    test_val(VarCharType(size=8000), 'x' * 8000)

    test_val(NVarCharType(size=10), u'')
    test_val(NVarCharType(size=10), u'testtest12')
    test_val(NVarCharType(size=10), None)
    test_val(NVarCharType(size=4000), u'x' * 4000)

    test_val(TextType(), None)
    test_val(TextType(), '')
    test_val(TextType(), 'hello')

    test_val(NTextType(), None)
    test_val(NTextType(), '')
    test_val(NTextType(), 'hello')

    test_val(ImageType(), None)
    test_val(ImageType(), b'')
    test_val(ImageType(), b'test')

    if pytds.tds_base.IS_TDS72_PLUS(self.conn._conn):
        test_val(VarBinaryMaxType(), None)
        test_val(VarBinaryMaxType(), b'')
        test_val(VarBinaryMaxType(), b'testtest12')
        test_val(VarBinaryMaxType(), b'x' * (10**6))
        test_val(NVarCharMaxType(), None)
        test_val(NVarCharMaxType(), 'test')
        test_val(NVarCharMaxType(), 'x' * (10**6))
        test_val(VarCharMaxType(), None)
        test_val(VarCharMaxType(), 'test')
        test_val(VarCharMaxType(), 'x' * (10**6))
        test_val(XmlType(), '<root/>')
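
# A minimal standalone sketch of the round-trip the test above exercises,
# assuming a reachable SQL Server; the host, credentials, and database name
# below are placeholders. Wrapping the value in Column pins the exact wire
# type instead of letting pytds infer one from the Python value.
#
#     import pytds
#     from datetime import datetime
#     from pytds.tds_base import Column
#     from pytds.tds_types import DateTime2Type
#
#     with pytds.connect(dsn='localhost', database='test',
#                        user='sa', password='secret') as conn:
#         with conn.cursor() as cur:
#             param = Column(type=DateTime2Type(precision=6),
#                            value=datetime(2020, 1, 2, 3, 4, 5, 123456))
#             cur.execute('select %s', [param])
#             print(cur.fetchone())
#             # -> (datetime.datetime(2020, 1, 2, 3, 4, 5, 123456),)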
def copy_to(self, file=None, table_or_view=None, sep='\t', columns=None,
            check_constraints=False, fire_triggers=False, keep_nulls=False,
            kb_per_batch=None, rows_per_batch=None, order=None, tablock=False,
            schema=None, null_string=None, data=None):
    """ *Experimental*. Efficiently load data into the database from a file
    or from an iterable of rows using the ``BULK INSERT`` operation.

    :param file: Source file-like object in CSV format. Specify either this
      or ``data``, not both.
    :param table_or_view: Destination table or view in the database
    :type table_or_view: str

    Optional parameters:

    :keyword sep: Separator used in the CSV file
    :type sep: str
    :keyword columns: List of ``Column`` objects or column names of the
      target table to insert into. SQL Server will do some conversions, so
      these do not have to match the actual table definition exactly.
      If not provided, all columns of the destination are used, each assumed
      to be ``nvarchar(4000) NULL``. If only a column name is provided, its
      type is likewise assumed to be ``nvarchar(4000) NULL``. If rows are
      given via ``file``, you cannot specify non-string data types. If rows
      are given via ``data``, each value must be of a type supported by the
      serializer for the corresponding column in ``tds_types``.
    :type columns: list
    :keyword check_constraints: Check table constraints for incoming data
    :type check_constraints: bool
    :keyword fire_triggers: Enable or disable triggers for the table
    :type fire_triggers: bool
    :keyword keep_nulls: If enabled, null values are inserted as-is instead
      of being replaced by the column's default value
    :type keep_nulls: bool
    :keyword kb_per_batch: Kilobytes per batch; can be used to optimize
      performance, see the MSSQL server documentation for details
    :type kb_per_batch: int
    :keyword rows_per_batch: Rows per batch; can be used to optimize
      performance, see the MSSQL server documentation for details
    :type rows_per_batch: int
    :keyword order: The ordering of the data in the source. List of columns,
      each with an ASC or DESC suffix, e.g. ``['order_id ASC', 'name DESC']``.
      Can be used to optimize performance, see the MSSQL server documentation
      for details
    :type order: list
    :keyword tablock: Enable or disable a table lock for the duration of the
      bulk load
    :type tablock: bool
    :keyword schema: Name of the schema for the table or view; if not
      specified, the default schema is used
    :type schema: str
    :keyword null_string: String that should be interpreted as NULL when
      reading the CSV file. Has no meaning when ``data`` is used instead of
      ``file``.
    :type null_string: str
    :keyword data: The data to insert, as an iterable of rows, each of which
      is an iterable of values. Specify either this or ``file``, not both.
    """
    conn = self._conn()
    rows = None
    if data is None:
        import csv
        reader = csv.reader(file, delimiter=sep)

        if null_string is not None:
            # Replace the NULL sentinel with None while streaming the CSV
            def _convert_null_strings(csv_reader):
                for row in csv_reader:
                    yield [r if r != null_string else None for r in row]

            reader = _convert_null_strings(reader)

        rows = reader
    else:
        rows = data

    obj_name = tds_base.tds_quote_id(table_or_view)
    if schema:
        obj_name = '{0}.{1}'.format(tds_base.tds_quote_id(schema), obj_name)

    if columns:
        metadata = []
        for column in columns:
            if isinstance(column, Column):
                metadata.append(column)
            else:
                metadata.append(Column(name=column,
                                       type=NVarCharType(size=4000),
                                       flags=Column.fNullable))
    else:
        # Probe the destination for column names and nullability without
        # fetching any rows
        self.execute('select top 1 * from {} where 1<>1'.format(obj_name))
        metadata = [Column(name=col[0], type=NVarCharType(size=4000),
                           flags=Column.fNullable if col[6] else 0)
                    for col in self.description]

    col_defs = ','.join('{0} {1}'.format(tds_base.tds_quote_id(col.column_name),
                                         col.type.get_declaration())
                        for col in metadata)
    with_opts = []
    if check_constraints:
        with_opts.append('CHECK_CONSTRAINTS')
    if fire_triggers:
        with_opts.append('FIRE_TRIGGERS')
    if keep_nulls:
        with_opts.append('KEEP_NULLS')
    if kb_per_batch:
        with_opts.append('KILOBYTES_PER_BATCH = {0}'.format(kb_per_batch))
    if rows_per_batch:
        with_opts.append('ROWS_PER_BATCH = {0}'.format(rows_per_batch))
    if order:
        with_opts.append('ORDER({0})'.format(','.join(order)))
    if tablock:
        with_opts.append('TABLOCK')
    with_part = ''
    if with_opts:
        with_part = 'WITH ({0})'.format(','.join(with_opts))
    operation = 'INSERT BULK {0}({1}) {2}'.format(obj_name, col_defs, with_part)
    self.execute(operation)
    self._session.submit_bulk(metadata, rows)
    self._session.process_simple_request()