def test1(self):
    run_context = SuiteContext('unittest')
    db_context = run_context.getDBContext('remote')
    self.LOGGER = run_context.get_logger()

    # Set up a test table definition
    with get_temp_table(db_context) as table:
        for i in xrange(100):
            table.add(FieldSpec("col_{}".format(i), 'NVARCHAR', 8))

        fts = FastTableStream(table, use_names=False, raw=True)
        data = [[unicode(100 * j + i) for i in xrange(100)]
                for j in xrange(1000)]

        def do_write():
            with fts:
                for j in xrange(5):
                    fts.write_many(data)

        pr = cProfile.Profile()
        pr.enable()
        pr.runcall(do_write)
        pr.disable()

        filename = os.path.join(
            run_context.logs_dir,
            'fast_table_write_remote_unicode_raw_profile.txt')
        with open(filename, 'w') as f:
            stats = pstats.Stats(pr, stream=f)
            stats.print_stats()
        self.LOGGER.info("Table name {}".format(table))
Example 2
def main():
    """
    This runs a test of the complementary_merge wrapper/glue code.
    """
    run_context = SuiteContext('OGT_test{}'.format(RUN_CONTEXT_NUMBER))
    log = run_context.get_logger('ComplementaryMerge')
    db_context = run_context.getDBContext()

    # Summer has SAS variables.
    intermediate_path = ('OGT Fall 2012' if SUMMER_OR_FALL == 'F'
                         else 'OGT Spring 2012')
    pathname = os.path.join(CVSROOT, 'CSSC Score Reporting', intermediate_path,
                            'Code/Development/Intake')
    bookmap_location_file_name = os.path.join(pathname,
                                              BOOKMAP_LOCATION_FILE_NAME)
    log.debug("main - bookmap_location_file_name[%s]" %
              bookmap_location_file_name)
    print("bookmap_location_file_name[%s]" % bookmap_location_file_name)
    mergespec_file_name = os.path.join(run_context.tests_safe_dir,
                                       MERGESPEC_FILE_NAME)

    input_table_names = {
        FLAT_TABLE_KEY_NAME: 'rc2FINAL',
        'C': 'mc_table_C',
        'M': 'mc_table_M',
        'R': 'mc_table_R',
        'S': 'mc_table_S',
        'W': 'mc_table_W'
    }
    output_table_names = {
        FLAT_TABLE_KEY_NAME: 'rc2FINAL_cmrg',
        'C': 'mc_table_C_cmrg',
        'M': 'mc_table_M_cmrg',
        'R': 'mc_table_R_cmrg',
        'S': 'mc_table_S_cmrg',
        'W': 'mc_table_W_cmrg'
    }

    for table_name in output_table_names.values():
        dbutilities.drop_table_if_exists(db_context=db_context,
                                         table=table_name)

    try:
        complementary_merge(
            run_context=run_context,
            bookmap_location_file_name=bookmap_location_file_name,
            bookmap_sheet=BOOKMAP_SHEET,
            mergespec_file_name=mergespec_file_name,
            input_table_names=input_table_names,
            output_table_names=output_table_names)
        #create_mergespec_file( run_context=run_context, input_table_names=input_table_names,
        #    new_mergespec_file='C:/new_mergespec_file.csv' )
    except Exception:
        log.exception('complementary_merge failed')
        raise
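
Assuming this module is meant to run directly as a script (an assumption; the listing does not show its entry point), the conventional guard would invoke main():

if __name__ == '__main__':
    main()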
Example 3
class TestPreQC( unittest.TestCase ):

    def setUp(self):
        self.run_context = SuiteContext( 'unittest' )
        self.db_context = self.run_context.getDBContext( 'unittest' )
        self.LOGGER = self.run_context.get_logger()

    def tearDown(self):
        pass

    def runTest(self):
        pass
Example 4
def main( ):
    """
    This runs a test of the district_performance_summary_report wrapper/glue code.
    """
    run_context = SuiteContext( 'OGT_test{}'.format( RUN_CONTEXT_NUMBER ) )
    log = run_context.get_logger( 'DistrictPerformanceSummaryReport' )

    specfile = os.path.join( run_context.tests_safe_dir, SPECFILE )
    log.debug( "main - specfile[{}]".format( specfile ) )

    # sch_type = H = 12 records
    # sch_type = P = 10495 records

    dpsr.district_performance_summary_report( run_context=run_context,
                                              specfile=specfile,
                                              input_table_name='student' )

class TestFastTableStream(unittest.TestCase):
    def setUp(self):
        self.run_context = SuiteContext('unittest')
        self.db_context = self.run_context.getDBContext('unittest')
        self.LOGGER = self.run_context.get_logger()

        # Set up a test table definition
        self.table = get_temp_table(self.db_context)
        (self.table.add(FieldSpec("col1", "NVARCHAR", 8)).add(
            FieldSpec("col2", "FLOAT")).add(FieldSpec("col3", "TINYINT")).add(
                FieldSpec("col4", "INT")).add(FieldSpec("col5", "BIGINT")))

    def tearDown(self):
        self.table.drop()

    def runTest(self):
        pass

    def testWriteUnicodeCharacters(self):
        del self.table[:]
        for i in xrange(100):
            self.table.add(FieldSpec("col_{}".format(i), 'NVARCHAR', 8))

        fts = FastTableStream(self.table, use_names=False)
        with fts:
            for j in xrange(10000):
                fts.write([100 * j + i for i in xrange(100)])
        self.LOGGER.info("Table name {}".format(self.table))

    def testWriteUnicodeMany(self):
        del self.table[:]
        for i in xrange(100):
            self.table.add(FieldSpec("col_{}".format(i), 'NVARCHAR', 8))

        fts = FastTableStream(self.table, use_names=False)
        data = [[100 * j + i for i in xrange(100)] for j in xrange(1000)]

        def do_write():
            with fts:
                for j in xrange(20):
                    fts.write_many(data)

        pr = cProfile.Profile()
        pr.enable()
        pr.runcall(do_write)
        pr.disable()

        filename = os.path.join(self.run_context.logs_dir,
                                'fast_table_write_unicode_profile.txt')
        with open(filename, 'w') as f:
            stats = pstats.Stats(pr, stream=f)
            stats.print_stats()
        self.LOGGER.info("Table name {}".format(self.table))

    def testWriteUnicodeRaw(self):
        del self.table[:]
        for i in xrange(100):
            self.table.add(FieldSpec("col_{}".format(i), 'NVARCHAR', 8))

        fts = FastTableStream(self.table, use_names=False, raw=True)
        data = [[unicode(100 * j + i) for i in xrange(100)]
                for j in xrange(1000)]

        def do_write():
            with fts:
                for j in xrange(20):
                    fts.write_many(data)

        pr = cProfile.Profile()
        pr.enable()
        pr.runcall(do_write)
        pr.disable()

        filename = os.path.join(self.run_context.logs_dir,
                                'fast_table_write_unicode_raw_profile.txt')
        with open(filename, 'w') as f:
            stats = pstats.Stats(pr, stream=f)
            stats.print_stats()
        self.LOGGER.info("Table name {}".format(self.table))

    def testWriteUnicodeNoNull(self):
        del self.table[:]
        for i in xrange(100):
            self.table.add(
                FieldSpec("col_{}".format(i), 'NVARCHAR', 8, nullable=False))

        fts = FastTableStream(self.table, use_names=False, raw=True)
        data = [[unicode(100 * j + i) for i in xrange(100)]
                for j in xrange(1000)]

        def do_write():
            with fts:
                for j in xrange(20):
                    fts.write_many(data)

        pr = cProfile.Profile()
        pr.enable()
        pr.runcall(do_write)
        pr.disable()

        filename = os.path.join(self.run_context.logs_dir,
                                'fast_table_write_unicode_nonull_profile.txt')
        with open(filename, 'w') as f:
            stats = pstats.Stats(pr, stream=f)
            stats.print_stats()
        self.LOGGER.info("Table name {}".format(self.table))

    def testWriteCharacters(self):
        del self.table[:]
        for i in xrange(100):
            self.table.add(FieldSpec("col_{}".format(i), 'VARCHAR', 8))

        fts = FastTableStream(self.table, use_names=False)
        with fts:
            for j in xrange(10000):
                fts.write([str(100 * j + i)[:8] for i in xrange(100)])
        self.LOGGER.info("Table name {}".format(self.table))

    def testWriteMany(self):
        del self.table[:]
        for i in xrange(100):
            self.table.add(FieldSpec("col_{}".format(i), 'VARCHAR', 8))

        fts = FastTableStream(self.table, use_names=False)
        data = [[100 * j + i for i in xrange(100)] for j in xrange(1000)]

        def do_write():
            with fts:
                for j in xrange(20):
                    fts.write_many(data)

        pr = cProfile.Profile()
        pr.enable()
        pr.runcall(do_write)
        pr.disable()

        filename = os.path.join(self.run_context.logs_dir,
                                'fast_table_write_char_profile.txt')
        with open(filename, 'w') as f:
            stats = pstats.Stats(pr, stream=f)
            stats.print_stats()
        self.LOGGER.info("Table name {}".format(self.table))

    def testWriteRaw(self):
        del self.table[:]
        for i in xrange(100):
            self.table.add(FieldSpec("col_{}".format(i), 'VARCHAR', 8))

        fts = FastTableStream(self.table, use_names=False, raw=True)
        data = [[str(100 * j + i) for i in xrange(100)] for j in xrange(1000)]

        def do_write():
            with fts:
                for j in xrange(20):
                    fts.write_many(data)

        pr = cProfile.Profile()
        pr.enable()
        pr.runcall(do_write)
        pr.disable()

        filename = os.path.join(self.run_context.logs_dir,
                                'fast_table_write_char_raw_profile.txt')
        with open(filename, 'w') as f:
            stats = pstats.Stats(pr, stream=f)
            stats.print_stats()
        self.LOGGER.info("Table name {}".format(self.table))

    def testWriteIntegerMany(self):
        del self.table[:]
        for i in xrange(100):
            self.table.add(FieldSpec("col_{}".format(i), 'INT'))

        fts = FastTableStream(self.table, use_names=False)
        data = [[100 * j + i for i in xrange(100)] for j in xrange(1000)]

        def do_write():
            with fts:
                for j in xrange(20):
                    fts.write_many(data)

        pr = cProfile.Profile()
        pr.enable()
        pr.runcall(do_write)
        pr.disable()

        filename = os.path.join(
            self.run_context.logs_dir,
            'fast_table_write_int_with_checks_profile.txt')
        with open(filename, 'w') as f:
            stats = pstats.Stats(pr, stream=f)
            stats.print_stats()
        self.LOGGER.info("Table name {}".format(self.table))

    def testWriteIntegerRaw(self):
        del self.table[:]
        for i in xrange(100):
            self.table.add(FieldSpec("col_{}".format(i), 'INT'))

        fts = FastTableStream(self.table, use_names=False, raw=True)
        data = [[100 * j + i for i in xrange(100)] for j in xrange(1000)]

        def do_write():
            with fts:
                for j in xrange(20):
                    fts.write_many(data)

        pr = cProfile.Profile()
        pr.enable()
        pr.runcall(do_write)
        pr.disable()

        filename = os.path.join(self.run_context.logs_dir,
                                'fast_table_write_int_raw_profile.txt')
        with open(filename, 'w') as f:
            stats = pstats.Stats(pr, stream=f)
            stats.print_stats()
        self.LOGGER.info("Table name {}".format(self.table))

    def testWriteNoNull(self):
        del self.table[:]
        for i in xrange(100):
            self.table.add(
                FieldSpec("col_{}".format(i), 'VARCHAR', 8, nullable=False))

        fts = FastTableStream(self.table, use_names=False, raw=True)
        data = [[str(100 * j + i) for i in xrange(100)] for j in xrange(1000)]

        def do_write():
            with fts:
                for j in xrange(20):
                    fts.write_many(data)

        pr = cProfile.Profile()
        pr.enable()
        pr.runcall(do_write)
        pr.disable()

        filename = os.path.join(self.run_context.logs_dir,
                                'fast_table_write_char_nonull_profile.txt')
        with open(filename, 'w') as f:
            stats = pstats.Stats(pr, stream=f)
            stats.print_stats()
        self.LOGGER.info("Table name {}".format(self.table))

    def testWriteFloatNoNull(self):
        del self.table[:]
        for i in xrange(100):
            self.table.add(
                FieldSpec("col_{}".format(i), 'FLOAT', nullable=False))

        fts = FastTableStream(self.table,
                              use_names=False,
                              raw=True,
                              # NOTE: hard-coded Windows scratch path;
                              # C:\Scratch must exist for this test to run.
                              dumpfile="C:\\Scratch\\float_no_null.dat")
        data = [[100.0 * j + i for i in xrange(100)] for j in xrange(1000)]

        def do_write():
            with fts:
                for j in xrange(20):
                    fts.write_many(data)

        pr = cProfile.Profile()
        pr.enable()
        pr.runcall(do_write)
        pr.disable()

        filename = os.path.join(self.run_context.logs_dir,
                                'fast_table_write_float_nonull_profile.txt')
        with open(filename, 'w') as f:
            stats = pstats.Stats(pr, stream=f)
            stats.print_stats()
        self.LOGGER.info("Table name {}".format(self.table))

    def testWriteFloatMany(self):
        del self.table[:]
        for i in xrange(100):
            self.table.add(FieldSpec("col_{}".format(i), 'FLOAT'))

        fts = FastTableStream(self.table,
                              use_names=False,
                              dumpfile="C:\\Scratch\\float.dat")
        data = [[100.0 * j + i for i in xrange(100)] for j in xrange(1000)]

        def do_write():
            with fts:
                for j in xrange(20):
                    fts.write_many(data)

        pr = cProfile.Profile()
        pr.enable()
        pr.runcall(do_write)
        pr.disable()

        filename = os.path.join(self.run_context.logs_dir,
                                'fast_table_write_float_profile.txt')
        with open(filename, 'w') as f:
            stats = pstats.Stats(pr, stream=f)
            stats.print_stats()
        self.LOGGER.info("Table name {}".format(self.table))

    def testWriteMixed(self):
        del self.table[:]
        for i in xrange(33):
            self.table.add(FieldSpec("float_{}".format(i), 'FLOAT'))
            self.table.add(FieldSpec("int_{}".format(i), 'INT'))
            self.table.add(FieldSpec("str_{}".format(i), 'VARCHAR', 6))

        fts = FastTableStream(self.table, use_names=False)
        data = []
        for j in xrange(1000):
            row = []
            for i in xrange(33):
                k = 100 * j + i
                # Match the declared column order: FLOAT, INT, VARCHAR.
                row.extend((float(k), int(k), str(k)))
            data.append(row)

        def do_write():
            with fts:
                for j in xrange(20):
                    fts.write_many(data)

        pr = cProfile.Profile()
        pr.enable()
        pr.runcall(do_write)
        pr.disable()

        filename = os.path.join(self.run_context.logs_dir,
                                'fast_table_write_mixed_profile.txt')
        with open(filename, 'w') as f:
            stats = pstats.Stats(pr, stream=f)
            stats.print_stats()
        self.LOGGER.info("Table name {}".format(self.table))

    def testValidateNewTable(self):
        drop_table_if_exists(self.table)
        table_stream = FastTableStream(self.table)
        table_stream.validate_write_inputs()

    def testValidateFailNewEmptyTable(self):
        drop_table_if_exists(self.table)
        del self.table[:]
        table_stream = FastTableStream(self.table)
        try:
            table_stream.validate_write_inputs()
        except ValueError:
            # Expected error
            return
        self.fail(
            "Expected a ValueError if called on to create a new table with no fields"
        )

    def testValidateExistingTable(self):
        drop_table_if_exists(self.table)
        self.db_context.executeNoResults(self.table.definition)
        table_stream = FastTableStream(self.table)
        table_stream.validate_write_inputs()

    def testValidateExistingTableAgainstEmptySpec(self):
        drop_table_if_exists(self.table)
        self.db_context.executeNoResults(self.table.definition)
        del self.table[:]
        table_stream = FastTableStream(self.table)
        table_stream.validate_write_inputs()

    def testValidateFailWrongColumnCount(self):
        drop_table_if_exists(self.table)
        self.db_context.executeNoResults(self.table.definition)
        self.table.add(FieldSpec('col7', 'NVARCHAR', 15))
        table_stream = FastTableStream(self.table)
        try:
            table_stream.validate_write_inputs()
        except ValueError:
            # Expected error
            return
        self.fail(
            "Expected a ValueError if TableSpec has different column count from db"
        )

    def testValidateFailWrongColumnName(self):
        drop_table_if_exists(self.table)
        self.db_context.executeNoResults(self.table.definition)
        col1 = self.table.pop('col1')
        col1.field_name = 'col_new'
        self.table.add(col1)
        table_stream = FastTableStream(self.table)
        try:
            table_stream.validate_write_inputs()
        except ValueError:
            # Expected error
            return
        self.fail("Expected a ValueError if column names do not match")

    def testValidateFailWrongColumnType(self):
        drop_table_if_exists(self.table)
        self.db_context.executeNoResults(self.table.definition)
        self.table['col1'].basic_type = 'VARCHAR'
        table_stream = FastTableStream(self.table)
        try:
            table_stream.validate_write_inputs()
        except ValueError:
            # Expected error
            return
        self.fail(
            "Expected a ValueError if db column type is different from TableSpec column type"
        )

    def testValidateFailColumnTooShort(self):
        drop_table_if_exists(self.table)
        self.db_context.executeNoResults(self.table.definition)
        self.table['col1'].data_length = 100
        table_stream = FastTableStream(self.table)
        try:
            table_stream.validate_write_inputs()
        except ValueError:
            # Expected error
            return
        self.fail(
            "Expected a ValueError if db column is shorter than TableSpec column"
        )

    def testValidateColumnLonger(self):
        drop_table_if_exists(self.table)
        self.db_context.executeNoResults(self.table.definition)
        self.table['col1'].data_length = 1
        table_stream = FastTableStream(self.table)
        table_stream.validate_write_inputs()
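
The testValidateFail* methods above all use a try/except/self.fail() idiom to assert that validate_write_inputs() raises ValueError. Assuming Python 2.7 or later, where unittest's assertRaises works as a context manager, the same check can be written more compactly. The method below is a hypothetical rewrite of testValidateFailWrongColumnCount, shown as a sketch rather than part of the original suite:

    def testValidateFailWrongColumnCountCompact(self):
        # Hypothetical variant using assertRaises as a context manager.
        drop_table_if_exists(self.table)
        self.db_context.executeNoResults(self.table.definition)
        self.table.add(FieldSpec('col7', 'NVARCHAR', 15))
        table_stream = FastTableStream(self.table)
        with self.assertRaises(ValueError):
            table_stream.validate_write_inputs()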