Example #1
def _normalize_name( name ):
    if isinstance( name, FieldSpec ):
        return name.field_name
    elif isinstance( name, ( str, unicode ) ):
        return db_identifier_quote( name )
    else:
        raise ValueError( "Don't know how to index using a {}".format( name.__class__ ) )
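For context, here is a minimal, self-contained sketch showing how a normalizer like this can be exercised. The FieldSpec class and db_identifier_quote function below are hypothetical stand-ins (the real ones come from the surrounding codebase), and bracket quoting is only an assumption; the point is the dispatch-on-type pattern.

# Hypothetical stand-ins for illustration only -- not the real FieldSpec
# or db_identifier_quote from the codebase.
class FieldSpec( object ):
    def __init__( self, field_name ):
        self.field_name = field_name

def db_identifier_quote( name ):
    # Assumed bracket-style quoting; the real helper may behave differently.
    return "[{}]".format( name )

def _normalize_name( name ):
    # Same dispatch as the example above, restricted to Python 3's str.
    if isinstance( name, FieldSpec ):
        return name.field_name
    elif isinstance( name, str ):
        return db_identifier_quote( name )
    else:
        raise ValueError( "Don't know how to index using a {}".format( name.__class__ ) )

print( _normalize_name( FieldSpec( "[student_id]" ) ) )  # -> [student_id]
print( _normalize_name( "student_id" ) )                 # -> [student_id]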
Example #2
    def createTable( self ):
        """ Create a SQL table from a spreadsheet.
        
        Extended Overview
        ---------------------
        We call the getRows() method, and read in `buffer_size` rows into memory at a time, calling
        executemany() to insert them a chunk at a time. We do this in a loop until all rows are read in.
        
        TODO: How to handle if they have too many columns in the file? Can't add more than 1024
        """
        self._validate_create_table_inputs()
     
        # Delete table if it exists
        drop_table_if_exists( self.outputTable, self.db_context )
        
        # Get the row iterator and read one row so that self.outputTable is populated with column specs
        rowiter = self.getRows( self._table_list_factory )
        values = [ next( rowiter, None ) ]
        if values[0] is None:
            raise ValueError( "Empty file given" )
        
        # get column names for table before adding the import order column
        names = self.outputTable[:]
        
        # add import_order column
        self.outputTable.add( FieldSpec( self.importOrder,'bigint',identity=(1,1) ) )
        
        #create the table
        query = self.outputTable.definition
        self.db_context.executeNoResults( query )
        
        _insert_query = _INSERT_INTO_TABLE_QUERY.format( outputTable=self.outputTable,fields=Joiner( names ) )
        
        # now go through the file and get `buffer_size` arrays of values and insert them in chunks
        while True:
            # Fill the buffer with up to buffer_size rows
            while len( values ) < self.buffer_size:
                row = next( rowiter, None )
                if row is None:
                    break
                values.append( row )
            if not values:
                break
            # Insert the buffered rows into the table
            self.db_context.executemany( _insert_query, values )
            values = []
        
        # commit changes
        self.db_context.commit()

        # Drop all completely blank rows
        query = _DELETE_MISSING_QUERY_PATTERN.format( outputTable=self.outputTable,columnNames = Joiner( names ) )
        self.db_context.executeNoResults( query )
        
        # Now add the import_order column to the primary key
        query = _ADD_PK_QUERY.format( outputTable=self.outputTable, importOrder=db_identifier_quote( self.importOrder ) )
        self.db_context.executeNoResults( query )
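The buffered insert pattern described in the createTable() docstring can be reproduced with nothing but the Python standard library. The sketch below is a standalone illustration using sqlite3 and itertools.islice; the table name, columns, and sample rows are invented for the example and are not part of the original codebase.

# Standalone sketch of the chunked executemany() pattern: fill a buffer of up
# to buffer_size rows, flush it in one call, repeat until the iterator is empty.
import sqlite3
from itertools import islice

def bulk_insert( rows, buffer_size=1000 ):
    conn = sqlite3.connect( ":memory:" )
    conn.execute( "CREATE TABLE scores ( student_id TEXT, score INTEGER )" )
    rowiter = iter( rows )
    while True:
        chunk = list( islice( rowiter, buffer_size ) )
        if not chunk:
            break
        conn.executemany( "INSERT INTO scores VALUES ( ?, ? )", chunk )
    conn.commit()
    return conn

conn = bulk_insert( [ ( "A1", 93 ), ( "B2", 88 ), ( "C3", 75 ) ], buffer_size=2 )
print( conn.execute( "SELECT COUNT(*) FROM scores" ).fetchone()[0] )  # -> 3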
Example #3
def _get_best_schema(db_context, table_schema):
    """Return the "best" schema name to use.

    Returns the quoted table_schema if it is not None, otherwise db_context.schema.
    """
    if table_schema is None:
        return db_context.schema
    else:
        return db_identifier_quote(table_schema)
Example #4
def db_name(self):
    return db_identifier_quote( self.db )
Example #5
def schema(self, value):
    self._schema = db_identifier_quote( value )
Example #6
def alias(self, value):
    self._alias = db_identifier_quote(value)
Example #7
def field_name(self, value):
    self._field_name = db_identifier_quote(value)
Example #8
def table_schema( self, value ):
    self._table_schema = db_identifier_quote( value )
Example #9
def table_name( self, value ):
    if isinstance( value, TableSpec ):
        self._table_name = value.table_name
    else:
        self._table_name = db_identifier_quote( value )
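Every example on this page routes identifiers through db_identifier_quote, whose implementation is not shown here. The sketch below is only a guess at what such a helper commonly looks like for SQL Server-style bracket quoting; it is not the actual function from the codebase.

# Hypothetical illustration only -- NOT the real db_identifier_quote. Wrap the
# name in brackets unless it is already quoted, escaping any closing bracket.
def quote_identifier( name ):
    name = name.strip()
    if name.startswith( "[" ) and name.endswith( "]" ):
        return name
    return "[{}]".format( name.replace( "]", "]]" ) )

print( quote_identifier( "student_id" ) )    # -> [student_id]
print( quote_identifier( "[student_id]" ) )  # -> [student_id]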