# uses the table dictionary (found in the myspot module unless specified here)
parser.add_argument("-d", "--dict",
        help="path to YAML table specification", default='table')
args = parser.parse_args()

t_tab = args.target_table
o_tab = args.target_table + args.suffix_original
c_tab = args.target_table + args.suffix_index

print "\nOK: Running index_table.py for %s\n" % o_tab

cursor, connection = sql_connect(args.target_db, connection=True)

tdict = get_yaml_dict('table', local_dict=True)
fdict = get_yaml_dict('field', local_dict=True)
cdict = get_yaml_dict('corrections', local_dict=True)
tspec = get_tspec(tdict, t_tab)
pkeys = tspec['pkey']

# if there are excel sheet, row references then grab these as well
field_names = sql_get_cols(args.target_db, o_tab)
if 'modifiedat' in field_names:
    field_names.remove('modifiedat')
excel_source = False
if 'excel_sheet' in field_names and 'excel_row' in field_names:
    excel_source = True
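# NB: sql_get_cols comes from the myspot module and is not shown in this
# fragment; a minimal sketch of the assumed behaviour (hypothetical names,
# not the actual myspot implementation):
#
#     def sql_get_cols(db_name, tab_name):
#         """Return the column names of db_name.tab_name as a list."""
#         cursor = sql_connect(db_name)
#         cursor.execute("SHOW COLUMNS FROM %s.%s" % (db_name, tab_name))
#         return [row[0] for row in cursor.fetchall()]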
    source_db = target_db
else:
    # see above - tables will have diff names if in same db
    source_db = args.source_db

table_dict_name = table_in.lower()
print "OK: Input table: %s from %s\nOK: Target database: %s" % (
    table_in, source_db, target_db)

# check and connect to database provided using 'target_db'
cursor_out = myspot.sql_connect(target_db)
cursor_in = myspot.sql_connect(source_db)

table_out = table_dict_name + '_import'
if myspot.sql_tab_exists(target_db, table_out):
    print "WARNING: table %s already exists in database %s" % (
        table_out, target_db)
    if args.replace_table:
        # drop the stale copy so the import can recreate it
        stmt = "DROP TABLE IF EXISTS %s" % table_out
        cursor_out.execute(stmt)
    else:
        print "ERROR: You must specify the '-replace' option to delete existing tables"
        sys.exit(1)

# check you can open the dictionaries OK
tdict = myspot.get_yaml_dict('table', local_dict=True)
fdict = myspot.get_yaml_dict('field', local_dict=True)
# falias_dict = myspot.reverse_field_dict(local_dict=True)
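# NB: sql_tab_exists (used above) is assumed to probe information_schema;
# a hypothetical equivalent, not the actual myspot implementation:
#
#     def sql_tab_exists(db_name, tab_name):
#         cursor = myspot.sql_connect('information_schema')
#         cursor.execute(
#             "SELECT COUNT(*) FROM tables"
#             " WHERE table_schema = %s AND table_name = %s",
#             (db_name, tab_name))
#         return cursor.fetchone()[0] > 0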
    fdict_lookup[f['fname'].lower()] = f

# Get primary key definition - should be a list of fields
pkeys = tuple(tspec['pkey'])

# Derived fields
if 'derivedfields' in tspec:
    derivedfields = tspec['derivedfields']
else:
    derivedfields = []

# SQL database connection
cursor = sql_connect(db_name)

# Run pre-flight SQL script
if 'preflight' in tspec and tspec['preflight'] is not None:
    sql_multistmt(cursor, tspec['preflight'])

# Deleting the existing table here is disabled: field list order should
# depend on sql_get_cols rather than on sourcefields or sql_select
# stmt = "DROP TABLE IF EXISTS %s" % tab_name
# cursor.execute(stmt)

# Decide if this is a primary table or a derived one
# i.e. if you need an sql_create statement
if tspec['type'] == 'primary' and 'sql_select' not in tspec:
    # Assume that raw indexed 'source tables' always have the suffix _ix
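# NB: sql_multistmt (used for the pre-flight script above) is assumed to run
# a semicolon-separated SQL script one statement at a time; a rough sketch
# (the naive split on ';' is illustrative only):
#
#     def sql_multistmt(cursor, sql_script):
#         for stmt in sql_script.split(';'):
#             if stmt.strip():
#                 cursor.execute(stmt)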
    XL_DATEMODE = xlbook.datemode
except Exception:
    print "ERROR: open_workbook method failed on '%s'" % input_file
    sys.exit(1)

print "OK: Will import %r" % xlsheets

# ditto for tables
if args.this_table is None:
    print "OK: Will generate new table for each sheet"
else:
    print "OK: Will create table %s" % args.this_table

# check and connect to database provided using 'target_db'
# __________________________________________________________
my_cursor = myspot.sql_connect(target_db)

# check you can open the dictionaries OK
# ____________________________________________
tdict = myspot.get_yaml_dict('table', local_dict=True)
tdict_lookup = {t['tablename']: t for t in tdict}
fdict = myspot.get_yaml_dict('field', local_dict=True)
fdict_lookup = {f['fname']: f for f in fdict}
reverse_fdict = myspot.reverse_field_dict()

# =============================================================
# = Check sheet not already imported using excel_sheets table =
# =============================================================
new_sheets = []
for sheet in xlsheets:
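    # A hypothetical sketch of the per-sheet check that presumably follows
    # here: the excel_sheets bookkeeping table records previously imported
    # sheets, so only unseen sheet names are queued (illustrative only):
    #
    #     my_cursor.execute(
    #         "SELECT COUNT(*) FROM excel_sheets WHERE sheet_name = %s",
    #         (sheet,))
    #     if my_cursor.fetchone()[0] == 0:
    #         new_sheets.append(sheet)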
        sh_dict.write(i + tab_offset, 1, fdict_lookup[fname]['varlab'])
        sh_dict.write(i + tab_offset, 2, tab)
        sh_dict.write(i + tab_offset, 3, tab_label)
        next_offset = i + tab_offset
    tab_offset = next_offset + 1

dvr_book.save('../sitecomms/outgoing/' + dvr_filename)

# ========================
# = Add key errors sheet =
# ========================
kerror_headings_original = [
    'validation_msg', 'key_fields', 'key_values', 'missing_fields']
columns_to_skip = ['modifiedat', 'sourceFileTimeStamp']
columns_to_redact = ['dob', 'namef', 'namel', 'idnhs', 'idpcode']

cursor = sql_connect('spotid')
for tab, tab_name in source_tables.items():
    kerror_fields = sql_get_cols('spotid', 'keys_dvr')
    stmt = """SELECT %s FROM keys_dvr
        WHERE locate('%s', key_values)
        AND sql_table = '%s' """ % (
            ', '.join(kerror_fields), sitecode, tab + '_import')
    cursor.execute(stmt)
    kerror_rows = cursor.fetchall()
    # skip on if no key errors
    if not kerror_rows:
        continue
    # create dictionary of kerror vals
    krows = []
    for kerror_row in kerror_rows:
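# A hypothetical sketch of how columns_to_redact might be applied to each
# krow before it is written to the outgoing workbook (the actual redaction
# code falls outside this fragment):
#
#     for krow in krows:
#         for col in columns_to_redact:
#             if col in krow:
#                 krow[col] = 'REDACTED'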