Example 1
parser.add_argument("-d",
                    "--dict",
                    help=":path to YAML table specification",
                    default='table')

args = parser.parse_args()

t_tab = args.target_table
o_tab = args.target_table + args.suffix_original
c_tab = args.target_table + args.suffix_index
print "\nOK: Running index_table.py for %s\n" % o_tab

cursor, connection = sql_connect(args.target_db, connection=True)

tdict = get_yaml_dict('table', local_dict=True)
fdict = get_yaml_dict('field', local_dict=True)
cdict = get_yaml_dict('corrections', local_dict=True)

tspec = get_tspec(tdict, t_tab)
pkeys = tspec['pkey']

# if there are Excel sheet / row references then grab these as well
field_names = sql_get_cols(args.target_db, o_tab)
if 'modifiedat' in field_names:
    field_names.remove('modifiedat')

excel_source = False
if 'excel_sheet' in field_names and 'excel_row' in field_names:
    excel_source = True
#     stmt = """ SELECT excel_sheet, excel_row FROM %s """ % (o_tab)
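For context, a minimal sketch of the kind of YAML table specification this script appears to consume. Only the 'pkey' key is confirmed by the excerpt above; the table name and surrounding layout are assumptions.

import yaml

# Hypothetical entry in the 'table' YAML dictionary; only 'pkey' is
# confirmed by the excerpt above, the rest is illustrative.
example_tdict = yaml.safe_load("""
idpatient:
    pkey:
        - sitecode
        - idpatient
""")
pkeys = example_tdict['idpatient']['pkey']
print(pkeys)  # ['sitecode', 'idpatient']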
Example 2
# sys.exit(1)
# check and connect to database provided using 'target_db'
cursor_out = myspot.sql_connect(target_db)
cursor_in = myspot.sql_connect(source_db)

table_out = table_dict_name + '_import'
if myspot.sql_tab_exists(target_db, table_out):
    print "WARNING: table %s already exists in database %s" % (target_db, table_out)
    if args.replace_table:
        stmt = "DROP TABLE IF EXISTS %s " % (table_out)
    else:
        print "ERROR: You must specify the '-replace' option to delete existing tables"
        sys.exit(1)

# check you can open the dictionaries OK
tdict = myspot.get_yaml_dict('table', local_dict=True)
fdict = myspot.get_yaml_dict('field', local_dict=True)
# falias_dict = myspot.reverse_field_dict(local_dict = True)
# now focus the falias_dict on the likely tables
falias_dict = myspot.make_talias_dict(fdict)
talias_dict = falias_dict['no_alias']
# print len(talias_dict)
if table_dict_name in falias_dict:
    # print table_dict_name
    # print falias_dict[table_dict_name]
    talias_dict.update(falias_dict[table_dict_name])
talias_dict = {k.lower(): v for k, v in talias_dict.items()}
debug = False
if debug:
    print len(talias_dict)
    print talias_dict
Example 3
args = parser.parse_args()

# define the regex to match filenames with yml extension
filename_target = re.compile(r"""\w+?\.yml""")
check_file(args.var_file, filename_target)
var_file = args.var_file
attribute_format = args.format

my_path = os.path.split(var_file)[0]

with open(var_file, 'r') as var_file_object:
    my_vars = yaml.safe_load(var_file_object.read())
# Strip out duplicates
my_vars = list(set(my_vars))

fdict = get_yaml_dict('field', return_type='dictionary', local_dict=True)

noformat_list = ['fname', 'stataformat']
format_list = ['tablerowlabel', 'unitlabel']
vallab_list = ['var_level', 'var_level_lab']
attribute_list = []
attribute_list.extend(noformat_list)
attribute_list.extend(format_list)
output_list = []
output = []
# CHANGED: 2013-01-25 - Stata does not handle the quotes well
# output_list.append("""'""" + """'\t'""".join(attribute_list) + """'\n""")
column_headers = copy.deepcopy(attribute_list)
column_headers.extend(vallab_list)
output_list.append("""\t""".join(column_headers) + """\n""")
print output_list
Example 4
# uses the table dictionary (found in the myspot module unless specified here)
parser.add_argument("-d", "--dict",
    help=":path to YAML table specification",
    default='table')

args = parser.parse_args()

t_tab = args.target_table
o_tab = args.target_table + args.suffix_original
c_tab = args.target_table + args.suffix_index
print "\nOK: Running index_table.py for %s\n" % o_tab

cursor, connection = sql_connect(args.target_db, connection=True)

tdict = get_yaml_dict('table', local_dict=True)
fdict = get_yaml_dict('field', local_dict=True)
cdict = get_yaml_dict('corrections', local_dict=True)

tspec = get_tspec(tdict, t_tab)
pkeys = tspec['pkey']

# if there are Excel sheet / row references then grab these as well
field_names = sql_get_cols(args.target_db, o_tab)
if 'modifiedat' in field_names:
    field_names.remove('modifiedat')

excel_source = False
if 'excel_sheet' in field_names and 'excel_row' in field_names:
    excel_source = True
#     stmt = """ SELECT excel_sheet, excel_row FROM %s """ % (o_tab)
Example 5
#!/usr/bin/python
#  ====================================================
#  = Export field dictionary in human readable format =
#  ====================================================
import sys      # needed below to adjust the module search path
# TODO: 2012-08-17 - push local myspot changes back to main myspot
# Use local version of myspot module
# You can switch back to using the main myspot module if you push local changes back

sys.path.remove('/Users/steve/usr/local/lib')
sys.path.append('/Users/steve/data/spot_id/local/lib_usr')
import myspot
import mypy
import os

fdict_unsorted = myspot.get_yaml_dict('field', return_type='list', local_dict=True)
# print fdict[:2]
fdict = sorted(fdict_unsorted, key=lambda f: f['fname'].lower())

print len(fdict)

text_list = []

for f in fdict:
    # skip derived fields and private (underscore-prefixed) names
    if f.get('derived'):
        continue
    if f['fname'].startswith('_'):
        continue
    if 'vallab' in f:
        vallab = ["\n- Code and label\n"]
Example 6
# check and connect to database provided using 'target_db'
cursor_out = myspot.sql_connect(target_db)
cursor_in = myspot.sql_connect(source_db)

table_out = table_dict_name + '_import'
if myspot.sql_tab_exists(target_db, table_out):
    print "WARNING: table %s already exists in database %s" % (target_db,
                                                               table_out)
    if args.replace_table:
        stmt = "DROP TABLE IF EXISTS %s " % (table_out)
    else:
        print "ERROR: You must specify the '-replace' option to delete existing tables"
        sys.exit(1)

# check you can open the dictionaries OK
tdict = myspot.get_yaml_dict('table', local_dict=True)
fdict = myspot.get_yaml_dict('field', local_dict=True)
# falias_dict = myspot.reverse_field_dict(local_dict = True)
# now focus the falias_dict on the likely tables
falias_dict = myspot.make_talias_dict(fdict)
talias_dict = falias_dict['no_alias']
# print len(talias_dict)
if table_dict_name in falias_dict:
    # print table_dict_name
    # print falias_dict[table_dict_name]
    talias_dict.update(falias_dict[table_dict_name])
talias_dict = {k.lower(): v for k, v in talias_dict.items()}
debug = False
if debug:
    print len(talias_dict)
    print talias_dict
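The alias handling above layers table-specific aliases over the global ('no_alias') set, then lower-cases the keys. A standalone sketch of that logic, assuming the dicts map alias -> canonical field name; the exact structure returned by myspot.make_talias_dict is an assumption.

# Invented alias data for illustration
falias_dict = {
    'no_alias': {'SiteCode': 'sitecode'},
    'idpatient': {'DOB': 'dob'},
}
table_dict_name = 'idpatient'

talias_dict = dict(falias_dict['no_alias'])               # global aliases
talias_dict.update(falias_dict.get(table_dict_name, {}))  # table-specific overrides
talias_dict = {k.lower(): v for k, v in talias_dict.items()}
print(talias_dict)  # {'sitecode': 'sitecode', 'dob': 'dob'}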
Example 7
mysql_output_lvl = args.output_lvl
print "\nOK: Running make_table.py for %s \n" % tab_name

if args.sql_out is None:
    sql_out = db_name
else:
    sql_out = args.sql_out

#  =====================
#  = Load dictionaries =
#  =====================

# TODO: 2012-08-17 - reset to global dict when development finished
fdict = get_yaml_dict('field', local_dict=True)
tdict = get_yaml_dict('table', local_dict=True)
vdict = get_yaml_dict('checks', local_dict=True)

tspec = get_tspec(tdict, tab_name)

talias_dict = make_talias_dict(fdict)
# print talias_dict
falias_dict = talias_dict['no_alias']
# print len(falias_dict)
# print tab_name
if tab_name in talias_dict:
    falias_dict.update(talias_dict[tab_name])
# print len(falias_dict)
# print falias_dict
Example 8
tab_name = args.table_spec
pickle_it = args.pickle_it
mysql_output_lvl = args.output_lvl
print "\nOK: Running make_table.py for %s \n" % tab_name

if args.sql_out is None:
    sql_out = db_name
else:
    sql_out = args.sql_out

#  =====================
#  = Load dictionaries =
#  =====================

# TODO: 2012-08-17 - reset to global dict when development finished
fdict = get_yaml_dict('field', local_dict=True)
tdict = get_yaml_dict('table', local_dict=True)
vdict = get_yaml_dict('checks', local_dict=True)

tspec = get_tspec(tdict, tab_name)

talias_dict = make_talias_dict(fdict)
# print talias_dict
falias_dict = talias_dict['no_alias']
# print len(falias_dict)
# print tab_name
if tab_name in talias_dict:
    falias_dict.update(talias_dict[tab_name])
# print len(falias_dict)
# print falias_dict
Example 9
# NOTE: the original def line is missing from this excerpt; the function
# name md_to_latex is a reconstruction
def md_to_latex(md):
    p = subprocess.Popen(['pandoc', '--from=markdown', '--to=latex'],
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    return p.communicate(md)[0]

import subprocess
import sys      # file input / output
from unidecode import unidecode
import yaml
import myspot
import mypy
import os
import string

# =================================
# Step 3 - get the field dictionary
fdict = myspot.get_yaml_dict('field', return_type='dictionary', local_dict=False)
print len(fdict)

# Now define your own appendix - don't use latex glossary
# _______________________________________________________
entries = {}
for k, fentry in fdict.items():
    if 'definition' not in fentry:
        continue
    if fentry.get('source') is None:
        continue
    entry = []
    if 'tablerowlabel' in fentry:
        entry_heading = "## %s" % fentry['tablerowlabel']['latex']
    else:
        entry_heading = "## %s" % fentry['varlab']
Example 10
args = parser.parse_args()

# define the regex to match filenames with yml extension
filename_target = re.compile(r"""\w+?\.yml""")
check_file(args.var_file, filename_target)
var_file = args.var_file
attribute_format = args.format

my_path = os.path.split(var_file)[0]

var_file_object = open(var_file, "r")
my_vars = yaml.load(var_file_object.read())
# Strip out duplicates
my_vars = list(set(my_vars))

fdict = get_yaml_dict("field", return_type="dictionary", local_dict=True)

noformat_list = ["fname", "stataformat"]
format_list = ["tablerowlabel", "unitlabel"]
vallab_list = ["var_level", "var_level_lab"]
attribute_list = []
attribute_list.extend(noformat_list)
attribute_list.extend(format_list)
output_list = []
output = []
# CHANGED: 2013-01-25 - Stata does not handle the quotes well
# output_list.append("""'""" + """'\t'""".join(attribute_list) + """'\n""")
column_headers = copy.deepcopy(attribute_list)
column_headers.extend(vallab_list)
output_list.append("""\t""".join(column_headers) + """\n""")
print output_list
Example 11
args = parser.parse_args()

# define the regex to match filenames with yml extension
filename_target = re.compile(r"""\w+?\.yml""")
check_file(args.var_file, filename_target)
var_file = args.var_file
attribute_format = args.format

my_path = os.path.split(var_file)[0]

with open(var_file, 'r') as var_file_object:
    my_vars = yaml.safe_load(var_file_object.read())
# Strip out duplicates
my_vars = list(set(my_vars))

fdict = get_yaml_dict('field', return_type='dictionary', local_dict=True)

noformat_list = ['fname', 'stataformat']
format_list = ['tablerowlabel', 'unitlabel']
vallab_list = ['var_level', 'var_level_lab']
attribute_list = []
attribute_list.extend(noformat_list)
attribute_list.extend(format_list)
output_list = []
output = []
# CHANGED: 2013-01-25 - Stata does not handle the quotes well
# output_list.append("""'""" + """'\t'""".join(attribute_list) + """'\n""")
column_headers = copy.deepcopy(attribute_list)
column_headers.extend(vallab_list)
output_list.append("""\t""".join(column_headers) + """\n""")
print output_list
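The excerpt stops after printing the header row; presumably each variable's attributes are later appended to output_list and the whole list is written out as a tab-delimited file for Stata. A minimal sketch of that final step; the output filename is invented for illustration.

import os

# header row built exactly as in the excerpt above
column_headers = ['fname', 'stataformat', 'tablerowlabel', 'unitlabel',
                  'var_level', 'var_level_lab']
output_list = ['\t'.join(column_headers) + '\n']

out_path = os.path.join('.', 'var_attributes.txt')  # filename is an assumption
with open(out_path, 'w') as f:
    f.writelines(output_list)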
Example 12
- saves to ../sitecomms/outgoing unless otherwise specified
""")

    parser.add_argument("sitecode", help=":3 letter sitecode for which you require dvr")
    args = parser.parse_args()
    sitecode = args.sitecode.lower()
    sitecodes = []
    if sitecode == 'all':
        sitecodes.extend(['ucl', 'ket', 'sou', 'med', 'pol', 'yeo', 'nor', 'lis', 'har', 'rvi', 'fre'])
    else:
        sitecodes.append(sitecode)

# Pull the fdict lookup now because we need to look up the hospital name

fdict_lookup = get_yaml_dict('field', return_type='dictionary', local_dict=True)
# open the pickled data - this will take a long time!
pickle_rows = open_pickles()

#  =======================
#  = Pull table spec etc =
#  =======================
tdict_lookup = get_yaml_dict('table', return_type='dictionary', local_dict=True)
talias_dict = make_talias_dict(get_yaml_dict('field', local_dict=True))
source_tables = {
    'idpid': '(SPOT)id web portal',
    'idpatient': 'Initial assessment',
    'idvisit': 'Daily assessment',
    'idlab': 'Laboratory flow chart',
    'idlabuclh': 'Biological sampling (UCLH only)'
}
Example 13
    args = parser.parse_args()
    sitecode = args.sitecode.lower()
    sitecodes = []
    if sitecode == 'all':
        sitecodes.extend([
            'ucl', 'ket', 'sou', 'med', 'pol', 'yeo', 'nor', 'lis', 'har',
            'rvi', 'fre'
        ])
    else:
        sitecodes.append(sitecode)

# Pull the fdict lookup now because we need to look up the hospital name

fdict_lookup = get_yaml_dict('field',
                             return_type='dictionary',
                             local_dict=True)
# open the pickled data - this will take a long time!
pickle_rows = open_pickles()

#  =======================
#  = Pull table spec etc =
#  =======================
tdict_lookup = get_yaml_dict('table',
                             return_type='dictionary',
                             local_dict=True)
talias_dict = make_talias_dict(get_yaml_dict('field', local_dict=True))
source_tables = {
    'idpid': '(SPOT)id web portal',
    'idpatient': 'Initial assessment',
    'idvisit': 'Daily assessment',