def insert_ssr(id, sess, yr, bldg, room, billcode, intendhsg, rsvstat, EARL):
    """Insert a basic stu_serv_rec housing row.

    Arguments mirror the stu_serv_rec columns; EARL selects the target
    database. Returns 1 on success, 0 on any failure (error is printed).
    """
    # Parameterized insert (qmark style, as used elsewhere in this file,
    # e.g. fn_update_local) instead of the old str.format interpolation,
    # which was exposed to quoting/injection problems.
    q_ins = '''insert into stu_serv_rec (
        id, sess, yr, rsv_stat, intend_hsg, campus, bldg, room,
        pref_rm_type, roommate_sts, park_location, bill_code)
        values (?, ?, ?, ?, ?, "MAIN", ?, ?, "", "", "", ?)'''
    q_args = (id, sess, yr, rsvstat, intendhsg, bldg, room, billcode)
    try:
        connection = get_connection(EARL)
        # connection closes when exiting the 'with' block
        with connection:
            cur = connection.cursor()
            cur.execute(q_ins, q_args)
            connection.commit()
        return 1
    except Exception as e:
        print("Error on insert " + repr(e))
        return 0
def check_for_constituents(EARL):
    """Sync recently added Blackbaud constituents into local cvid_rec rows.

    Fetches constituents whose 'Student Status' custom field changed since
    the cached last-search date and, for any Blackbaud id not already in
    cvid_rec, looks up the Carthage id via the API and stores the pair.
    On error, logs and emails the failure.
    """
    try:
        # --------GET THE TOKEN------------------
        current_token = fn_do_token()
        # The date of the last search is stored in cache.
        # UPDATE 1/17/20: it may instead become a csv list from
        # advancement of the students added; then we would read that csv
        # and find the BB_ID only for those students.
        searchtime = cache.get('Sql_date')
        # API call to get BB IDs changed since that date.
        result = get_constituents_custom_field_list(current_token, str(searchtime))
        if result == 0:
            print("No recent student entries in RE")
        else:
            print(result)
            for i in result['value']:
                bb_id = i["parent_id"]
                # Look for the Blackbaud ID in cvid_rec.
                # (renamed the fetched row from `x` — the original
                # reassigned the dict it was iterating)
                chk_sql = '''select cx_id, re_api_id from cvid_rec
                    where re_api_id = {}'''.format(i['parent_id'])
                connection = get_connection(EARL)
                with connection:
                    data_result = xsql(chk_sql, connection, key='debug')
                    row = data_result.fetchone()
                # Create the cvid_rec if it doesn't exist - requires a
                # second API call to retrieve the carthage id using the
                # blackbaud id.
                if row is None:
                    carth_id = get_lookup_id(current_token, bb_id)
                    ret = fn_update_local(carth_id, bb_id, EARL)
                else:
                    print("CVID Rec exists for" + str(row[0]))
    except Exception as e:
        fn_write_error("Error in sky_constituent_list.py - Main: " + repr(e))
        # BUG FIX: the original ended with "+ + repr(e)" — a unary plus
        # applied to a str — which raised TypeError *inside* this error
        # handler. Also dropped the unused `sqlstate = e.args[1]`, which
        # could itself raise IndexError and mask the real error.
        fn_send_mail(
            settings.BB_SKY_TO_EMAIL,
            settings.BB_SKY_FROM_EMAIL,
            "SKY API ERROR",
            "Error in " "sky_constituent_list.py - for: " + repr(e))
def main():
    """Maxient Upload via sftp."""
    sql_path = os.path.join(
        settings.BASE_DIR,
        'sql/maxient/demographic.sql',
    )
    with open(sql_path) as sql_file:
        query = sql_file.read()
    with get_connection() as connection:
        people = xsql(query, connection, key=settings.INFORMIX_DEBUG).fetchall()
        if people:
            # Destination file for the pipe-delimited export.
            filename = '{0}CARTHAGE_DEMOGRAPHICS_DATA.txt'.format(
                settings.MAXIENT_CSV_OUTPUT,
            )
            with open(filename, 'w') as maxientfile:
                writer = csv.writer(maxientfile, delimiter='|')
                if DEBUG:
                    # No Header required but used for testing
                    writer.writerow(settings.MAXIENT_HEADERS)
                for person in people:
                    writer.writerow(person)
            # SFTP connection information
            cnopts = pysftp.CnOpts()
            cnopts.hostkeys = None
            sftp_kwargs = {
                'host': settings.MAXIENT_HOST,
                'username': settings.MAXIENT_USER,
                'private_key': settings.MAXIENT_PKEY,
                'private_key_pass': settings.MAXIENT_PASS,
                'cnopts': cnopts,
            }
            # go to our storage directory on the server
            os.chdir(settings.MAXIENT_CSV_OUTPUT)
            try:
                with pysftp.Connection(**sftp_kwargs) as sftp:
                    sftp.chdir('incoming/')
                    sftp.put(filename, preserve_mtime=True)
                    if DEBUG:
                        print("success: MAXIENT UPLOAD")
            except Exception as error:
                send_mail(
                    None,
                    settings.MAXIENT_TO_EMAIL,
                    '[Maxient SFTP] MAXIENT UPLOAD failed',
                    settings.MAXIENT_FROM_EMAIL,
                    'email.html',
                    'Unable to upload to Maxient server.\n\n{0}'.format(error),
                )
                if DEBUG:
                    print(error)
        else:
            print('There was a no values in list error')
def main():
    """OCLC Synchronization."""
    sql_path = os.path.join(
        settings.BASE_DIR,
        'sql/oclc/student_facstaff.sql',
    )
    with open(sql_path) as sql_file:
        query = sql_file.read()
    with get_connection() as connection:
        results = xsql(query, connection, key=settings.INFORMIX_DEBUG).fetchall()
        # One dict per person, in the shape the personas template expects.
        folks = [
            {
                'lastname': person.lastname,
                'firstname': person.firstname,
                'middlename': person.middlename,
                'id': person.id,
                'addr_line1': person.addr_line1,
                'addr_line2': person.addr_line2,
                'city': person.city,
                'st': person.st,
                'ctry': person.ctry,
                'zip': person.zip,
                'phone': person.phone,
                'email': person.email,
                'groupIndex': person[settings.OCLC_GROUPINDEX_LIST_INDEX],
                'grouping': person.grouping,
                'expirationDate': person.expirationdate,
            }
            for person in results
        ]
    template = loader.get_template('oclc/personas.xml')
    xml = template.render({'objs': folks, 'next_year': NEXT_YEAR})
    xml_path = "{0}carthage_personas_draft_{1:%Y-%m-%d}.xml".format(
        settings.OCLC_LOCAL_PATH,
        NOW,
    )
    cnopts = pysftp.CnOpts()
    cnopts.hostkeys = None
    sftp_kwargs = {
        'host': settings.OCLC_XTRNL_SRVR,
        'username': settings.OCLC_XTRNL_USER,
        'password': settings.OCLC_XTRNL_PASS,
        'cnopts': cnopts,
    }
    with io.open(xml_path, 'w', encoding='utf8') as xml_file:
        xml_file.write(xml)
    xfile = "carthage_personas_draft_{0:%Y-%m-%d}.xml".format(NOW)
    with pysftp.Connection(**sftp_kwargs) as sftp:
        sftp.cwd(settings.OCLC_XTRNL_PATH)
        sftp.put(xml_path, xfile)
        sftp.close()
def fn_check_cx_records(totcod, prd, jndate, stuid, amt, EARL):
    """Return 1 when a matching S/A billing entry exists in CX, else 0.

    Looks for a subtr_rec row joined through subt/sube/suba to id_rec that
    matches the total code, period, journal date, student id and amount.
    Returns 0 on any database error as well.
    """
    try:
        billqry = '''select SA.id, IR.fullname, ST.subs_no, SE.jrnl_date,
            ST.prd, ST.subs, STR.bal_code, ST.tot_code, SE.descr,
            SE.ctgry, STR.amt, ST.amt_inv_act, SA.stat
            from subtr_rec STR
            left join subt_rec ST on STR.subs = ST.subs
                and STR.subs_no = ST.subs_no
                and STR.tot_code = ST.tot_code
                and STR.tot_prd = ST.prd
            left join sube_rec SE on SE.subs = STR.subs
                and SE.subs_no = STR.subs_no
                and SE.sube_no = STR.ent_no
            left join suba_rec SA on SA.subs = SE.subs
                and SA.suba_no = SE.subs_no
            left join id_rec IR on IR.id = SA.id
            where STR.subs = 'S/A'
            and STR.tot_code = "{0}"
            and STR.tot_prd = "{1}"
            and jrnl_date = "{2}"
            and IR.id = {3}
            and STR.amt = {4}
            '''.format(totcod, prd, jndate, stuid, amt)
        connection = get_connection(EARL)
        # connection closes when exiting the 'with' block
        with connection:
            data_result = xsql(
                billqry, connection,
                key=settings.INFORMIX_DEBUG).fetchall()
            matches = list(data_result)
        return 1 if matches else 0
    except Exception as e:
        print("Error in misc_fees.py - fn_check_cx_records: " + repr(e))
        return 0
def main():
    """Package concierge upload."""
    # Map the command-line database name onto an ODBC connection string.
    if database == 'cars':
        earl = settings.INFORMIX_ODBC
    elif database == 'train':
        earl = settings.INFORMIX_ODBC_TRAIN
    else:
        print('invalid database name: {0}'.format(database))
        sys.exit()
    sql_path = os.path.join(
        settings.BASE_DIR,
        'sql/concierge/students.sql',
    )
    with open(sql_path) as sql_file:
        query = sql_file.read()
    with get_connection(earl) as connection:
        rows = xsql(query, connection, key=settings.INFORMIX_DEBUG).fetchall()
        if rows:
            # Destination of the export consumed by the concierge service.
            filename = '{0}students.csv'.format(settings.CONCIERGE_CSV_OUTPUT)
            with open(filename, 'w') as csvfile:
                writer = csv.writer(csvfile)
                writer.writerow([
                    'Unit Code',
                    'First Name',
                    'Last Name',
                    'Email Address',
                    'Cell Phone',
                ])
                for row in rows:
                    writer.writerow([
                        row.unitcode,
                        row.firstname,
                        row.lastname,
                        row.emailaddress,
                        row.cellphone,
                    ])
            if not DEBUG:
                file_upload(filename)
        else:
            send_mail(
                None,
                TO,
                SUBJECT(status='failed'),
                FROM,
                'email.html',
                'No values in list.',
            )
def get_finaid(cid):
    """Determine if the student must complete the Exit Counseling Form."""
    sql_path = os.path.join(settings.BASE_DIR, 'sql/finaid.sql')
    with open(sql_path) as sql_file:
        sql = '{0} AND id={1}'.format(sql_file.read(), cid)
    with get_connection() as connection:
        row = xsql(sql, connection, key=settings.INFORMIX_DEBUG).fetchone()
    # Any matching row means the form is required.
    return bool(row)
def get_orgs(cid):
    """Fetch the clubs and orgs from informix."""
    phile = os.path.join(settings.BASE_DIR, 'sql/clubsorgs_student.sql')
    with open(phile) as incantation:
        sql = '{0} {1}'.format(incantation.read(), cid)
    with get_connection() as connection:
        rows = xsql(sql, connection, key=settings.INFORMIX_DEBUG).fetchall()
    # De-duplicate names while keeping first-seen order.
    return list(dict.fromkeys(row.name for row in rows))
def fn_update_local(carth_id, bb_id, EARL):
    """Store the Blackbaud API id on the student's cvid_rec row.

    Returns 1 on success, 0 on a database error.
    """
    update_sql = '''UPDATE cvid_rec SET re_api_id = ? WHERE cx_id = ? '''
    try:
        connection = get_connection(EARL)
        with connection:
            connection.cursor().execute(update_sql, (bb_id, carth_id))
        return 1
    except pyodbc.Error as err:
        print("Error in fn_update_local: " + str(err))
        sqlstate = err.args[0]
        print(sqlstate)
        return 0
def main():
    """Print lastname/firstname/email for the first 10 provisioned users.

    When ``test`` is set, the SQL is only printed and logged; otherwise
    the query runs against ``provisioning_vw`` and each person's fields
    are printed.
    """
    # NOTE(review): ``who`` is interpolated as a column name; it comes
    # from the command line, not end users, so format() is tolerated here.
    sql = """
        SELECT lastname, firstname, username
        FROM provisioning_vw
        WHERE {} is not null
        ORDER BY lastname, firstname
        LIMIT 10
    """.format(who)
    if test:
        print("sql = {}".format(sql))
        logger.debug("sql = {}".format(sql))
        return
    connection = get_connection()
    # BUG FIX: the original never closed the connection; ensure cleanup
    # even if the query raises.
    try:
        cursor = connection.cursor()
        peeps = [
            {
                'lastname': obj[0],
                'firstname': obj[1],
                'email': '{}@carthage.edu'.format(obj[2]),
            }
            for obj in cursor.execute(sql)
        ]
    finally:
        connection.close()
    for person in peeps:
        for field, value in person.items():
            print(field, value)
def main():
    """Reset (or inspect) the bogus 9999 class_year values."""
    # Pick the ODBC source that matches the CLI database argument.
    if database == 'jxtest':
        earl = settings.INFORMIX_ODBC_JXTEST
    elif database == 'jxlive':
        earl = settings.INFORMIX_ODBC_JXPROD
    else:
        earl = None
    if test:
        print(earl)
    with get_connection(earl) as connection:
        if action == 'update':
            sql = '''
                UPDATE class_year SET class_year=0 WHERE class_year=9999
            '''
            if test:
                print(sql)
            else:
                xsql(sql, connection, key=settings.INFORMIX_DEBUG)
        elif action == 'select':
            sql = '''
                SELECT * FROM class_year WHERE class_year=9999
            '''
            result = xsql(sql, connection, key=settings.INFORMIX_DEBUG)
            for year_row in result.fetchall():
                print(year_row)
        else:
            print('how did that happen?')
            sys.exit()
def get_student(cid):
    """Determine if the student is eligible to participate.

    Returns a cached column->value dict for the student, or None when
    the gearup query finds no row.
    """
    key = 'gearup_student_{0}'.format(cid)
    student = cache.get(key)
    if student:
        return student
    phile = os.path.join(settings.BASE_DIR, 'sql/gearup.sql')
    with open(phile) as incantation:
        sql = '{0} AND Program_Enrollment_Record.id={1}'.format(
            incantation.read(),
            cid,
        )
    with get_connection() as connection:
        cursor = connection.cursor().execute(sql)
        names = [column[0] for column in cursor.description]
        record = cursor.fetchone()
    if record:
        student = dict(zip(names, record))
        cache.set(key, student)
    return student
def fn_get_name(id, EARL):
    """Return the fullname from id_rec for ``id``.

    Logs an error and aborts the process when no row is found (the
    original intent — see bug note below).
    """
    fname = ""
    Q_GET_NAME = '''select fullname from id_rec where id = {0}'''.format(id)
    connection = get_connection(EARL)
    # connection closes when exiting the 'with' block
    with connection:
        data_result = xsql(
            Q_GET_NAME, connection, key=settings.INFORMIX_DEBUG).fetchall()
        ret = list(data_result)
    # BUG FIX: ``list(...)`` is never None, so the original
    # ``if ret is None`` guard could never fire and a missing id fell
    # through and silently returned "". Test emptiness instead.
    if not ret:
        fn_write_error("Error in asign_notify.py - fn_get namen: No "
                       "name found ")
        quit()
    # Last row wins, matching the original loop behavior.
    for row in ret:
        fname = row[0]
    return fname
def main():
    """Push CX student-status changes to the Blackbaud 'Student Status'
    custom field for current-term seniors that already have an re_api_id.
    """
    try:
        # set global variable
        global EARL
        datetimestr = time.strftime("%Y%m%d%H%M%S")
        # Defines file names and directory location
        RE_STU_LOG = settings.BB_LOG_FOLDER + 'RE_student_status' \
            + datetimestr + ".txt"
        # determines which database is being called from the command line
        if database == 'cars':
            EARL = settings.INFORMIX_ODBC
        elif database == 'train':
            EARL = settings.INFORMIX_ODBC_TRAIN
        # --------GET THE TOKEN------------------
        current_token = fn_do_token()
        # -1- GET STUDENTS WITH A STATUS CHANGE FROM PROG_ENR_REC.
        # Assume all current blackbaud students have a re_api_id in the
        # cvid_rec table (handled by a separate prior process); only
        # students with an re_api_id entry are considered. For periodic
        # multi-student runs we only want status for the current term.
        statquery = '''select SAR.id, SAR.ACST, '', '', CVR.cx_id,
            SAR.acst, '','', CVR.cx_id, CVR.re_api_id, '' ,SAR.yr,
            SAR.sess, SAR.cl
            from cvid_rec CVR
            JOIN STU_ACAD_REC SAR on CVR.cx_id = SAR.id
            where CVR.re_api_id is not null
            AND SAR.acst not in ('PAST')
            and SAR.yr in (Select yr from cursessyr_vw)
            and SAR.sess in (select sess from cursessyr_vw)
            AND SAR.cl = 'SR'
            '''
        connection = get_connection(EARL)
        with connection:
            data_result = xsql(statquery, connection).fetchall()
            ret = list(data_result)
        if ret:
            for i in ret:
                carth_id = i[8]
                acad_stat = i[5]
                bb_id = i[9]
                # -2- UPDATE THE CUSTOM STUDENT STATUS FIELD.
                if bb_id != 0:
                    # field_id is the id of the custom-field record at
                    # Blackbaud, not the student.
                    field_id = get_const_custom_fields(
                        current_token, bb_id, 'Student Status')
                    if field_id == 0:
                        fn_write_error(
                            "Error in student_status_term.py - for: "
                            + str(carth_id) + ", Unable to get the "
                            "custom field")
                        fn_send_mail(
                            settings.BB_SKY_TO_EMAIL,
                            settings.BB_SKY_FROM_EMAIL,
                            "SKY API ERROR",
                            "Error in student_status.py - for: "
                            + str(carth_id)
                            + ", Unable to get the custom field")
                    else:
                        ret1 = update_const_custom_fields(
                            current_token, str(field_id), 'CX Status '
                            'Update', acad_stat)
                        if ret1 == 0:
                            # Log the successful patch. (with-block
                            # replaces the original open/close pair so the
                            # handle is released even on write errors.)
                            with open(RE_STU_LOG, "a") as f:
                                f.write("set custom fields: "
                                        + str(carth_id) + ", "
                                        + acad_stat + '\n')
                        else:
                            print("Patch failed")
                else:
                    print("Nobody home")
        else:
            print("No changes found")
            fn_send_mail(settings.BB_SKY_TO_EMAIL,
                         settings.BB_SKY_FROM_EMAIL, "SKY API",
                         "No new records for Blackbaud: ")
    except Exception as e:
        print("Error in main: " + str(e))
        fn_write_error("Error in student_status_term.py - Main: " + repr(e))
        # BUG FIX: the original ended with "+ +repr(e)" — unary plus on a
        # str — which raised TypeError inside this error handler.
        fn_send_mail(settings.BB_SKY_TO_EMAIL,
                     settings.BB_SKY_FROM_EMAIL, "SKY API ERROR",
                     "Error in " "student_status.py - for: " + repr(e))
def main():
    """Terradotta Synchronization."""
    sql_path = os.path.join(
        settings.BASE_DIR,
        'sql/terradotta/student_facstaff.sql',
    )
    with open(sql_path) as sql_file:
        query = sql_file.read()
    with get_connection() as connection:
        rows = xsql(query, connection, key=settings.INFORMIX_DEBUG).fetchall()
    if not rows:
        return
    stamp = time.strftime('%Y%m%d-%H%M%S')
    # Timestamped export plus the fixed-name copy the sftp drop expects.
    filename = '{0}terradotta_{1}.csv'.format(
        settings.TERRADOTTA_CSV_OUTPUT,
        stamp,
    )
    new_filename = '{0}sis_hr_user_info.txt'.format(
        settings.TERRADOTTA_CSV_OUTPUT,
    )
    header = [
        'UUUID', 'LAST_NAME', 'FIRST_NAME', 'MIDDLE_NAME', 'EMAIL',
        'DOB', 'GENDER', 'CONFIDENTIALITY_INDICATOR', 'MAJOR_1',
        'MAJOR_2', 'MINOR_1', 'MINOR_2', 'GPA', 'HOME_ADDRESS_LINE1',
        'HOME_ADDRESS_LINE2', 'HOME_ADDRESS_LINE3', 'HOME_ADDRESS_CITY',
        'HOME_ADDRESS_STATE', 'HOME_ADDRESS_ZIP', 'HOME_ADDRESS_COUNTRY',
        'PHONE_NUMBER', 'CLASS_STANDING', 'EMERGENCY_CONTACT_NAME',
        'EMERGENCY_CONTACT_PHONE', 'EMERGENCY_CONTACT_RELATIONSHIP',
        'COUNTRY_OF_CITIZENSHIP', 'ETHNICITY', 'PELL_GRANT_STATUS',
        'HR_TITLE', 'HR_CAMPUS_PHONE', 'HR_FLAG', 'PLACE_HOLDER_1',
        'PLACE_HOLDER_2', 'PLACE_HOLDER_3', 'PLACE_HOLDER_4',
        'PLACE_HOLDER_5', 'PLACE_HOLDER_6', 'PLACE_HOLDER_7',
        'PLACE_HOLDER_8', 'PLACE_HOLDER_9', 'PLACE_HOLDER_10',
        'PLACE_HOLDER_11', 'PLACE_HOLDER_12', 'PLACE_HOLDER_13',
        'PLACE_HOLDER_14', 'PLACE_HOLDER_15',
    ]
    with open(filename, 'w') as csv_file:
        writer = csv.writer(csv_file, dialect='excel-tab')
        writer.writerow(header)
        for row in rows:
            writer.writerow(row)
    os.chdir(settings.TERRADOTTA_CSV_OUTPUT)
    shutil.copy(filename, new_filename)
    cnopts = pysftp.CnOpts()
    cnopts.hostkeys = None
    sftp_kwargs = {
        'host': settings.TERRADOTTA_HOST,
        'username': settings.TERRADOTTA_USER,
        'private_key': settings.TERRADOTTA_PKEY,
        'private_key_pass': settings.TERRADOTTA_PASS,
        'cnopts': cnopts,
    }
    with pysftp.Connection(**sftp_kwargs) as sftp:
        sftp.put('sis_hr_user_info.txt', preserve_mtime=True)
        sftp.close()
def main(): try: # set global variable global EARL # determines which database is being called from the command line if database == 'cars': EARL = settings.INFORMIX_ODBC if database == 'train': EARL = settings.INFORMIX_ODBC_TRAIN # if database == 'sandbox': # EARL = settings.INFORMIX_ODBC_SANDBOX else: # # this will raise an error when we call get_engine() # below but the argument parser should have taken # care of this scenario and we will never arrive here. EARL = None # establish database connection """"--------GET THE TOKEN------------------""" current_token = fn_do_token() # print("Current Token = ") # print(current_token) """ ----------------------------------------------------------- -1-GET STUDENTS WITH A STATUS CHANGE FROM PROG_ENR_REC----- ----------------------------------------------------------- """ # for real.. # Two options. Get all changed records, look for local BB ID but ALSO # look for BB ID via API. If there is a record in BB, then add the # BB ID locally if it doesn't exist. 
# OR # Ignore all changes locally that do not match an existing local BB ID # The latter would be the lowest hits on the API statquery = '''select c.id, rr.constituent_id, rr.reciprocal_id, rrt.value, rrt.code, rt.code, rt.value from constituent c join relationship_role rr on rr.constituent_id = c.id join relationship r on r.id = rr.relationship_id join relationship_role_type rrt on rrt.id= rr.reciprocal_type_id join relationship_type rt on rt.id = rrt.relationship_type_id where c.id in ( select DISTINCT CR.cx_id --, SAR.yr, SAR.sess, SAR.acst from train:cvid_rec CR join stu_acad_rec SAR on SAR.id = CR.cx_id where CR.re_api_id is not null and SAR.yr = YEAR(TODAY) and SAR.ACST IN ('GOOD' ,'LOC' ,'PROB' ,'PROC' ,'PROR' ,'READ' ,'RP' , 'SAB' ,'SHAC' ,'SHOC')) ''' connection = get_connection(EARL) with connection: data_result = xsql(statquery, connection).fetchall() ret = list(data_result) for i in ret: print(str(i[0]) + " " + i[1]) '''------------------------------------------------------- --3-UPDATE THE CUSTOM STUDENT STATUS FIELD---------------- ---------------------------------------------------------- ''' if bb_id != 0: print("Update custom field") # ret = get_const_custom_fields(current_token, bb_id, # 'Student Status') """
def main():
    """Pull room assignments from the Adirondack housing API and sync
    them into the CX stu_serv_rec table (insert or update), writing a
    csv audit file and notifying Student Billing of changes.

    NOTE(review): this body was reconstructed from a collapsed source;
    the nesting of the request/processing relative to the auto-mode term
    loop should be verified against version control.
    """
    try:
        """ Term will be RA + Current year
        One big push for returning students for RC term happens in December
        Only returning will be in the system, no need to screen out frosh
        Push again June 30 and July 30 for RC term (will include frosh)
        Aug 1 start automation for fall term
        Stop automation for RC on last day of class - appr May 20
        May 1, June 30, July 30 December for upcoming term
        From Aug to Dec, grab all RA current year
        From Jan to May 1 grab all RC current year
        On MAY 1, grab all RA current year
        on June 30 grab all RA current year
        On third wednesday in December grab all RC Next
        On Next day in Dec, go back to RA Current
        Only options are RC20xx and RA20xx, so I only need to determine
        which year to pass during each time frame.
        Question is, for spring housing, will both RA and RC need to be
        dealt with?
        """
        """
        This is the command needed to run the script
        python room_assignments.py --database=train --test -run_mode=auto
        Must specify the database, whether testing or live and whether
        user input is required
        """
        # set global variable
        global EARL
        # determines which database is being called from the command line
        # BUG FIX: the second test was a bare ``if``, so its ``else``
        # clobbered the 'cars' assignment with None; ``elif`` matches the
        # (correct) pattern used by the sibling scripts in this project.
        if database == 'cars':
            EARL = settings.INFORMIX_ODBC
        elif database == 'train':
            EARL = settings.INFORMIX_ODBC_TRAIN
        else:
            # this will raise an error when we call get_engine() below
            # but the argument parser should have taken care of this
            # scenario and we will never arrive here.
            EARL = None
        # establish database connection
        if test != "test":
            API_server = "carthage_thd_prod_support"
            key = settings.ADIRONDACK_API_SECRET
        else:
            API_server = "carthage_thd_test_support"
            key = settings.ADIRONDACK_TEST_API_SECRET
        utcts = fn_get_utcts()
        """Seconds from UTC Zero hour"""
        hashstring = str(utcts) + key
        """Assumes the default UTF-8"""
        # Adirondack authenticates with an md5 digest of timestamp+secret.
        hash_object = hashlib.md5(hashstring.encode())
        datetimestr = time.strftime("%Y%m%d%H%M%S")
        if run_mode == "manual":
            # Operator supplies session / hall / posted filter by hand.
            session = input("Enter target session (EX. RA 2019): ")
            hall = fn_translate_bldg_for_adirondack(
                input("Enter Hall code "
                      "- use ALL or "
                      "specifec bldg: "))
            posted = input("Do you want unposted or posted records? "
                           "Enter 0 for unposted, 1 for posted, "
                           "2 for changed, 0,2 for both: ")
        elif run_mode == "auto":
            """IT MAY BE BEST TO HARD CODE THE TERM SELECTIONS HERE
            CAN'T JUST LOOK AHEAD, HAVE TO KEEP KIDS OUT UNTIL A
            PREFERRED TIME
            """
            # Terms within +/- 30 days of today.
            q_get_terms = '''select sess, yr, beg_date, end_date
                from acad_cal_rec
                where
                sess in ("RA", "RC", "RE", "GA", "GC", "GE")
                and subsess = ""
                and (end_date > TODAY - 30 and beg_date < TODAY + 30)'''
            connection = get_connection(EARL)
            """ connection closes when exiting the 'with' block """
            with connection:
                data_result = xsql(
                    q_get_terms, connection, key=settings.INFORMIX_DEBUG
                ).fetchall()
                ret = list(data_result)
            # NOTE(review): when several terms match, the last row wins —
            # confirm that is intended.
            for row in ret:
                session = row[0].strip() + ' ' + str(row[1])
                hall = ''
                posted = '0,2'
        # IMPORTANT! The URL won't work if the string has any spaces.
        url = "https://carthage.datacenter.adirondacksolutions.com/" \
            + API_server + "/apis/thd_api.cfc?" \
            "method=housingASSIGNMENTS&" \
            "Key=" + key + "&" \
            "utcts=" + str(utcts) + "&" \
            "h=" + hash_object.hexdigest() + "&" \
            "TimeFrameNumericCode=" + session + "&" \
            + "HALLCODE=" + hall + "&" + \
            "Posted=" + posted
        '''
        DEFINITIONS
        Posted: 0 returns only NEW unposted, 1 returns posted, as in out
        to our system 2 changed or cancelled
        PostAssignments: -1 will mark the record as posted.
        CurrentFuture: -1 returns only current and future
        Cancelled: -1 is for cancelled, 0 for not cancelled
        In theory, every room assignment in Adirondack should have a
        bill code'''
        # BUG FIX: x was unbound when the request failed, producing a
        # NameError below instead of the intended "no data" path.
        x = {'DATA': []}
        try:
            response = requests.get(url)
            response.raise_for_status()
            x = json.loads(response.content)
            i = 5
        except requests.exceptions.HTTPError as err:
            print("Http Error:", err)
        except requests.exceptions.ConnectionError as err:
            print("Error Connecting:", err)
        except requests.exceptions.Timeout as err:
            print("Timeout Error:", err)
        except requests.exceptions.RequestException as err:
            print("OOps: Something Else", err)
        if not x['DATA']:
            # No new data found.
            pass
        else:
            # Archive the previous csv before writing a fresh one.
            room_file = settings.ADIRONDACK_TXT_OUTPUT + \
                settings.ADIRONDACK_ROOM_ASSIGNMENTS + '.csv'
            room_archive = settings.ADIRONDACK_ROOM_ARCHIVED + \
                settings.ADIRONDACK_ROOM_ASSIGNMENTS + \
                datetimestr + '.csv'
            if os.path.exists(room_file):
                os.rename(room_file, room_archive)
            room_data = fn_encode_rows_to_utf8(x['DATA'])
            try:
                notify_flag = False
                # Write header
                fn_write_assignment_header(room_file)
                with open(room_file, 'a') as room_output:
                    for i in room_data:
                        try:
                            if i[0] is None:
                                # No ID — skip the row.
                                pass
                            else:
                                carthid = i[0]
                                bldgname = i[1]
                                adir_hallcode = i[2]
                                floor = i[3]
                                bed = i[5]
                                room_type = i[6]
                                occupancy = i[7]
                                roomusage = i[8]
                                timeframenumericcode = i[9]
                                """Note: Checkout date is returning in the
                                checkout field from the API rather than
                                checkoutdate field"""
                                checkin = i[10]
                                checkedindate = i[10]
                                checkout = i[12]
                                checkedoutdate = i[13]
                                po_box = i[14]
                                po_box_combo = i[15]
                                canceled = i[16]
                                canceldate = i[17]
                                cancelnote = i[18]
                                cancelreason = i[19]
                                ghost = i[20]
                                posted = i[21]
                                roomassignmentid = i[22]
                                sess = i[9][:2]
                                year = i[9][-4:]
                                term = i[9]
                                off_camp_rsv_apr = ''
                                bldg = fn_fix_bldg(i[2])
                                billcode = fn_get_bill_code(
                                    carthid, str(bldg), room_type,
                                    roomassignmentid, session,
                                    API_server, key)
                                '''
                                Intenhsg can be: R = Resident,
                                O = Off-Campus, C = Commuter
                                This routine is needed because the
                                adirondack hall codes match to multiple
                                descriptions and hall descriptions have
                                added qualifiers such as FOFF, MOFF, UNF,
                                LOCA that are not available elsewhere
                                using the API. Have to parse it to assign
                                a generic room. For non residents, we have
                                a generic room for CX and a dummy room on
                                the Adirondack side. So we need two
                                variables, one for Adirondack and one
                                for CX.
                                '''
                                adir_room = i[4]
                                if bldg == 'CMTR':
                                    intendhsg = 'C'
                                    room = bldgname[
                                        (bldgname.find('_') + 1)
                                        - len(bldgname):]
                                elif bldg == 'OFF':
                                    intendhsg = 'O'
                                    room = bldgname[
                                        (bldgname.find('_') + 1)
                                        - len(bldgname):]
                                elif bldg == 'ABRD':
                                    intendhsg = 'O'
                                    room = bldgname[
                                        (bldgname.find('_') + 1)
                                        - len(bldgname):]
                                elif bldg == 'RMTE':
                                    intendhsg = 'C'
                                    off_camp_rsv_apr = 'Y'
                                    room = i[4]
                                elif bldg == 'UN':
                                    intendhsg = 'R'
                                    room = bldgname[
                                        (bldgname.find('_') + 1)
                                        - len(bldgname):]
                                else:
                                    intendhsg = 'R'
                                    room = i[4]
                                if posted == 2 and canceled == -1:
                                    billcode = 'NOCH'
                                if canceled == -1 and cancelreason \
                                        == 'Withdrawal':
                                    rsvstat = 'W'
                                else:
                                    rsvstat = 'R'
                                csvwriter = csv.writer(
                                    room_output,
                                    quoting=csv.QUOTE_NONNUMERIC)
                                '''Need to write translated fields if csv
                                is to be created'''
                                csvwriter.writerow(
                                    [carthid, bldgname, bldg, floor,
                                     room, bed, room_type, occupancy,
                                     roomusage, timeframenumericcode,
                                     checkin, checkedindate, checkout,
                                     checkedoutdate, po_box,
                                     po_box_combo, canceled, canceldate,
                                     cancelnote, cancelreason, ghost,
                                     posted, roomassignmentid, billcode])
                                '''
                                Validate if the stu_serv_rec exists first
                                update stu_serv_rec id, sess, yr,
                                rxv_stat, intend_hsg, campus, bldg, room,
                                bill_code
                                '''
                                q_validate_stuserv_rec = '''
                                    select id, sess, yr, rsv_stat,
                                    intend_hsg, campus, trim(bldg),
                                    trim(room), no_per_room, add_date,
                                    trim(bill_code), hous_wd_date,
                                    offcampus_res_appr
                                    from stu_serv_rec
                                    where yr = {2}
                                    and sess = "{1}"
                                    and id = {0}'''.format(
                                    carthid, sess, year)
                                connection = get_connection(EARL)
                                """ connection closes when exiting the
                                'with' block """
                                with connection:
                                    data_result = xsql(
                                        q_validate_stuserv_rec,
                                        connection,
                                        key=settings.INFORMIX_DEBUG
                                    ).fetchall()
                                    ret = list(data_result)
                                if len(ret) != 0:
                                    # Stu Serv Rec found.
                                    if billcode:
                                        """compare rsv_stat, intend_hsg,
                                        bldg, room, billcode -- Update
                                        only if something has changed"""
                                        for row in ret:
                                            if row[3] != rsvstat \
                                                    or row[4] != intendhsg \
                                                    or row[6] != bldg \
                                                    or row[7] != room \
                                                    or row[10] != billcode:
                                                q_update_stuserv_rec = '''
                                                    UPDATE stu_serv_rec
                                                    set rsv_stat = ?,
                                                    intend_hsg = ?,
                                                    campus = ?,
                                                    bldg = ?,
                                                    room = ?,
                                                    bill_code = ?,
                                                    offcampus_res_appr = ?
                                                    where id = ?
                                                    and sess = ?
                                                    and yr = ?'''
                                                q_update_stuserv_args = (
                                                    rsvstat, intendhsg,
                                                    "MAIN", bldg, room,
                                                    billcode,
                                                    off_camp_rsv_apr,
                                                    int(carthid), sess,
                                                    int(year))
                                                connection = \
                                                    get_connection(EARL)
                                                """ connection closes when
                                                exiting the 'with' block
                                                """
                                                with connection:
                                                    cur = \
                                                        connection.cursor()
                                                    cur.execute(
                                                        q_update_stuserv_rec,
                                                        q_update_stuserv_args)
                                                    connection.commit()
                                                """If anything is written
                                                to database set this flag
                                                to True"""
                                                notify_flag = True
                                            # Mark room as posted whether
                                            # or not an update was needed
                                            # (both branches of the
                                            # original made this same
                                            # call).
                                            fn_mark_room_posted(
                                                carthid, adir_room,
                                                adir_hallcode, term,
                                                posted,
                                                roomassignmentid,
                                                API_server, key)
                                    else:
                                        # Bill code not found.
                                        fn_write_error(
                                            "Error in "
                                            "room_assignments.py - "
                                            "Bill code not found ID "
                                            "= " + str(carthid)
                                            + ", Building = "
                                            + str(bldg)
                                            + ", Room assignment ID = "
                                            + str(roomassignmentid))
                                        fn_send_mail(
                                            settings.ADIRONDACK_TO_EMAIL,
                                            settings.ADIRONDACK_FROM_EMAIL,
                                            "Error in "
                                            "room_assignments.py - "
                                            "Bill code not found ID "
                                            "= " + str(carthid)
                                            + ", Building = "
                                            + str(bldg)
                                            + ", Room assignment ID = "
                                            + str(roomassignmentid),
                                            "Adirondack Error")
                                else:
                                    """As of 1/30/20, we have decided
                                    that it makes sense to insert a
                                    skeleton stu_serv_rec here. May need
                                    to deal with pulling from fall record
                                    for spring term, and deal with
                                    parking logic"""
                                    q_create_stu_serv_rec = '''INSERT INTO
                                        stu_serv_rec (id, sess, yr,
                                        rsv_stat, intend_hsg, campus,
                                        bldg, room, add_date, bill_code,
                                        offcampus_res_appr)
                                        VALUES ({0},'{1}', {2}, '{3}',
                                        '{4}', '{5}', '{6}', '{7}',
                                        '{8}', '{9}', '{10}')
                                        '''.format(
                                        carthid, sess, year, 'R',
                                        intendhsg, 'MAIN', bldg, room,
                                        checkedindate, billcode,
                                        off_camp_rsv_apr)
                                    connection = get_connection(EARL)
                                    with connection:
                                        cur = connection.cursor()
                                        cur.execute(
                                            q_create_stu_serv_rec)
                                        connection.commit()
                                    fn_mark_room_posted(
                                        carthid, room, bldg, term,
                                        posted, roomassignmentid,
                                        API_server, key)
                        except Exception as e:
                            print("Error in process " + repr(e))
                            print(e.args)
                            fn_write_error(
                                "Error in room_assignments.py - file "
                                "write: " + repr(e))
                    """Notify Student Billing of changes """
                    if run_mode == "auto":
                        if notify_flag:
                            fn_notify(room_file, EARL)
                    room_output.close()
            except Exception as e:
                print("Error in file write " + repr(e))
                fn_write_error(
                    "Error in room_assignments.py - file write: "
                    + repr(e))
                fn_send_mail(settings.ADIRONDACK_TO_EMAIL,
                             settings.ADIRONDACK_FROM_EMAIL,
                             "Error in room_assignments.py - file "
                             "write: " + repr(e),
                             "Adirondack Error")
    except Exception as e:
        print(
            "Error in adirondack_room_assignments_api.py- Main: "
            + repr(e))
def main():
    """Sync the daily ADP payroll extract into the cc_adp_rec table.

    Usage:
        ==> python cc_adp_rec.py --database=train --test
        ==> python cc_adp_rec.py --database=cars

    Pipeline: download the ADP file (production only), rewrite it to CX
    formatting conventions, dump the current cc_adp_rec view to CSV, diff
    the two files, and insert each genuinely-new row into cc_adp_rec.
    Errors are logged via fn_write_error and emailed via fn_send_mail.
    """
    # Defines file names and directory location.
    adp_csv_output = settings.ADP_CSV_OUTPUT
    # For testing use last file:
    # new_adp_file = adp_csv_output + "ADPtoCXLast.csv"
    new_adp_file = adp_csv_output + "ADPtoCX.csv"
    adp_view_file = adp_csv_output + "adptocxview.csv"
    adp_diff_file = adp_csv_output + "different.csv"
    adptocx_reformatted = adp_csv_output + "ADPtoCX_Reformatted.csv"

    # First remove yesterday's file of updates.
    if os.path.isfile(adp_diff_file):
        os.remove(adp_diff_file)
    try:
        # set global variable
        global EARL
        # Determines which database is being called from the command line.
        # BUG FIX: the 'train' test was a bare `if`, so choosing
        # --database=cars fell into its `else` and reset EARL to None.
        if database == 'cars':
            EARL = settings.INFORMIX_ODBC
        elif database == 'train':
            EARL = settings.INFORMIX_ODBC_TRAIN
        else:
            # This will raise an error when we call get_engine() below, but
            # the argument parser should have taken care of this scenario
            # and we will never arrive here.
            EARL = None

        #################################################################
        # STEP 0 -- Pull the file from the ADP FTP site.
        # Execute sftp code in production only.
        #################################################################
        if not test:
            file_download()

        #################################################################
        # STEP 1/2 -- Rewrite the ADP file formatted to match the CX
        # constraints on length and different coding and date format.
        #################################################################
        fn_write_adp_header(adptocx_reformatted)
        with codecs.open(new_adp_file, 'r', encoding='utf-8-sig') as f:
            d_reader = csv.DictReader(f, delimiter=',')
            for row in d_reader:
                fn_write_row_reformatted(adptocx_reformatted, row)

        #################################################################
        # STEP 3 -- Use the data currently in cc_adp_rec (not the ADP
        # "last" file) as the comparison baseline so we know we are
        # current.
        #################################################################
        fn_write_adp_header(adp_csv_output + "adptocxview.csv")
        connection = get_connection(EARL)
        with connection:
            data_result = xsql(CX_VIEW_SQL, connection,
                               key=settings.INFORMIX_DEBUG).fetchall()
            ret = list(data_result)
            with open(adp_view_file, 'a') as file_out:
                csvWriter = csv.writer(file_out, delimiter=',',
                                       dialect='myDialect')
                for row in ret:
                    csvWriter.writerow(row)

        # Read in both files and compare. The utf-8-sig codec strips the
        # BOM so the extra characters in the ADP header do not poison the
        # set comparison below.
        fn_write_header(adp_diff_file)
        with codecs.open(adptocx_reformatted, 'r',
                         encoding='utf-8-sig') as t1, \
                codecs.open(adp_view_file, 'r',
                            encoding='utf-8-sig') as t2:
            newfile = t1.readlines()
            oldfile = t2.readlines()
        # Set difference: rows added or changed in new but not in original.
        bigb = set(newfile) - set(oldfile)
        with open(adp_diff_file, 'a') as file_out:
            for line in bigb:
                file_out.write(line)

        #################################################################
        # STEP 4 -- Open differences file and loop through records.
        #################################################################
        with open(adp_diff_file, 'r') as f:
            d_reader = csv.DictReader(f, delimiter=',')
            try:
                for row in d_reader:
                    if row["job_title_code"] == "":
                        # No job assigned -- nothing to load.
                        continue
                    if row["carth_id"] == "":
                        SUBJECT = 'No Carthage ID'
                        BODY = "No Carthage ID for " + row['payroll_name']
                        fn_write_error(
                            "No Carthage ID for " + row['payroll_name'])
                        fn_send_mail(settings.ADP_TO_EMAIL,
                                     settings.ADP_FROM_EMAIL,
                                     BODY, SUBJECT)
                    elif row["file_number"] == "":
                        fn_write_error(
                            "No ADP File Number for " + row['payroll_name'])
                        SUBJECT = 'No ADP File Number'
                        BODY = "No ADP File Number for " + \
                            row['payroll_name']
                        fn_send_mail(settings.ADP_TO_EMAIL,
                                     settings.ADP_FROM_EMAIL,
                                     BODY, SUBJECT)
                    else:
                        ##################################################
                        # STEP 4a -- Make sure record is not already in
                        # cc_adp_rec. Limitations on filtering the ADP
                        # report allow rare cases of identical rows.
                        ##################################################
                        verifyqry = Q_CC_ADP_VERIFY(row)
                        connection = get_connection(EARL)
                        with connection:
                            data_result = xsql(
                                verifyqry, connection,
                                key=settings.INFORMIX_DEBUG).fetchall()
                            ret = list(data_result)
                        if len(ret) == 0:
                            ##############################################
                            # STEP 4b -- Write entire row to cc_adp_rec.
                            ##############################################
                            try:
                                rt = INS_CC_ADP_REC(row, EARL)
                                connection = get_connection(EARL)
                                with connection:
                                    cur = connection.cursor()
                                    cur.execute(rt[0], rt[1])
                            except Exception as e:
                                fn_write_error(
                                    "Error in adptcx.py while "
                                    "inserting into cc_adp_rec "
                                    "Error = " + repr(e))
                                continue
                        # else: found record -- do not insert duplicate.
            except Exception as e:
                fn_write_error(
                    "Error in cc_adp_rec.py Step 4, Error = " + repr(e))
                fn_send_mail(
                    settings.ADP_TO_EMAIL, settings.ADP_FROM_EMAIL,
                    "Error in cc_adp_rec.py, at reading diff file. "
                    "Error = " + repr(e),
                    "Error in cc_adp_rec.py")
    except Exception as e:
        fn_write_error("Error in cc_adp_rec.py - Main: " + repr(e))
        fn_send_mail(settings.ADP_TO_EMAIL, settings.ADP_FROM_EMAIL,
                     "Error in cc_adp_rec.py, Error = " + repr(e),
                     "Error in cc_adp_rec.py")
def main():
    """Pull student ID photos from the Lenel system, zip them, and SFTP
    the archive to the Adirondack housing vendor.

    Usage:
        development server (bng): python student_pictures.py --database=train --test
        production server (psm):  python student_pictures.py --database=cars
    """
    filepath = settings.ADIRONDACK_JPG_OUTPUT
    try:
        # set global variable
        global EARL
        # Determines which database is being called from the command line.
        # BUG FIX: the 'train' test was a bare `if`, so choosing
        # --database=cars fell into its `else` and reset EARL to None.
        if database == 'cars':
            EARL = settings.INFORMIX_ODBC
        elif database == 'train':
            EARL = settings.INFORMIX_ODBC_TRAIN
        else:
            # This will raise an error when we call get_engine() below, but
            # the argument parser should have taken care of this scenario
            # and we will never arrive here.
            EARL = None

        # Fetch the list of student IDs whose pictures we need.
        connection = get_connection(EARL)
        # connection closes when exiting the 'with' block
        with connection:
            data_result = xsql(PICTURE_ID_QUERY, connection,
                               key=settings.INFORMIX_DEBUG).fetchall()
            retID = list(data_result)
            # BUG FIX: list(...) is never None, so `retID is None` could
            # never fire -- test for an empty result instead.
            if not retID:
                SUBJECT = '[adirondack Application] failed'
                BODY = "SQL Query returned no data."
                sendmail(settings.ADIRONDACK_TO_EMAIL,
                         settings.ADIRONDACK_FROM_EMAIL, BODY, SUBJECT)

        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        # SECURITY/TODO: hard-coded DSN credentials; could not return
        # lenel_earl from settings for some reason -- move this to
        # settings.LENEL_EARL and rotate the password.
        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        LENEL_EARL = 'DSN=MSSQL-LENEL;UID=C8Lenel;PWD=leneL8cvti'
        print("EARL = " + LENEL_EARL)
        try:
            for row in retID:
                LENEL_PICTURE_ARG = row[0]
                try:
                    # Query the photo blob from the Lenel database.
                    conn = pyodbc.connect(LENEL_EARL)
                    result = conn.execute(
                        LENEL_PICTURE_QUERY.format(int(LENEL_PICTURE_ARG)))
                    for row1 in result:
                        photo = row1[0]
                        filename = str(LENEL_PICTURE_ARG) + ".jpg"
                        # Write blob data into a file.
                        write_file(photo, filepath + filename)
                    result.close()
                    conn.close()
                    print("END LENEL")
                except ValueError:
                    print("Value Error getting photo")
                except TypeError:
                    print("Type Error getting photo")
                except Exception as e:
                    # BUG FIX: the original compared e.__class__ to the
                    # *string* 'pyodbc.DataError', which is always False.
                    if isinstance(e, pyodbc.DataError):
                        print("DATA ERROR")
        except Exception as e:
            SUBJECT = 'ADIRONDACK UPLOAD failed'
            BODY = 'Unable to PUT .zip file to ' \
                   'adirondack server.\n\n{0}'.format(str(e))
            fn_write_error(
                "Error in adirondack student_pictures.py, Error = "
                + repr(e))
            sendmail(settings.ADIRONDACK_TO_EMAIL,
                     settings.ADIRONDACK_FROM_EMAIL, BODY, SUBJECT)

        # Remove previous archive before rebuilding it.
        if os.path.exists(filepath + "carthage_studentphotos.zip"):
            os.remove(filepath + "carthage_studentphotos.zip")
        # Create the zip in the source directory (can't create it in the
        # Data directory), then move it into place.
        shutil.make_archive("carthage_studentphotos", 'zip', filepath)
        shutil.move("carthage_studentphotos.zip", filepath)

        # Clean up -- remove the individual .jpg files.
        filelist = os.listdir(filepath)
        for filename in filelist:
            try:
                if filename.endswith('.jpg'):
                    os.remove(filepath + filename)
            except Exception as e:
                print(repr(e))

        # Send the archive to the SFTP site.
        sftp_upload(filepath + "carthage_studentphotos.zip")
    except Exception as e:
        fn_write_error(
            "Error in adirondack student_pictures.py, Error = " + repr(e))
        SUBJECT = '[adirondack Application] Error'
        BODY = "Error in adirondack student_pictures.py, Error = " + repr(e)
        sendmail(settings.ADIRONDACK_TO_EMAIL,
                 settings.ADIRONDACK_FROM_EMAIL, BODY, SUBJECT)
def main():
    ''' main function

    Export student bio data to a pipe-delimited text file and SFTP it to
    the Adirondack housing vendor. Failures are logged and emailed via
    send_mail; success is only emailed when --test is supplied.
    '''
    # Breadcrumb log entry so each run is visible in the log.
    logger.error('skeletor is here')
    # Defines file names and directory location
    adirondackdata = ('{0}carthage_students.txt'.format(
        settings.ADIRONDACK_TXT_OUTPUT)
    )
    try:
        # set global variable
        global EARL
        # determines which database is being called from the command line
        if database == 'cars':
            EARL = settings.INFORMIX_ODBC
        elif database == 'train':
            EARL = settings.INFORMIX_ODBC_TRAIN
        else:
            print("database must be: 'cars' or 'train'")
            exit(-1)
        # --------------------------
        # Create the txt file
        # print(ADIRONDACK_QUERY)
        connection = get_connection(EARL)
        # connection closes when exiting the 'with' block
        with connection:
            data_result = xsql(
                ADIRONDACK_QUERY, connection, key=settings.INFORMIX_DEBUG
            ).fetchall()
            # print(data_result)
            ret = list(data_result)
            # NOTE(review): list(...) is never None, so this branch cannot
            # fire -- an empty result is `[]`. Presumably `if not ret:` was
            # intended; confirm before changing.
            if ret is None:
                SUBJECT = "[Adirondack] Application failed"
                BODY = "SQL Query returned no data."
                send_mail (
                    None, [settings.ADIRONDACK_TO_EMAIL,], SUBJECT,
                    settings.ADIRONDACK_FROM_EMAIL, 'email/default.html',
                    BODY, [settings.ADMINS[0][1],]
                )
            else:
                # Emit the column-header line before the data rows.
                fn_write_student_bio_header()
                # print("Query successful")
                # print(ret)
                with open(adirondackdata, 'w') as file_out:
                    csvWriter = csv.writer(file_out, delimiter='|')
                    # Rows are re-encoded to UTF-8 before writing.
                    encoded_rows = fn_encode_rows_to_utf8(ret)
                    for row in encoded_rows:
                        # for row in ret:
                        # print(row)
                        csvWriter.writerow(row)
                    file_out.close()
                # send file to SFTP Site..
                sftp_upload(adirondackdata)
                # Only announce success when running with --test.
                if test:
                    SUBJECT = "[Adirondack] Student Bio data success"
                    BODY = "Retreieved data and sent it via SFTP to the eater of trees."
                    send_mail(
                        None, [settings.ADIRONDACK_TO_EMAIL,], SUBJECT,
                        settings.ADIRONDACK_FROM_EMAIL,
                        'email/default.html', BODY,
                        [settings.ADMINS[0][1],]
                    )
                    print('done')
                    logger.error(BODY)
    except Exception as e:
        # print(str(e))
        logger.error(
            "Error in adirondack student_bio.py, Error = " + repr(e))
        SUBJECT = '[Adirondack] Application Error'
        BODY = "Error in adirondack student_bio.py, Error = " + repr(e)
        send_mail (
            None, [settings.ADIRONDACK_TO_EMAIL,], SUBJECT,
            settings.ADIRONDACK_FROM_EMAIL, 'email/default.html',
            BODY, [settings.ADMINS[0][1],]
        )
def main():
    """Export data for various constituent types.

    Reads the SQL incantation for the requested profile (`who`, optionally
    `_pseudo`), runs it, and writes a pipe-delimited CSV named after the
    profile. List-valued columns are JSON-encoded for grover's importer.
    """
    # Check for profile type -- the lookup fails if `who` is not one of the
    # four allowed types. FIX: catch KeyError specifically instead of a
    # broad Exception that would also hide unrelated bugs.
    try:
        headers = HEADERS[who]
    except KeyError:
        print("who must be: 'student', 'facstaff', 'alumni', or 'education'\n")
        print("who = {0}".format(who))
        sys.exit(-1)

    # Pseudo profiles use a parallel SQL file with a '_pseudo' suffix.
    suffix = ''
    if pseudo:
        suffix = '_pseudo'
    phile = os.path.join(settings.BASE_DIR, 'sql/grover', '{0}{1}.sql'.format(
        who,
        suffix,
    ))
    with open(phile) as incantation:
        sql = incantation.read()

    # In test mode just show what would run and bail out.
    if test:
        print("who = {0}".format(who))
        print("headers")
        print(headers)
        print("phile:")
        print(phile)
        print("sql = {0}".format(sql))
        logger.debug("sql = %s", sql)
        sys.exit(-1)

    connection = get_connection()
    with connection:
        rows = xsql(sql, connection, key=settings.INFORMIX_DEBUG).fetchall()
        phile = r'{0}{1}.csv'.format(who, suffix)
        with open(phile, 'w', newline='') as csvfile:
            # QUOTE_NONE with an empty quotechar: fields are written raw.
            writer = csv.writer(csvfile, delimiter='|',
                                quoting=csv.QUOTE_NONE, quotechar='')
            writer.writerow([head for head in headers])
            char_remove = {' ', '(', ')'}
            for row in rows:
                # Sometimes the provisioning view will include an entity
                # that was just created and might not have a username just
                # yet -- strip spaces/parens from the email when present.
                if row.email:
                    row.email = ''.join(
                        [char for char in row.email
                         if char not in char_remove],
                    )
                # grover's import app does not like trailing commas in the
                # list. filter() returns an iterator in python 3, so wrap
                # it in list(). grover also wants double quotes rather
                # than single quotes, so convert the list to a JSON string
                # with dumps().
                if who != 'facstaff':
                    if who != 'education':
                        concentration = list(
                            filter(None, row.concentration.split(',')))
                        row.concentration = json.dumps(concentration)
                    majors = list(filter(None, row.majors.split(',')))
                    row.majors = json.dumps(majors)
                    minors = list(filter(None, row.minors.split(',')))
                    row.minors = json.dumps(minors)
                # write the row
                writer.writerow(row)
    print('done. created file: {0}'.format(phile))
def main():
    """Pull miscellaneous housing fees from the Adirondack billing API and
    stage them as ASCII-post CSV files for Student Accounts.

    To run: python misc_fees.py --database=train --test

    For each active term it fetches un-exported charges, skips anything
    already in CX or already processed, writes one CSV per account code,
    marks the charges exported in Adirondack, and emails Student Accounts
    when new files were produced.
    """
    # set global variable
    global EARL
    # Determines which database is being called from the command line.
    # BUG FIX: the 'train' test was a bare `if`, so choosing
    # --database=cars fell into its `else` and reset EARL to None.
    if database == 'cars':
        EARL = settings.INFORMIX_ODBC
    elif database == 'train':
        EARL = settings.INFORMIX_ODBC_TRAIN
    else:
        # This will raise an error when we call get_engine() below, but the
        # argument parser should have taken care of this scenario and we
        # will never arrive here.
        EARL = None

    # NOTE(review): `test` is presumably the string value of the --test
    # argument here (compared against "test"), unlike sibling scripts that
    # treat it as a boolean -- confirm against the argument parser.
    if test != "test":
        API_server = "carthage_thd_prod_support"
        key = settings.ADIRONDACK_API_SECRET
    else:
        API_server = "carthage_thd_test_support"
        key = settings.ADIRONDACK_TEST_API_SECRET

    try:
        # The API authenticates with an MD5 of (utc timestamp + secret).
        utcts = fn_get_utcts()
        hashstring = str(utcts) + key
        # Assumes the default UTF-8.
        hash_object = hashlib.md5(hashstring.encode())
        datetimestr = time.strftime("%Y%m%d")
        timestr = time.strftime("%H%M")

        # Adirondack dataset: IDs of every bill row seen this run.
        bill_list = []
        q_get_terms = '''select sess, yr, beg_date, end_date
            from acad_cal_rec
            where --yr = 2020 and
            sess in ("RA", "RC", "RE", "GA", "GC", "GE")
            and subsess = ""
            and (end_date > TODAY and beg_date < TODAY)'''
        connection = get_connection(EARL)
        # connection closes when exiting the 'with' block
        with connection:
            data_result = xsql(q_get_terms, connection,
                               key=settings.INFORMIX_DEBUG).fetchall()
            ret = list(data_result)
        if ret:
            # Cleanup previous run CSV files: archive them with a
            # time-stamped name.
            files = os.listdir(settings.ADIRONDACK_TXT_OUTPUT)
            for f in files:
                ext = f.find(".csv")
                if (f.startswith("2010") or f.startswith("2011")
                        or f.startswith("2031") or f.startswith("2040")):
                    shutil.move(
                        settings.ADIRONDACK_TXT_OUTPUT + f,
                        settings.ADIRONDACK_TXT_OUTPUT + "ascii_archive/"
                        + f[:ext] + "_" + timestr + f[ext:])

            for row in ret:
                adirondack_term = row[0].strip() + ' ' + str(row[1])
                # Get data from Adirondack.
                # DEFINITIONS:
                #   Exported: -1 exported will be included, 0 only
                #   non-exported. ExportCharges: if -1 then charges will be
                #   marked as exported. DO NOT mark exported here; wait for
                #   a later step.
                url = ("https://carthage.datacenter.adirondacksolutions.com/"
                       + API_server + "/apis/thd_api.cfc?"
                       "method=studentBILLING&"
                       "Key=" + key
                       + "&" + "utcts=" + str(utcts)
                       + "&" + "h=" + hash_object.hexdigest()
                       + "&" + "TIMEFRAMENUMERICCODE=" + adirondack_term
                       + "&" + "AccountCode=2010,2040,2011,2031"
                       + "&" + "Exported=0")
                # + "&" + "STUDENTNUMBER=1566304"
                response = requests.get(url)
                payload = json.loads(response.content)

                # Make sure no duplicate records get into the system. The
                # STUDENTBILLINGINTERNALID (column 16) is a unique row id
                # for each Adirondack billing entry; previously-processed
                # ids are kept in a per-term "_processed" CSV.
                cur_file = (settings.ADIRONDACK_TXT_OUTPUT + "billing_logs/"
                            + adirondack_term.replace(" ", "")
                            + '_processed.csv')
                # Step 1: build the list of items already written to a CSV
                # for this term.
                the_list = []
                if os.path.isfile(cur_file):
                    with open(cur_file, 'r') as ffile:
                        csvf = csv.reader(ffile)
                        # Skip the header line.
                        next(ffile)
                        for prow in csvf:
                            assign_id = int(prow[16].strip())
                            the_list.append(assign_id)
                else:
                    fn_write_billing_header(cur_file)

                # Step 2: loop through the new charges returned from the
                # Adirondack API query. Each account code must be a
                # separate file for ASCII Post:
                #   2010 Improper Checkout, 2011 Extended stay charge,
                #   2031 Recore, 2040 Lockout fee.
                # Room rental fees are not for ASCII post.
                # NOTE: loop variable renamed from the original `i`/`x`
                # pair -- the inner result previously shadowed the JSON
                # payload being iterated.
                for item in payload['DATA']:
                    adir_term = item[4]
                    ascii_term = item[4][:2] + item[4][-2:]
                    # Round the amount to 2 decimal places.
                    amount = '{:.2f}'.format(item[2])
                    bill_id = str(item[16])
                    bill_list.append(bill_id)
                    stu_id = str(item[0])
                    item_date = (item[1][-4:] + "-" + item[1][:2] + "-"
                                 + item[1][3:5])
                    tot_code = str(item[6])
                    item_type = item[13]
                    if adirondack_term == adir_term:
                        # FORMAT DATE FOR SQL.
                        chk_date = datetime.strptime(item_date, '%Y-%m-%d')
                        new_date = datetime.strftime(chk_date, '%m/%d/%Y')
                        # Make sure this charge is not already in CX.
                        found = fn_check_cx_records(tot_code, adir_term,
                                                    new_date, stu_id,
                                                    amount, EARL)
                        if found != 0:
                            print("WARNING: Matching item exist in "
                                  "CX database")
                            continue
                        # Make sure item was not pulled previously.
                        if int(bill_id) in the_list:
                            continue
                        # Write the ASCII file and log the entry for
                        # future reference.
                        rec = []
                        rec.append(item[1])
                        # Limit to 26 characters just in case; keep only
                        # alphanumerics.
                        tmpstr = str(item[5][:26])
                        descr = ''.join(filter(str.isalnum, tmpstr))
                        rec.append(descr.strip())
                        rec.append("1-003-10041")
                        rec.append('{:.2f}'.format(item[2]))
                        rec.append(stu_id)
                        rec.append("S/A")
                        rec.append(tot_code)
                        rec.append(ascii_term)
                        file_descr = item_type.replace(" ", "_")
                        fee_file = (settings.ADIRONDACK_TXT_OUTPUT
                                    + tot_code + "_" + file_descr + "_"
                                    + datetimestr + ".csv")
                        with open(fee_file, 'a') as fee_output:
                            csvwriter = csv.writer(fee_output)
                            csvwriter.writerow(rec)
                        # Write record of item to the PROCESSED list.
                        with open(cur_file, 'a') as wffile:
                            csvwriter = csv.writer(wffile)
                            csvwriter.writerow(item)

            print(bill_list)
            # Decide whether any fee CSV was touched today. If TODAY
            # equals the file's last-modified date, something was written
            # this run; otherwise nothing changed. If the program does not
            # run, the record stays in THD to pick up next time.
            files = os.listdir(settings.ADIRONDACK_TXT_OUTPUT)
            csv_exists = False
            fils = []
            for f in files:
                if (f.startswith("2010") or f.startswith("2011")
                        or f.startswith("2031") or f.startswith("2040")):
                    last_modified = time.ctime(
                        os.path.getmtime(settings.ADIRONDACK_TXT_OUTPUT + f))
                    dtm = datetime.strptime(last_modified,
                                            "%a %b %d %H:%M:%S %Y")
                    sdt = datetime.strftime(dtm, "%m/%d/%y")
                    tds = datetime.strftime(datetime.today(), "%m/%d/%y")
                    if tds == sdt:
                        fils.append(f)
                        csv_exists = True

            # Mark bill items as exported in Adirondack.
            for bill_id in bill_list:
                fn_mark_bill_exported(bill_id, API_server, key)

            # When all done, email about the csv files. Needs to be
            # outside the for loop. (FIX: idiomatic truth test instead of
            # `== True`.)
            if csv_exists:
                subject = 'Housing Miscellaneous Fees'
                body = 'There are housing fees to process via ASCII ' \
                       'post'
                fn_sendmailfees_all_trms(settings.ADIRONDACK_ASCII_EMAIL,
                                         settings.ADIRONDACK_FROM_EMAIL,
                                         body, subject)
    except Exception as e:
        fn_write_error("Error in misc_fees.py - Main: " + repr(e))
def main():
    """Barnes and Noble Upload.

    Builds three CSVs (courses, users, enrollments) from the CX database,
    zips them, and SFTPs the archive to the B&N server. Failures are
    emailed to BARNES_N_NOBLE_TO_EMAIL.

    OpenSSH 7.0 and greater disable the ssh-dss (DSA) public key
    algorithm, which B&N use for authentication on their servers, so you
    have to add ssh-dss to the ssh/sftp command:

        -oHostKeyAlgorithms=+ssh-dss

    or add the following to the cron user's .ssh/config file:

        Host rex-sftp.bncollege.com
        HostName rex-sftp.bncollege.com
        HostKeyAlgorithms=+ssh-dss
    """
    # To get the last query date from cache (kept for future incremental
    # pulls; currently only rewritten at the end of a successful run).
    last_sql_date = cache.get('BN_Sql_date')

    # Working files are created in the CWD, zipped, then the zip is moved
    # to the B&N output directory.
    bn_course_file = "courses.csv"
    bn_enr_fil = "enrollments.csv"
    bn_usr_fil = "users.csv"
    bn_zip_fil = "carthage_bncroster.zip"
    if path.exists(settings.BARNES_N_NOBLE_CSV_OUTPUT + bn_zip_fil):
        os.remove(settings.BARNES_N_NOBLE_CSV_OUTPUT + bn_zip_fil)

    # Create the headers for the three files.
    with open(bn_course_file, 'w') as fil:
        fil.write("recordNumber,campus,school,institutionDepartment,term,"
                  "department,course,section,campusTitle,schoolTitle,"
                  "institutionDepartmentTitle,courseTitle,"
                  "institutionCourseCode,institutionClassCode,"
                  "institutionSubjectCodes,institutionSubjectsTitle,"
                  "crn,termTitle,termType,termStartDate,termEndDate,"
                  "sectionStartDate,sectionEndDate,classGroupId,"
                  "estimatedEnrollment" + "\n")
    with open(bn_enr_fil, 'w') as fil1:
        fil1.write("recordNumber,campus,school,institutionDepartment,term,"
                   "department,course,section,email,firstName,middleName,"
                   "lastName,userRole,sisUserId,includedInCourseFee,"
                   "studentFullPartTimeStatus,creditHours" + "\n")
    with open(bn_usr_fil, 'w') as fil2:
        fil2.write("recordNumber,campus,school,email,firstName,middleName,"
                   "lastName,userRole,sisUserId" + "\n")

    def _q(value):
        """Wrap a field value in double quotes for the B&N CSV format."""
        return '"' + value + '"'

    try:
        EARL = settings.INFORMIX_ODBC
        blank = ""

        # ---------------- Courses ----------------
        connection = get_connection(EARL)
        # connection closes when exiting the 'with' block
        with connection:
            data_result = xsql(
                COURSES, connection, key=settings.INFORMIX_DEBUG
            ).fetchall()
            ret = list(data_result)
        # FIX: list(...) is never None -- test for an empty result.
        if not ret:
            SUBJECT = "[Barnes and Noble Crs Enr] Application failed"
            BODY = "Course Query returned no data."
            send_mail(
                None, settings.BARNES_N_NOBLE_TO_EMAIL, SUBJECT,
                settings.BARNES_N_NOBLE_FROM_EMAIL, 'email.html', BODY,
            )
        else:
            cnt = 1
            with open(bn_course_file, 'a') as fil:
                for row in ret:
                    # school/schoolTitle are deliberately blanked; note
                    # institutionDepartment and estimatedEnrollment are
                    # written unquoted, as B&N expects.
                    fields = [
                        str(cnt),
                        _q(row[0]),                        # campus
                        _q(blank),                         # school
                        row[2],                            # institutionDepartment
                        _q(row[3]),                        # term
                        _q(row[4]),                        # department
                        _q(row[5]),                        # course
                        _q(row[6]),                        # section code
                        _q(row[7]),                        # campusTitle
                        _q(blank),                         # schoolTitle
                        _q(row[9]),                        # institutionDepartmentTitle
                        _q(row[10].strip()),               # courseTitle
                        _q(row[11]),                       # institutionCourseCode
                        _q(row[12]),                       # institutionClassCode
                        _q(row[13]),                       # institutionSubjectCodes
                        _q(row[14].strip()),               # institutionSubjectsTitle
                        _q(row[15]),                       # crn
                        _q(row[16]),                       # termTitle
                        _q(row[17]),                       # termType
                        _q(fn_format_date(row[18])),       # termStartDate
                        _q(fn_format_date(row[19])),       # termEndDate
                        _q(fn_format_date(row[20])),       # sectionStartDate
                        _q(fn_format_date(row[21])),       # sectionEndDate
                        _q(row[22]),                       # classGroupId
                        str(row[23]),                      # estimatedEnrollment
                    ]
                    fil.write(",".join(fields) + "\n")
                    cnt = cnt + 1

        # ---------------- Users ----------------
        connection = get_connection(EARL)
        with connection:
            data_result = xsql(
                USERS, connection, key=settings.INFORMIX_DEBUG
            ).fetchall()
            ret = list(data_result)
        if not ret:
            SUBJECT = "[Barnes and Noble Crs Enr] Application failed"
            BODY = "User Query returned no data."
            send_mail(
                None, settings.BARNES_N_NOBLE_TO_EMAIL, SUBJECT,
                settings.BARNES_N_NOBLE_FROM_EMAIL, 'email.html', BODY,
            )
        else:
            cnt = 1
            with open(bn_usr_fil, 'a') as fil2:
                for row in ret:
                    fields = [
                        str(cnt),
                        _q(row[0]),               # campus
                        _q(blank),                # school
                        _q(row[2]),               # email
                        _q(row[3]),               # firstName
                        _q(row[4]),               # middleName
                        _q(row[5]),               # lastName
                        _q(row[6].strip()),       # userRole
                        # FIX: the opening quote here had been mangled to
                        # '******' (secret-scrubber artifact); restored to
                        # match every other quoted field.
                        _q(str(row[8])),          # sisUserId
                    ]
                    fil2.write(",".join(fields) + "\n")
                    cnt = cnt + 1

        # ---------------- Enrollments ----------------
        connection = get_connection(EARL)
        with connection:
            data_result = xsql(
                ENROLLMENTS, connection, key=settings.INFORMIX_DEBUG
            ).fetchall()
            ret = list(data_result)
        if not ret:
            SUBJECT = "[Barnes and Noble Crs Enr] Application failed"
            BODY = "ENROLLMENTS Query returned no data."
            send_mail(
                None, settings.BARNES_N_NOBLE_TO_EMAIL, SUBJECT,
                settings.BARNES_N_NOBLE_FROM_EMAIL, 'email.html', BODY,
            )
        else:
            cnt = 1
            with open(bn_enr_fil, 'a') as fil3:
                for row in ret:
                    fields = [
                        str(cnt),
                        _q(row[0]),               # campus
                        _q(blank),                # school
                        _q(row[2]),               # institutionDepartment
                        _q(row[3]),               # term
                        _q(row[4]),               # department
                        _q(row[5]),               # course
                        _q(row[6].strip()),       # section
                        _q(row[7]),               # email
                        _q(row[8]),               # firstName
                        _q(row[9]),               # middleName
                        _q(row[10]),              # lastName
                        _q(row[11]),              # userRole
                        _q(str(row[12])),         # sisUserId
                        _q(row[13]),              # includedInCourseFee
                        _q(row[14]),              # studentFullPartTimeStatus
                        _q(str(row[15])),         # creditHours
                    ]
                    fil3.write(",".join(fields) + "\n")
                    cnt = cnt + 1

        # Create archive. FIX: close the ZipFile before moving it so the
        # central directory is flushed and the archive is not truncated.
        zf = zipfile.ZipFile(bn_zip_fil, mode='w')
        zf.write(bn_course_file)
        zf.write(bn_usr_fil)
        zf.write(bn_enr_fil)
        zf.close()

        # Move zip file into the output directory.
        shutil.move(bn_zip_fil, settings.BARNES_N_NOBLE_CSV_OUTPUT)

        # Send the file...
        cnopts = pysftp.CnOpts()
        cnopts.hostkeys = None
        xtrnl_connection = {
            'host': settings.BARNESNOBLE_AIP_HOST,
            'username': settings.BARNESNOBLE_AIP_USER,
            'port': settings.BARNESNOBLE_AIP_PORT,
            'private_key': settings.BARNESNOBLE_AIP_KEY,
            'cnopts': cnopts,
        }
        try:
            with pysftp.Connection(**xtrnl_connection) as sftp:
                sftp.cwd('inbox')
                remotepath = sftp.listdir()
                # FIX: `phile` was never assigned (only a commented-out
                # line existed), so the upload raised NameError.
                phile = os.path.join(settings.BARNES_N_NOBLE_CSV_OUTPUT,
                                     bn_zip_fil)
                sftp.put(phile)
            # Remove temp csv files.
            os.remove(bn_usr_fil)
            os.remove(bn_course_file)
            os.remove(bn_enr_fil)
        except Exception as error:
            SUBJECT = "[Barnes and Noble Crs Enr] Application failed"
            BODY = "Unable to PUT settings.BARNES_N_NOBLE_CSV_OUTPUT " \
                + bn_zip_fil \
                + " to Barnes and Noble server.\n\n{0}".format(error)
            # FIX: removed a stray send_mail call that referenced the
            # undefined names TO/FROM/body and attempted to *call* the
            # SUBJECT string -- it would itself have raised inside this
            # handler.
            send_mail(
                None, settings.BARNES_N_NOBLE_TO_EMAIL, SUBJECT,
                settings.BARNES_N_NOBLE_FROM_EMAIL, 'email.html', BODY,
            )

        # To set a new date in cache.
        a = datetime.now()
        last_sql_date = a.strftime('%Y-%m-%d %H:%M:%S')
        cache.set('BN_Sql_date', last_sql_date)
    except Exception as e:
        print("Error in main: " + str(e))
        SUBJECT = "[Barnes and Noble Crs Enr] Application failed"
        BODY = "Error"
        send_mail(
            None, settings.BARNES_N_NOBLE_TO_EMAIL, SUBJECT,
            settings.BARNES_N_NOBLE_FROM_EMAIL, 'email.html', BODY,
        )
def find_changes():
    """Report prog_enr_rec status changes from the CX audit tables.

    Work-in-progress: the query runs but its result is not yet consumed
    (see the commented TODO steps at the bottom of the try block).
    Relies on module-level globals: EARL (ODBC DSN), get_connection,
    xsql, settings.
    """
    try:
        """ --------GET STUDENTS WITH A STATUS CHANGE -----------------"""
        """ """
        # Join each non-backup audit row (N) to its matching backup row (O)
        # taken at the same timestamp, restricted to the last day, so that
        # O.acst/N.acst show the old and new academic status.
        statquery = '''select O.id, O.acst, O.audit_event, O.audit_timestamp,
            N.id, N.acst, N.audit_event, N.audit_timestamp
            from cars_audit:prog_enr_rec N
            left join cars_audit:prog_enr_rec O on O.id = N.id
            and O.acst != N.acst
            and O.audit_event = 'BU'
            where N.audit_event != 'BU'
            and N.audit_timestamp > TODAY - 1
            and N.audit_timestamp = O.audit_timestamp '''
        # NOTE(review): EARL is assumed to be a module-level global set by
        # the caller/entry point — confirm it is initialized before use.
        connection = get_connection(EARL)
        with connection:
            data_result = xsql(
                statquery, connection, key=settings.INFORMIX_DEBUG
            ).fetchall()
            # ret = list(data_result)
            # for i in ret:
            #     print(str(i[0]) + " " + i[1] + " " + i[5])
        # Remaining steps (not yet implemented):
        # Look for student and status in local table
        # Else look for student and status at BB via API
        # Add to BB if necessary
        # Add or update status in BB
        # Update local table if necessary
    except Exception as e:
        print("Error in main: " + str(e))
        # fn_write_error("Error in misc_fees.py - Main: "
        #                + e.message)


def write_bb_id():
    """Prototype: read Carthage IDs from id_list.csv and echo them.

    Mostly commented-out exploration code for retrieving/storing the
    Blackbaud (Raiser's Edge) constituent ID locally.  The only live
    behavior is reading "id_list.csv" from the current working directory
    and printing column 1 of each row.
    """
    try:
        """
        **************************************
        **************************************
        **************************************
        Here I need to get the local database stuff added
        """
        # print(settings.SERVER_URL)
        # print(settings.DATABASES['default']['NAME'])
        # nm = settings.DATABASES['default']['NAME']
        # print(settings.MSSQL_EARL)
        # # try:
        # userID = 'brahman'
        # """This works if I can figure out how to find the right
        # table and schema"""
        # cnxn = pyodbc.connect(settings.MSSQL_EARL)
        # for SQLServer, you must use single quotes in the SQL incantation,
        # otherwise it barfs for some reason
        # sql = "SELECT * FROM fwk_user"
        # sql = "SELECT table_name FROM information_schema.tables"
        #       "WHERE table_schema = " + nm
        # table_schema
        # sql = "select table_name from " \
        #       "information_schema.tables where table_type = 'BASE TABLE' " \
        #       "and table_schema not in ('information_schema','mysql', " \
        #       "'performance_schema','sys') order by table_name;"
        # cursor.execute(sql)
        # rows = cursor.fetchall()
        # cursor.close()
        # # return row[5]
        # for i in rows:
        #     print(i)
        # except:
        #     return None
        # try:
        #     cnx = mysql.connector.connect(
        #         user=settings.DATABASES['default']['USER'],
        #         password=settings.DATABASES['default']['PASSWORD'],
        #         host=settings.DATABASES['default']['HOST'],
        #         database=settings.DATABASES['default']['NAME']
        #     )
        # # except Exception as e:
        # #     if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
        # #         print("Something is wrong with your user name or password")
        # #     elif err.errno == errorcode.ER_BAD_DB_ERROR:
        # #         print("Database does not exist")
        # #     else:
        #     print(str(e))
        # else:
        #     cnx.close()
        """
        For testing and development...
        qry = "INSERT INTO cx_sandbox:raisers_edge_id_match
        (id, re_id, fullname, category, value, date_added, date_updated,
        comment)
        VALUES (1534657, 20369, 'Bob Amico', 'Student Status',
        'Administrator', '2019-11-13', '2019-11-21', 'Testing an add');"
        connection = get_connection(EARL)
        with connection:
            result = xsql(qry, connection,
                          key=settings.INFORMIX_DEBUG
                          ).execute
        """
        """
        **************************************
        **************************************
        **************************************
        """
        """ --------GET THE BLACKBAUD CONSTITUENT ID-----------------"""
        """ I will either have a list of students in a csv file or possibly
        in a to be determined database
        That way I can get the blackbaud internal id en masse and not need
        to make multiple calls based on the carthage ID
        I may also look to see if the student status has changed in CX
        """
        # #----------------------------------------
        # NOTE(review): id_list.csv is resolved relative to the CWD —
        # confirm the cron/entry point runs from the expected directory.
        with open("id_list.csv", 'r') as id_lst:
            reed = csv.reader(id_lst, delimiter=',')
            for row in reed:
                # print(row)
                const_id = row[1]
                # # # First, we have to get the internal ID from blackbaud for
                # # the constituent
                # const_id = get_constituent_id(current_token, 1534657)
                print("Constituent id = " + str(const_id))
    except Exception as e:
        print("Error in main: " + str(e))
        # fn_write_error("Error in misc_fees.py - Main: "
        #                + e.message)
def main():
    """Build the Handshake users.csv from CX and upload it to AWS S3.

    Archives the previous csv, writes a header row plus one row per
    student returned by HANDSHAKE_QUERY, then uploads the file to the
    Handshake S3 bucket (skipped when --test is given).  Errors are
    logged via fn_write_error and reported via fn_send_mail.

    Relies on module globals: database, test, EARL, settings,
    HANDSHAKE_QUERY, get_connection, xsql, fn_send_mail, fn_write_error,
    boto3, botocore.
    """
    # It is necessary to create the boto3 client early because the call to
    # the Informix database will not allow it later.
    client = boto3.client('s3')

    ##########################################################################
    # development server (bng), you would execute:
    # ==> python buildcsv.py --database=train --test
    # production server (psm), you would execute:
    # ==> python buildcsv.py --database=cars
    # without the --test argument
    ##########################################################################

    # date stamp used in the S3 object name
    datestr = datetime.now().strftime("%Y%m%d")
    # timestamp appended to the archived copy of the csv
    datetimestr = time.strftime("%Y%m%d%H%M%S")
    # output csv location (re-created on every run)
    handshakedata = '{0}users.csv'.format(settings.HANDSHAKE_CSV_OUTPUT)
    # archive destination for the previous run's csv
    archived_destination = '{0}users-{1}.csv'.format(
        settings.HANDSHAKE_CSV_ARCHIVED, datetimestr)

    try:
        # set global variable
        global EARL
        # Determine which database is being called from the command line.
        # BUG FIX: the original used two independent 'if' statements, so
        # --database=cars set EARL and then fell into the 'else', which
        # reset EARL to None.  An elif chain preserves the cars setting.
        if database == 'cars':
            EARL = settings.INFORMIX_ODBC
        elif database == 'train':
            EARL = settings.INFORMIX_ODBC_TRAIN
        else:
            # this will raise an error when we call get_engine()
            # below but the argument parser should have taken
            # care of this scenario and we will never arrive here.
            EARL = None

        # Archive: if the previous output file is missing, alert and log;
        # otherwise copy it to the archive directory before overwriting.
        if not os.path.isfile(handshakedata):
            SUBJECT = '[Handshake Application] failed'
            BODY = "There was no .csv output file to move."
            fn_send_mail(settings.HANDSHAKE_TO_EMAIL,
                         settings.HANDSHAKE_FROM_EMAIL, BODY, SUBJECT)
            fn_write_error("There was no .csv output file to move.")
        else:
            # rename and move the file to the archive directory
            shutil.copy(handshakedata, archived_destination)

        # Create the csv file and write the header row.
        with open(handshakedata, 'w') as file_out:
            csvWriter = csv.writer(file_out)
            csvWriter.writerow([
                "email_address", "username", "auth_identifier", "card_id",
                "first_name", "last_name", "middle_name", "preferred_name",
                "school_year_name",
                "primary_education:education_level_name",
                "primary_education:cumulative_gpa",
                "primary_education:department_gpa",
                "primary_education:primary_major_name",
                "primary_education:major_names",
                "primary_education:minor_names",
                "primary_education:college_name",
                "primary_education:start_date",
                "primary_education:end_date",
                "primary_education:currently_attending",
                "campus_name", "opt_cpt_eligible", "ethnicity", "gender",
                "disabled", "work_study_eligible", "system_label_names",
                "mobile_number", "assigned_to_email_address", "athlete",
                "veteran", "hometown_location_attributes:name",
                "eu_gdpr_subject",
            ])

        # Query CX and append one csv row per record.
        # connection closes when exiting the 'with' block
        connection = get_connection(EARL)
        with connection:
            data_result = xsql(HANDSHAKE_QUERY, connection,
                               key=settings.INFORMIX_DEBUG).fetchall()
            ret = list(data_result)
            # fetchall() always yields a list, so test emptiness rather
            # than the original (dead) 'is None' check.
            if not ret:
                SUBJECT = '[Handshake Application] failed'
                BODY = "SQL Query returned no data."
                fn_send_mail(settings.HANDSHAKE_TO_EMAIL,
                             settings.HANDSHAKE_FROM_EMAIL, BODY, SUBJECT)
            else:
                with open(handshakedata, 'a') as file_out:
                    csvWriter = csv.writer(file_out)
                    for row in ret:
                        csvWriter.writerow(row)

        # Send the file to Handshake via AWS S3.
        bucket_name = settings.HANDSHAKE_BUCKET
        object_name = datestr + '_users.csv'
        local_file_name = settings.HANDSHAKE_CSV_OUTPUT + 'users.csv'
        remote_folder = settings.HANDSHAKE_S3_FOLDER
        key_name = remote_folder + '/' + object_name

        if not test:
            try:
                client.upload_file(Filename=local_file_name,
                                   Bucket=bucket_name, Key=key_name)
            except boto3.exceptions.S3UploadFailedError as e:
                fn_write_error(
                    "Error in handshake buildcsv.py S3UploadFailedError - "
                    "Error = " + repr(e))
            except botocore.exceptions.ClientError as e:
                if e.response['Error']['Code'] == "404":
                    print("The object does not exist.")
                    fn_write_error(
                        "Error in handshake buildcsv.py Boto error - "
                        "object does not exist, "
                        "Unknown error in aws.p"
                        "Error = " + repr(e))
                else:
                    fn_write_error(
                        "Error in handshake buildcsv.py fn_upload_file, "
                        "Unknown error in aws.p"
                        "Error = " + repr(e))
            except Exception as e:
                fn_write_error(
                    "Error in handshake buildcsv.py fn_upload_file, Error = "
                    + repr(e))
        else:
            print("build but do not upload")
    except Exception as e:
        fn_write_error("Error in handshake buildcsv.py, Error = " + repr(e))
        SUBJECT = '[Handshake Application] Error'
        BODY = "Error in handshake buildcsv.py, Error = " + repr(e)
        fn_send_mail(settings.HANDSHAKE_TO_EMAIL,
                     settings.HANDSHAKE_FROM_EMAIL, BODY, SUBJECT)
def main():
    """Barnes and Noble Upload."""
    ###########################################################################
    # OpenSSH 7.0 and greater disable the ssh-dss (DSA) public key algorithm,
    # which B&N use for authentication on their servers, so you have to add
    # ssh-dss to the ssh/sftp command:
    #
    #   -oHostKeyAlgorithms=+ssh-dss
    #
    # or add the following to the cron user's .ssh/config file:
    #
    # Host sftp.bncollege.com
    #     HostName sftp.bncollege.com
    #     HostKeyAlgorithms=+ssh-dss
    ###########################################################################
    # timestamp used in the archive file name
    datetimestr = time.strftime('%Y%m%d%H%M%S')
    # export key -> sql file stem; values are replaced with the sql text below
    sqldict = {
        'AR100': 'stu_acad_rec_100',
        'AR200': 'stu_acad_rec_200',
        'EXENRCRS': 'exenrcrs',
    }
    ###########################################################################
    # Dict Value stu_acad_rec_100 selects active students and sets budget
    # limit for export (books = '100' & $3000.00)
    #
    # Dict Value stu_acad_rec_200 selects active students and sets budget
    # limit for export (supplies = '200' & $50.00)
    #
    # Dict Value 'EXENCRS' selects all current and future course-sections
    # (sec_rec) and instructor for Bookstore to order ISBN inventory
    ###########################################################################
    # replace each stem with the actual SQL text (value-only mutation while
    # iterating is safe — no keys are added or removed)
    for name, incantation in sqldict.items():
        phile = os.path.join(
            BASE_DIR, 'sql/barnesandnoble/{0}.sql'.format(incantation),
        )
        with open(phile) as sql_file:
            sqldict[name] = sql_file.read()
    cnopts = pysftp.CnOpts()
    # host key verification is disabled for these legacy endpoints
    cnopts.hostkeys = None
    # sFTP connection information for Barnes and Noble 1
    xtrnl_connection1 = {
        'host': settings.BARNESNOBLE1_HOST,
        'username': settings.BARNESNOBLE1_USER,
        'password': settings.BARNESNOBLE1_PASS,
        'port': settings.BARNESNOBLE1_PORT,
        'cnopts': cnopts,
    }
    # sFTP connection information for Barnes and Noble 2
    xtrnl_connection2 = {
        'host': settings.BARNESNOBLE2_HOST,
        'username': settings.BARNESNOBLE2_USER,
        'password': settings.BARNESNOBLE2_PASS,
        'port': settings.BARNESNOBLE2_PORT,
        'cnopts': cnopts,
    }
    # run each export query and write its csv
    for key, sql in sqldict.items():
        if DEBUG:
            print(key)
            # print(sql)
        with get_connection() as connection:
            rows = xsql(sql, connection, key=INFORMIX_DEBUG).fetchall()
        if rows:
            # set directory and filename to be stored
            filename = (
                '{0}{1}.csv'.format(settings.BARNESNOBLE_CSV_OUTPUT, key)
            )
            # set destination path and new filename to which it
            # will be renamed when archived
            archive_destination = ('{0}{1}_{2}_{3}.csv'.format(
                settings.BARNESNOBLE_CSV_ARCHIVED,
                'CCBAK',
                key,
                datetimestr,
            ))
            # create .csv file
            with open(filename, 'w') as csvfile:
                output = csv.writer(csvfile)
                # write header row to file
                # NOTE(review): the header is only written when DEBUG is
                # on — confirm that production deliberately omits headers.
                if DEBUG:
                    # write header row for (AR100, AR200)
                    if key in {'AR100', 'AR200'}:
                        output.writerow([
                            'StudentID', 'Elastname', 'Efirstname',
                            'Xmiddleinit', 'Xcred_limit', 'EProviderCode',
                            'Ebegdate', 'Eenddate', 'Eidtype', 'Erecordtype',
                            'Eaccttype',
                        ])
                    else:
                        # write header row for EXENCRS
                        output.writerow([
                            'bnUnitNo', 'bnTerm', 'bnYear', 'bnDept',
                            'bnCourseNo', 'bnSectionNo', 'bnProfName',
                            'bnMaxCapcty', 'bnEstEnrlmnt', 'bnActEnrlmnt',
                            'bnContdClss', 'bnEvngClss', 'bnExtnsnClss',
                            'bnTxtnetClss', 'bnLoctn', 'bnCourseTitl',
                            'bnCourseID',
                        ])
                for row in rows:
                    output.writerow(row)
        else:
            # no rows
            print('No values in list')
        # renaming old filename to newfilename and move to archive location
        # NOTE(review): if the very first key returns no rows, 'filename'
        # is unbound here and this raises NameError — verify intended.
        shutil.copy(filename, archive_destination)
        # end loop on rows
    # set local path {/data2/www/data/barnesandnoble/}
    source_dir = ('{0}'.format(settings.BARNESNOBLE_CSV_OUTPUT))
    # set local path and filenames
    # variable == /data2/www/data/barnesandnoble/{filename.csv}
    file_ar100 = '{0}AR100.csv'.format(source_dir)
    file_ar200 = '{0}AR200.csv'.format(source_dir)
    file_exencrs = '{0}EXENRCRS.csv'.format(source_dir)
    # for final email status
    success = True
    # sFTP PUT moves the EXENCRS.csv file to the Barnes & Noble server 1
    try:
        with pysftp.Connection(**xtrnl_connection1) as sftp:
            if DEBUG:
                print(file_exencrs)
            sftp.put(file_exencrs, preserve_mtime=True)
            # deletes original file from our server
            os.remove(file_exencrs)
    except Exception as error:
        success = False
        body = """
        Unable to PUT EXENCRS.csv to Barnes and Noble server.\n\n{0}
        """.format(error)
        send_mail(
            None,
            TO,
            SUBJECT(status='failed'),
            FROM,
            'email.html',
            body,
        )
        if DEBUG:
            print(error)
    # sFTP PUT moves the AR100.csv file to the Barnes & Noble server 2
    try:
        with pysftp.Connection(**xtrnl_connection2) as sftp_ar100:
            # in DEBUG mode the file is staged in TestFiles/ first
            if DEBUG:
                sftp_ar100.chdir('TestFiles/')
                print(file_ar100)
            sftp_ar100.put(file_ar100, preserve_mtime=True)
            sftp_ar100.chdir('ToBNCB/')
            sftp_ar100.put(file_ar100, preserve_mtime=True)
            # deletes original file from our server
            os.remove(file_ar100)
    except Exception as error_ar100:
        success = False
        body = """
        Unable to PUT AR100.csv to Barnes and Noble server.\n\n{0}
        """.format(error_ar100)
        send_mail(
            None,
            TO,
            SUBJECT(status='failed'),
            FROM,
            'email.html',
            body,
        )
        if DEBUG:
            print(error_ar100)
    # sFTP PUT moves the AR200.csv file to the Barnes & Noble server 2
    try:
        with pysftp.Connection(**xtrnl_connection2) as sftp_ar200:
            # NOTE(review): unlike AR100, the first put lands in the sFTP
            # home directory (no chdir beforehand) — confirm intended.
            sftp_ar200.put(file_ar200, preserve_mtime=True)
            sftp_ar200.chdir('ToBNCB/')
            sftp_ar200.put(file_ar200, preserve_mtime=True)
            # deletes original file from our server
            os.remove(file_ar200)
    except Exception as error_ar200:
        success = False
        body = """
        Unable to PUT AR200.csv to Barnes and Noble server.\n\n{0}
        """.format(error_ar200)
        send_mail(
            None,
            TO,
            SUBJECT(status='failed'),
            FROM,
            'email.html',
            body,
        )
        if DEBUG:
            print(error_ar200)
    # sFTP upload complete send success message
    if success:
        body = 'The Barnes and Noble files were successfully uploaded.'
        subject = SUBJECT(status='success')
        send_mail(None, TO, subject, FROM, 'email.html', body)
def main():
    """Send all student, adult, facstaff records to everbridge."""
    # determines which database is being called from the command line
    if database == 'cars':
        earl = settings.INFORMIX_ODBC
    elif database == 'sandbox':
        earl = settings.INFORMIX_ODBC_SANDBOX
    elif database == 'train':
        earl = settings.INFORMIX_ODBC_TRAIN
    else:
        print('invalid database name: {0}'.format(database))
        sys.exit()
    # one export per population
    for key in ('students', 'adult', 'facstaff'):
        sql_file = os.path.join(
            settings.BASE_DIR,
            'sql/everbridge/{0}.sql'.format(key),
        )
        with open(sql_file) as incantation:
            sql = incantation.read()
        if limit:
            # NOTE(review): appended with no separating whitespace —
            # assumes each .sql file ends with a newline; confirm.
            sql += 'LIMIT {0}'.format(limit)
        if test:
            print('key = {0}, sql = {1}'.format(key, sql))
        badmatches = []
        with get_connection(earl) as connection:
            rows = xsql(sql, connection, key=settings.INFORMIX_DEBUG).fetchall()
        if rows:
            if test:
                print("rows {0}".format(len(rows)))
            # timestamped csv so successive runs never collide
            filename = ('{0}{1}_upload_{2}.csv'.format(
                settings.EVERBRIDGE_CSV_OUTPUT,
                key,
                time.strftime('%Y%m%d%H%M%S'),
            ))
            with open(filename, 'w') as csv_file:
                output = csv.writer(
                    csv_file,
                    dialect='excel',
                    lineterminator='\n',
                )
                if key == 'facstaff':
                    # write header row for FacStaff
                    output.writerow(settings.EVERBRIDGE_FACSTAFF_HEADERS)
                else:
                    # write header row for Student and Adult
                    output.writerow(settings.EVERBRIDGE_STUDENT_HEADERS)
                for row in rows:
                    if row.customvalue1:
                        row.customvalue1 = row.customvalue1.strip()
                    output.writerow(row)
                    if test:
                        print("row = \n{0}".format(row))
                    # checking for Bad match in either students or facstaff;
                    # matched rows still go into the csv, but are collected
                    # for the alert email below
                    if row and ((row.customvalue1
                                 and 'Bad match:' in row.customvalue1)
                                or (row.customvalue2
                                    and 'Bad match:' in row.customvalue2)):
                        badmatches.append("""
                        {0}, {1}, {2}, {3}, {4}, {5}, {6}, {7}, {8}, {9},
                        {10} {11} {12}, {13}, {14}, {15}, {16}, {17},
                        {18}, {19} {20}\n\n
                        """.format(
                            row.lastname,
                            row.firstname,
                            row.middleinitial,
                            row.suffix,
                            row.externalid,
                            row.country,
                            row.businessname,
                            row.recordtype,
                            row.phone1,
                            row.phonecountry1,
                            row.emailaddress1,
                            row.emailaddress2,
                            row.sms1,
                            row.sms1country,
                            row.customfield1,
                            row.customvalue1,
                            row.customfield2,
                            row.customvalue2,
                            row.customfield3,
                            row.customvalue3,
                            row.end,
                        ))
            badmatches_table = ''.join(badmatches)
            if test:
                print("badmatches = \n{0}".format(badmatches))
            if badmatches:
                if test:
                    print("badmatches_table = \n{0}".format(
                        badmatches_table,
                    ))
                    print("length of badmatches = {0}.".format(
                        len(badmatches),
                    ))
                body = """
                A bad match exists in the file we are sending to
                Everbridge.\n\n{0}\n\n Bad match records: {1}
                """.format(badmatches_table, len(badmatches))
                send_mail(
                    None,
                    TO,
                    '[Everbridge] Bad match',
                    FROM,
                    'email.html',
                    body,
                )
            else:
                print('No bad matches found.')
            if not test:
                # SFTP the CSV
                try:
                    print('sftp attempt')
                    print(filename)
                    # go to our storage directory on the server
                    os.chdir(settings.EVERBRIDGE_CSV_OUTPUT)
                    cnopts = pysftp.CnOpts()
                    # host key verification disabled for this endpoint
                    cnopts.hostkeys = None
                    xtrnl_connection = {
                        'host': settings.EVERBRIDGE_HOST,
                        'username': settings.EVERBRIDGE_USER,
                        'private_key': settings.EVERBRIDGE_PKEY,
                        'cnopts': cnopts,
                    }
                    with pysftp.Connection(**xtrnl_connection) as sftp:
                        sftp.chdir("replace/")
                        print("current working directory: {0}".format(
                            sftp.getcwd(),
                        ))
                        sftp.put(filename, preserve_mtime=True)
                        print("file uploaded:")
                        for phile in sftp.listdir():
                            print(phile)
                            print(str(sftp.lstat(phile)))
                        sftp.close()
                    print("sftp put success: {0}".format(key))
                except Exception as error:
                    print('sftp put fail [{0}]: {1}'.format(key, error))
                    body = """
                    Unable to PUT upload to Everbridge server.\n\n{0}
                    """.format(error)
                    send_mail(
                        None,
                        TO,
                        '[Everbridge SFTP] {0} failed'.format(key),
                        FROM,
                        'email.html',
                        body,
                    )
            else:
                print("TEST: no sftp")
        else:
            print("No results from the database for {0}".format(key))
    print("Done")
def main():
    """Fetch mugshots from CMS."""
    root = settings.LIVEWHALE_API_URL
    connection = get_connection()
    # all faculty/staff rows from the provisioning view
    sql = """
        SELECT * FROM provisioning_vw
        WHERE faculty IS NOT NULL OR staff IS NOT NULL
        ORDER BY lastname
    """
    with connection:
        users = xsql(sql, connection, key=settings.INFORMIX_DEBUG).fetchall()
    for user in users:
        # search LiveWhale profiles by username
        earl = '{0}/live/json/profiles/search/{1}/'.format(root, user.username)
        response_search = requests.get(url=earl)
        json_search = response_search.json()
        if json_search:
            print(earl)
            email = '{0}@carthage.edu'.format(user.username)
            print('{0}'.format(email))
            for search in json_search:
                # profiles_149 may be a list or a scalar — normalize it
                if isinstance(search.get('profiles_149'), list):
                    p149 = search.get('profiles_149')[0].strip()
                elif search.get('profiles_149'):
                    p149 = search.get('profiles_149').strip()
                else:
                    p149 = None
                # a search hit counts only if one of these profile fields
                # matches the user's carthage email
                status = ((search.get('profiles_37')
                           and search.get('profiles_37').strip() == email)
                          or (p149 == email)
                          or (search.get('profiles_80')
                              and search.get('profiles_80').strip() == email)
                          or (search.get('profiles_45')
                              and search.get('profiles_45')[0].strip() == email))
                if status:
                    # fetch the full profile for the matched search result
                    earl = '{0}/live/profiles/{1}@JSON'.format(
                        root, search['id'])
                    print(earl)
                    response_profile = requests.get(url=earl)
                    profile = response_profile.json()
                    # follow the parent profile when one exists
                    if profile.get('parent'):
                        earl = '{0}/live/profiles/{1}@JSON'.format(
                            root,
                            profile['parent'],
                        )
                        print(earl)
                        response_parent = requests.get(url=earl)
                        profile = response_parent.json()
                    if profile.get('thumb'):
                        # rewrite the thumb URL into a 300px https thumbnail.
                        # NOTE(review): assumes the CMS thumb URL always has
                        # the size segment at path index 8 — confirm against
                        # current LiveWhale URLs before relying on this.
                        listz = profile['thumb'].split('/')
                        listz[8] = '300'
                        listz[0] = 'https:'
                        new_listz = listz[:9]
                        new_listz.append(listz[-1])
                        profile['thumbnail'] = '/'.join(new_listz)
                        # print(profile['thumbnail'])
                    local_phile = '/data2/www/data/profiles/{0}.jpg'.format(
                        user.id)
                    thumb = profile.get('thumbnail')
                    if thumb:
                        print(local_phile)
                        # stream the image to disk in 8 KiB chunks
                        with requests.get(thumb, stream=True) as request:
                            request.raise_for_status()
                            with open(local_phile, 'wb') as phile:
                                for chunk in request.iter_content(
                                        chunk_size=8192):
                                    phile.write(chunk)
                else:
                    print(search)
def main():
    ''' main function '''
    global EARL
    # if test:
    #     print("this is a test")
    #     logger.debug("debug = {}".format(test))
    # else:
    #     print("this is not a test")
    # set global variable
    # determines which database is being called from the command line
    # if database == 'cars':
    #     EARL = settings.INFORMIX_ODBC
    if database == 'train':
        EARL = settings.INFORMIX_ODBC_TRAIN
    elif database == 'sandbox':
        # NOTE(review): 'sandbox' maps to the TRAIN DSN here (there is no
        # SANDBOX constant in use) — confirm this is deliberate.
        EARL = settings.INFORMIX_ODBC_TRAIN
    else:
        #
        # this will raise an error when we call get_engine()
        # below but the argument parser should have taken
        # care of this scenario and we will never arrive here.
        EARL = None
    # establish database connection
    # choose prod vs test Adirondack API endpoint + secret
    if test != "test":
        API_server = "carthage_thd_prod_support"
        key = settings.ADIRONDACK_API_SECRET
    else:
        API_server = "carthage_thd_test_support"
        key = settings.ADIRONDACK_TEST_API_SECRET
    """+++++++++++++++++++++++++++++++++++++++++++++++++"""
    """This will look for records in the stu_acad_rec that do not exist in
    the stu_serv_rec and will create a basic entry
    Incoming first time students will get a basic entry w/o bldg, room,
    parking,bill code, meal plan
    Should start looking Nov 1 for upcoming spring term and March 15 for
    upcoming fall term
    Returning students will get a basic minimal entry for the fall term
    For the spring term, starting March 15 we will want to copy the info
    from the fall term with the possible exception of the parking entry.
    That gets billed only for the year, not for the second semester.
    """
    print(EARL)
    # (last_sess, last_yr, target_sess, target_yr)
    ret = fn_set_term_vars()
    print(ret)
    last_sess = ret[0]
    last_yr = ret[1]
    target_sess = ret[2]
    target_yr = ret[3]
    # NOTE(review): hard-coded override clobbers fn_set_term_vars()'s
    # target year — looks like leftover debug code; remove once confirmed.
    target_yr = 2021
    """For Spring session, we need to collect info if it exists from the
    fall stu serv rec"""
    if target_sess == 'RC':
        cur_ssr_sql = get_fall_to_spring(target_sess, target_yr)
        """For Fall session, we do not need to know anything about the
        spring stu_serv_rec and there shouldn't be any First Time Frosh"""
    else:
        cur_ssr_sql = get_spring_to_fall(target_sess, target_yr)
    # print(cur_ssr_sql)
    connection = get_connection(EARL)
    """ connection closes when exiting the 'with' block """
    with connection:
        data_result = xsql(cur_ssr_sql, connection,
                           key=settings.INFORMIX_DEBUG).fetchall()
        cur_ssr = list(data_result)
        # print(cur_ssr)
    if len(cur_ssr) != 0:
        for row in cur_ssr:
            print('----------------')
            print("Stu Serv Rec needed for " + str(row[0]))
            # print(row)
            carth_id = row[0]
            # stu_cl = row[9]
            # earn_hrs = row[6]
            """Fall term is always a clean insert - no parking info, those
            will come later via ???"""
            if target_sess == 'RA':
                print("clean insert - no need to use last term")
                # insert_ssr (defined earlier in this file) returns 1 on
                # success / 0 on failure — the name 'insSql' is historical
                insSql = insert_ssr(carth_id, target_sess, target_yr,
                                    "", "", "", "R", "R", EARL)
                # print(insSql)
            else:
                print("search previous term stu_serv_rec")
                """This query will find the prior stu_serv_rec if it
                exists"""
                last_ssr_sql = get_last_ssr(carth_id, last_yr, last_sess)
                # print(last_ssr_sql)
                connection = get_connection(EARL)
                """ connection closes when exiting the 'with' block """
                with connection:
                    data_result = xsql(last_ssr_sql, connection,
                                       key=settings.INFORMIX_DEBUG).fetchall()
                    last_ssr = list(data_result)
                if len(last_ssr) != 0:
                    print("Stu Serv Rec Found")
                    print("Can use previous term")
                    # carry the fall-term housing values into spring
                    for r in last_ssr:
                        billcode = r[9]
                        bldg = r[5]
                        room = r[6]
                        intdhsg = r[3]
                        rsvstat = r[10]
                        # mealplan = r[7]
                        # parkloc = r[8]
                        """Here I need something to decipher the existing
                        entry.for parking"""
                        # print ("Insert " + str(carth_id), target_sess,
                        #        str(target_yr), bldg, room, billcode,
                        #        intdhsg, rsvstat)
                        # x = insert_ssr(carth_id, target_sess, target_yr,
                        #                bldg, room, billcode, intdhsg,
                        #                rsvstat, EARL)
                        # print(x)
                else:
                    print("No prior rec - insert clean")
                    # x = insert_ssr(carth_id, target_sess, target_yr,
                    #                "UN", "UN", "", "R", "R", EARL)
                    # print(x)
    else:
        print("Nothing to do")
def main():
    """Compare Adirondack housing assignments against CX stu_serv_rec rows.

    Pulls posted housing assignments from the Adirondack THD API for each
    current term, looks up the matching stu_serv_rec row in CX, and appends
    any mismatched records (bldg / room / bill code) to Compare.csv.

    Term will be RA + Current year
    One big push for returning students for RC term happens in December
    Only returning will be in the system, no need to screen out frosh
    Push again June 30 and July 30 for RC term (will include frosh)
    Aug 1 start automation for fall term
    Stop automation for RC on last day of class - appr May 20
    May 1, June 30, July 30 December for upcoming term
    From Aug to Dec, grab all RA current year
    From Jan to May 1 grab all RC current year
    On MAY 1, grab all RA current year
    on June 30 grab all RA current year
    On third wednesday in December grab all RC Next
    On Next day in Dec, go back to RA Current
    Only options are RC20xx and RA20xx, so I only need to determine which
    year to pass during each time frame.
    Question is, for spring housing, will both RA and RC need to be dealt
    with?

    Usage:
        python compare_systems.py --database=train --test
    Must specify the database, whether testing or live and whether user
    input is required.
    """
    try:
        # set global variable
        global EARL
        # Determine which database is being called from the command line.
        # BUG FIX: the original used two independent 'if' statements, so
        # --database=cars set EARL and then the 'else' reset it to None.
        if database == 'cars':
            EARL = settings.INFORMIX_ODBC
        elif database == 'train':
            EARL = settings.INFORMIX_ODBC_TRAIN
        else:
            # this will raise an error when we call get_engine()
            # below but the argument parser should have taken
            # care of this scenario and we will never arrive here.
            EARL = None

        # establish database connection / choose prod vs test API endpoint
        if test != "test":
            API_server = "carthage_thd_prod_support"
            key = settings.ADIRONDACK_API_SECRET
        else:
            API_server = "carthage_thd_test_support"
            key = settings.ADIRONDACK_TEST_API_SECRET
        # print(API_server)
        # print(key)
        # print(EARL)

        # Seconds from UTC Zero hour; the md5 of utcts+secret authenticates
        # each THD API request (assumes the default UTF-8).
        utcts = fn_get_utcts()
        hashstring = str(utcts) + key
        hash_object = hashlib.md5(hashstring.encode())
        datetimestr = time.strftime("%Y%m%d%H%M%S")

        # Get the current term(s)
        connection = get_connection(EARL)
        # connection closes when exiting the 'with' block
        with connection:
            data_result = xsql(Q_GET_TERM, connection,
                               key=settings.INFORMIX_DEBUG).fetchall()
            ret = list(data_result)
        # fetchall() always returns a list, so test emptiness
        # (the original 'is None' check could never fire).
        if not ret:
            fn_write_error(
                "Error in room_assignments.py - Main: No term found ")
            fn_send_mail(
                settings.ADIRONDACK_TO_EMAIL,
                settings.ADIRONDACK_FROM_EMAIL,
                "Error in room_assignments.py - Main: No term "
                "found ", "Adirondack Error")
            quit()

        for row in ret:
            print(row[0])
            session = row[0]
            hall = ''
            posted = '1'
            # IMPORTANT! won't work if string has any spaces. NO SPACES
            url = "https://carthage.datacenter.adirondacksolutions.com/" \
                + API_server + "/apis/thd_api.cfc?" \
                "method=housingASSIGNMENTS&" \
                "Key=" + key + "&" \
                "utcts=" + str(utcts) + "&" \
                "h=" + hash_object.hexdigest() + "&" \
                "TimeFrameNumericCode=" + session + "&" \
                "Posted=" + posted + "&" \
                "HALLCODE=" + hall + "& GHOST=0"
            # + "&" \
            # "STUDENTNUMBER=" + "1374557"
            # "CurrentFuture=-1" + "&" \
            # "Ghost=0" + "&" \
            # NOTE: HALLCODE can be empty
            # + "&" \
            # "HallCode=" + 'SWE'
            '''
            DEFINITIONS
            Posted: 0 returns only NEW unposted, 1 returns posted, as in
                out to our system 2 changed or cancelled
            PostAssignments: -1 will mark the record as posted.
            CurrentFuture: -1 returns only current and future
            Cancelled: -1 is for cancelled, 0 for not cancelled
            In theory, every room assignment in Adirondack should have a
            bill code'''
            print("URL = " + url)
            # print("______")

            # Retry the API call up to 4 times.  BUG FIXES vs original:
            # - the HTTPError handler printed undefined 'errh' (NameError);
            # - non-HTTP errors never incremented the counter, looping
            #   forever;
            # - 'x' was unbound if every attempt failed.
            x = {'DATA': None}
            attempt = 1
            while attempt < 5:
                try:
                    response = requests.get(url)
                    response.raise_for_status()
                    # print(response.content)
                    x = json.loads(response.content)
                    # print(x['DATA'])
                    attempt = 5
                except requests.exceptions.HTTPError as err:
                    attempt += 1
                    if attempt == 5:
                        print("Http Error:", err)
                except requests.exceptions.ConnectionError as err:
                    attempt += 1
                    print("Error Connecting:", err)
                except requests.exceptions.Timeout as err:
                    attempt += 1
                    print("Timeout Error:", err)
                except requests.exceptions.RequestException as err:
                    attempt += 1
                    print("OOps: Something Else", err)

            if not x['DATA']:
                # print("No new data found")
                continue

            rowct = len(x['DATA'])
            print(rowct)
            room_data = fn_encode_rows_to_utf8(x['DATA'])
            # print(room_data)
            # Write header for the comparison csv
            fn_write_compare_header()
            for rec in room_data:
                if rec[0] is None:
                    print("No ID")
                    continue
                # print(rec)
                carthid = rec[0]
                bldgname = rec[1]
                room_type = rec[6]
                canceled = rec[16]
                cancelreason = rec[19]
                checkin = rec[10]
                checkout = rec[12]
                ghost = rec[20]
                # checkout date used to flag stale assignments below
                oldrectest = datetime.strptime(rec[12], '%m/%d/%Y')
                # print(cancelreason)
                posted = rec[21]
                roomassignmentid = rec[22]
                sess = rec[9][:2]
                year = rec[9][-4:]
                term = rec[9]
                bldg = fn_fix_bldg(rec[2])
                billcode = fn_get_bill_code(carthid, str(bldg), room_type,
                                            roomassignmentid, session,
                                            API_server, key)
                # print(billcode)
                if billcode == '':
                    billcode = 'No Matching Billcode for ' \
                        + str(roomassignmentid)
                '''
                Intenhsg can be:
                R = Resident, O = Off-Campus, C = Commuter
                This routine is needed because the adirondack hall codes
                match to multiple descriptions and hall descriptions have
                added qualifiers such as FOFF, MOFF, UNF, LOCA that are not
                available elsewhere using the API.  Have to parse it to
                assign a generic room.
                For non residents, we have a generic room for CX and a
                dummy room on the Adirondack side, so we need two
                variables, one for Adirondack and one for CX.
                '''
                # print(bldg)
                if bldg == 'CMTR':
                    intendhsg = 'C'
                    room = bldgname[(bldgname.find('_') + 1) - len(bldgname):]
                elif bldg == 'OFF':
                    intendhsg = 'O'
                    room = bldgname[(bldgname.find('_') + 1) - len(bldgname):]
                elif bldg == 'ABRD':
                    intendhsg = 'O'
                    room = bldgname[(bldgname.find('_') + 1) - len(bldgname):]
                elif bldg == 'UN':
                    intendhsg = 'R'
                    room = bldgname[(bldgname.find('_') + 1) - len(bldgname):]
                else:
                    intendhsg = 'R'
                    room = rec[4]

                # Posted of 2 represents a change OR cancellation.  If not
                # a cancellation, skip the record because there will be
                # another record posted 0 with the correct bill record ID.
                if posted == 2 and canceled != -1:
                    print("Record 1 of 2")
                    continue
                if posted == 2 and canceled == -1:
                    billcode = 'NOCH'
                if canceled == -1 and cancelreason == 'Withdrawal':
                    rsvstat = 'W'
                else:
                    rsvstat = 'R'

                # Validate if the stu_serv_rec exists first:
                # id, sess, yr, rsv_stat, intend_hsg, campus, bldg, room,
                # bill_code
                q_validate_stuserv_rec = ''' select id, sess, yr,
                rsv_stat, intend_hsg, campus, trim(bldg),
                trim(room), no_per_room, add_date,
                trim(bill_code), hous_wd_date
                from stu_serv_rec
                where yr = {2}
                and sess = "{1}"
                and id = {0}'''.format(carthid, sess, year)
                # print(q_validate_stuserv_rec)
                # NOTE(review): reuses the outer connection inside the
                # loop; the original did the same — confirm the driver
                # keeps it usable after the first 'with' exit.
                with connection:
                    data_result = xsql(
                        q_validate_stuserv_rec, connection,
                        key=settings.INFORMIX_DEBUG).fetchall()
                    ssr_rows = list(data_result)
                # print(ssr_rows)
                for srow in ssr_rows:
                    csrsvstat = srow[3]
                    # BUG FIX: was srow[3] (duplicate of rsv_stat);
                    # intend_hsg is column 4 of the SELECT above.
                    cxintendhsg = srow[4]
                    cxbldg = srow[6]
                    cxroom = str(srow[7])
                    cxbillcode = srow[10]
                    if oldrectest < datetime.strptime(
                            '12/01/2020', '%m/%d/%Y'):
                        oldrec = 'Old Record'
                    else:
                        oldrec = ''
                    if ghost != 0:
                        ghostrec = 'Ghost Record'
                    else:
                        ghostrec = ""
                    # if csrsvstat != rsvstat \
                    #         or cxintendhsg != intendhsg \
                    if (cxbldg != bldg
                            or cxroom != room
                            or cxbillcode != billcode) \
                            and ghostrec == "":
                        print(carthid)
                        with open("Compare.csv", 'a') as output:
                            csvwriter = csv.writer(output)
                            csvwriter.writerow([
                                carthid, sess, year, rsvstat,
                                csrsvstat, intendhsg, cxintendhsg,
                                room_type, checkin, checkout, bldg,
                                cxbldg, room, cxroom, billcode,
                                cxbillcode, roomassignmentid, oldrec,
                                ghostrec,
                            ])
    except Exception as e:
        # BUG FIX: the original appended str(carthid) to the *result* of
        # print() (None + str -> TypeError inside the handler).
        print("Error in compare_systems.py- Main: " + repr(e))