def get(self):
    """Render an HTML summary page for one processed upload file.

    Fetches the file's JSON metadata blob from the key/value store under
    the ``file`` query argument and writes headline stats, plus a
    per-record breakdown of any flagged potential-duplicate matches.
    """
    key = self.get_argument('file')
    db = dbi()
    db.conn()
    raw_data = db.get(key)
    data = json.loads(raw_data)
    flagged = data.get('Flagged')  # single lookup instead of repeated .keys() tests
    # Collect fragments and join once: the original built the page with
    # repeated `response +=`, which is quadratic in the number of matches.
    parts = ['''
<h2>%s</h2>
<div>Start Time: %s </div>
<div>End Time: %s </div>
<div>Total Records: %s </div>
<div>Flagged Records: %s</div>
''' % (data['FileName'], data['StartTime'], data['FinishTime'],
       data['TotalRecords'], 0 if flagged is None else len(flagged))]
    if flagged is not None:
        # NOTE(review): values are interpolated into HTML unescaped; if
        # names/states can contain markup this is an XSS vector -- consider
        # cgi.escape(). Output left byte-identical here.
        for name, value in flagged.iteritems():
            parts.append("<div style='text-indent: 15px;'>%s (note, this is the name as it appears in the file)</div>" % name)
            parts.append("<div style='text-indent: 25px;'>Potential Matches (data as found in CX):</div>")
            for id, match in value.iteritems():
                parts.append("<div style='text-indent: 35px;'>Student ID: %s</div>" % id)
                parts.append("<div style='text-indent: 35px;'>Student Name: %s %s %s</div>" % (match['FirstName'], match['MiddleName'], match['LastName']))
                parts.append("<div style='text-indent: 35px;'>State: %s</div>" % match['State'])
                parts.append("<div style='text-indent: 35px;'>Matched Middle Name: %s</div>" % match['MatchedMiddle'])
                parts.append("<div style='text-indent: 35px;'>Matched State: %s</div>" % match['MatchedState'])
    self.write(''.join(parts))
    self.finish()
def process(filename): reader = csv.DictReader(open('//barnabas/Users/applyweb/' + filename, "rb")) flagged_records = {} fileinfo = {} fileinfo['StartTime'] = datetime.now().strftime('%m-%d-%Y %H:%M:%S') records = 0 for row in reader: records += 1 dup_info = {} row["FULLNAME"] = "%s, %s %s" % (row["NAME_LAST"].strip(), row["NAME_FIRST"].strip(), row["NAME_MIDDLE"].strip()) dup_info, isdup = scan_dupe(row["NAME_LAST"].strip(), row["NAME_FIRST"].strip(), row["NAME_MIDDLE"].strip(), row["STATE"].strip()) if not isdup: #load_data(row) pass else: #finished here on 7 march, 2011. flagged_records should contain all of the matches for each record flagged_records[row["FULLNAME"]] = dup_info ''' now i just need to load the file info into the redis db ''' fileinfo['TotalRecords'] = records fileinfo['FinishTime'] = datetime.now().strftime('%m-%d-%Y %H:%M:%S') fileinfo['FileName'] = filename if len(flagged_records) > 0: fileinfo['Flagged'] = flagged_records import mail mail.notify(fileinfo) print json.dumps(fileinfo) import dbi db = dbi.dbi() db.conn() db.set(filename.split('.')[0], json.dumps(fileinfo)) print 'Data Stored'
def get_file_list(self):
    """Pull new applyweb files from the SFTP drop into the network share.

    Downloads every remote file that does not already exist locally (as
    <name>.csv) and returns the list of newly fetched local filenames.
    """
    transport = paramiko.Transport(
        (config.get_config('sftp', 'SERVER'),
         int(config.get_config('sftp', 'PORT'))))
    # NOTE(review): the original also opened a dbi() connection here that
    # was never used (and never closed); removed.
    files = []
    try:
        transport.connect(username=config.get_config('sftp', 'USERNAME'),
                          password=config.get_config('sftp', 'PASSWORD'))
        sftp = paramiko.SFTPClient.from_transport(transport)
        try:
            remote_path = config.get_config('sftp', 'PATH')  # hoisted out of the loop
            dirfiles = os.listdir('//barnabas/Users/applyweb/')
            for attr in sftp.listdir_attr(remote_path):
                local_name = attr.filename + '.csv'
                if local_name not in dirfiles:
                    sftp.get(remote_path + attr.filename,
                             '//barnabas/Users/applyweb/' + local_name)
                    files.append(local_name)
        finally:
            sftp.close()
    finally:
        # The original leaked the transport (and sftp session) whenever a
        # connect/list/get call raised; finally guarantees cleanup.
        transport.close()
    return files
def get(self):
    """Render the main page listing every local file with its status.

    Status per file: 'flagged' (processed, duplicates found), 'good'
    (processed, clean), or 'sync' (no metadata stored yet). Files are
    sorted newest-first by the 8-digit date stamp in their name.
    """
    files = []
    db = dbi()
    db.conn()
    for f in os.listdir('//barnabas/Users/applyweb'):
        # Hoisted: the original repeated this exact regex in all three
        # branches. NOTE(review): still raises AttributeError for a name
        # with no 8-digit run, exactly as before -- confirm every file
        # carries a date stamp.
        stamp = re.search(r"[0-9]{8}", f).group(0)
        data = db.get(f.split('.')[0])
        if data is None:
            status = 'sync'
        else:
            status = 'flagged' if 'Flagged' in json.loads(data) else 'good'
        files.append((f, stamp, status))
    self.render("main.html",
                files=sorted(files, key=lambda entry: entry[1], reverse=True))