def save_data_frame(data):
    # Insert each DataFrame row into job_posting, skipping rows whose
    # (source, source_record_id) pair is already present.
    sys.stdout.write("Saving " + str(len(data)) + " records: ")
    sys.stdout.flush()
    myconn = get_conn()
    mycursor = myconn.cursor(pymysql.cursors.DictCursor)
    for index, record in data.iterrows():
        sql = ("select count(*) as num from job_posting "
               "inner join source on job_posting.source_id = source.id "
               "where source.source = %s and job_posting.source_record_id = %s")
        mycursor.execute(sql, (record["source"], record["source_record_id"]))
        myresult = mycursor.fetchone()
        if myresult["num"] == 0:
            # Build the column list, mapping the "source" column to source_id.
            sql = "insert into job_posting ("
            for col in data.columns:
                if col == "source":
                    sql += "source_id" + ", "
                else:
                    sql += col + ", "
            sql = sql[0:len(sql) - 2] + ") values ( "
            # Build the placeholder list; the source name is resolved to its id
            # via a subquery.
            for col in data.columns:
                if col == "source":
                    sql += "(select id from source where source = %s) , "
                else:
                    sql += "%s , "
            sql = sql[0:len(sql) - 2] + ")"
            mycursor.execute(sql, record.tolist())
            sys.stdout.write('+')
        else:
            sys.stdout.write('.')
        sys.stdout.flush()
    myconn.commit()
    mycursor.close()
    myconn.close()
    sys.stdout.write("\r\n")
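# Hypothetical usage sketch (not from the original source): save_data_frame()
# expects a pandas DataFrame whose columns match the job_posting table and
# include "source" and "source_record_id"; the other column names below are
# illustrative assumptions.
import pandas as pd

postings = pd.DataFrame([
    {"source": "example-board", "source_record_id": "12345",
     "title": "Data Engineer", "url": "https://example.com/jobs/12345"},
])
# save_data_frame(postings)   # prints '+' for inserted rows, '.' for duplicates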
def wrapper(*args, **kws):
    global gui_error, repo_version
    try:
        return fn(*args, **kws)
    except Exception as e:
        # Collect a traceback report plus environment details.
        contents = get_tb()
        contents += str(type(e)) + "(" + str(e) + ")\n"
        contents += "\nplatform.uname():\n" + str(platform.uname())
        contents += "\nRepo version: " + repo_version
        if config.current["debug"]:
            import pdb
            pdb.set_trace()
        if gui_error is not None:
            gui_error()
        # Try to upload the report; fall back to a local log file on failure.
        try:
            conn = config.get_conn()
            conn.request("POST", "/upload/error", contents,
                         {"Content-Type": "text/plain",
                          "Content-Length": str(len(contents))})
            conn.getresponse().read()
        except Exception:
            open("error.log", "w+").write("Error sending traceback:\n" + contents)
        return
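# Hypothetical context (an assumption, not shown in the original source):
# `wrapper` closes over `fn`, which suggests it is the inner function of an
# error-reporting decorator roughly shaped like this minimal sketch.
def report_errors(fn):
    def wrapper(*args, **kws):
        try:
            return fn(*args, **kws)
        except Exception:
            # Build and upload the traceback report here, as in the full
            # wrapper above; this sketch only shows the closure structure.
            return None
    return wrapper

# @report_errors            # hypothetical usage
# def scan_files(path):
#     ...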
def is_puid_uploaded(hfile):
    # POST the PUID tags and mark the file uploaded if the server reports 'done'.
    conn = get_conn()
    body = urllib.urlencode(hfile.puid_tags)
    headers = {'Content-type': 'application/x-www-form-urlencoded'}
    url = get_url('/upload/tags')
    conn.request('POST', url, body, headers)
    response = check_response(conn.getresponse())
    if response == 'done':
        hfile.uploaded = True
        return True
    else:
        return False
def tag_upload(tags):
    if not tags:
        return
    # Serialize the tag list and POST it as form-encoded data.
    payload = simplejson.dumps(tags)
    url = get_url('/upload/tags')
    params = {'version': tags[0]['version'], 'tags': payload}
    body = urllib.urlencode(params)
    headers = {'Content-type': 'application/x-www-form-urlencoded'}
    conn = get_conn()
    conn.request('POST', url, body, headers)
    return simplejson.loads(check_response(conn.getresponse()))
def get_job_posting_urls():
    myconn = get_conn()
    mycursor = myconn.cursor(pymysql.cursors.DictCursor)
    mycursor.execute("""
        select source.source, method.method, job_posting_url.*
        from job_posting_url
        inner join source on job_posting_url.source_id = source.id
        join method on job_posting_url.method_id = method.id
        -- where url like '%taleo%'
    """)
    myresult = mycursor.fetchall()
    mycursor.close()
    myconn.close()
    return myresult
def upload_file(hfile):
    contents = hfile.contents
    guimgr.start_upload(hfile.ppname, len(contents))
    url = get_url('/upload/file/' + hfile.sha)
    conn = get_conn()
    if hfile.puid:
        url += '&puid=' + hfile.puid
    # Either rate-limit the upload (reporting progress) or send it in one request.
    if config.current['rate_limit']:
        response = rate_limit.post(conn, url, contents, guimgr.upload_progress)
    else:
        conn.request('POST', url, contents, {'Content-Type': 'audio/x-mpeg-3'})
        response = conn.getresponse()
    check_response(response)
    hfile.uploaded = True
def create_table():
    db = get_conn()
    db.execute('''
        CREATE TABLE IF NOT EXISTS user(
            user_id INTEGER PRIMARY KEY UNIQUE NOT NULL,
            email TEXT UNIQUE NOT NULL,
            password TEXT NOT NULL
        )''')
    db.execute('''
        CREATE TABLE IF NOT EXISTS mark(
            mark_id INTEGER PRIMARY KEY UNIQUE NOT NULL,
            mark_author TEXT,
            mark_name TEXT,
            notes TEXT,
            location TEXT,
            user_id INTEGER NOT NULL,
            FOREIGN KEY(user_id) REFERENCES user(user_id)
        )''')
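# Hypothetical get_conn() for the sqlite helpers here (an assumption; the real
# helper is not shown). create_table() above and drop_table() below only need an
# object with an execute() method, which a sqlite3 connection provides.
import sqlite3

def get_conn(path="marks.db"):                  # database path is illustrative
    conn = sqlite3.connect(path)
    conn.execute("PRAGMA foreign_keys = ON")    # enforce the user(user_id) foreign key
    return conn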
def do_conn(self, arg, opts=None):
    (host, port, db) = config.get_conn(int(arg))
    self.prompt = "%s:%s > " % (host, str(db))
    self.conn = RedisClient(host, port, db)
def drop_table(table):
    db = get_conn()
    if table == 'mark':
        db.execute('DROP TABLE IF EXISTS mark')