def bcpFiles():
    """Run the stored SQL deletions, then BCP the marker/ref/alias files
    into the database.

    No-op when DEBUG is set or bcp loading is disabled (bcpon false).
    Relies on module globals: markerFile/refFile/aliasFile (open bcp files),
    execSQL (list of delete statements), bcpCommand template, diagFile, db.
    Returns nothing.
    """
    if DEBUG or not bcpon:
        #print execSQL
        return

    # close files so buffered rows are flushed before bcp reads them
    markerFile.close()
    refFile.close()
    aliasFile.close()
    db.commit()

    bcp1 = bcpCommand % (markerTable, markerFileName)
    bcp2 = bcpCommand % (refTable, refFileName)
    bcp3 = bcpCommand % (aliasTable, aliasFileName)

    # execute the sql deletions
    for r in execSQL:
        db.sql(r, None)

    for bcpCmd in [bcp1, bcp2, bcp3]:
        diagFile.write('%s\n' % bcpCmd)
        os.system(bcpCmd)

    db.commit()
    return
def processMirbase(mgiID, mbIDs):
    """Refresh the miRBase ID associations for one marker.

    Deletes existing miRBase (_LogicalDB_key = 83) accession rows attached
    to the marker identified by mgiID, then (when mbIDs is non-empty)
    writes a row to the assocload input file so the new IDs get loaded.

    mgiID -- preferred MGI: accession ID of the marker
    mbIDs -- miRBase ID string for the assocload file; '' means delete-only
    """
    #
    # Delete association mirbase associations to mgiID - some won't exist
    # If mbIDs = '', could be a miRNA marker that we want to delete miRBase
    # ids from
    #
    # Collect every miRBase accession on any marker into a temp table...
    db.sql('''select a1._Accession_key as aKey, a1._Object_key as _Marker_key, a1.accid as mbID
        into temp mirbase
        from ACC_Accession a1
        where a1._MGIType_key = 2
        and a1._LogicalDB_key = 83''', None)
    db.sql('create index idx1 on mirbase(_Marker_key)', None)

    # ...then narrow to the ones whose marker carries this MGI ID
    results = db.sql('''select m.aKey, m.mbID
        from mirbase m, ACC_Accession a
        where m._Marker_key = a._Object_key
        and a._MGIType_key = 2
        and a._LogicalDB_key = 1
        and a.accid = '%s' ''' % mgiID, 'auto')
    db.sql('drop table mirbase', None)

    for r in results:
        deleteAccession(r['aKey'])

    # write out to assocload input file
    if mbIDs != '':
        fpMirbaseAssoc.write('%s%s%s%s' % (mgiID, TAB, mbIDs, CRT))

    db.commit()
def processSlim():
    """Report and remove redundant DO slim terms.

    A slim term that is a DAG descendant of another slim term (set 1048) is
    redundant: it is written to the sanity report file and its
    MGI_SetMember row is deleted.  Returns 0.
    """
    dosanityFileName = os.environ['DO_MGI_SLIM_SANITY_FILE']
    dosanityFile = open(dosanityFileName, 'w')

    DELETE_SLIM = 'delete from MGI_SetMember where _Set_key = 1048 and _SetMember_key = %s'
    SPACE = ' '

    # report header
    dosanityFile.write('\n\nDO slim terms that are decendents of another DO slim term\n\n')
    dosanityFile.write('descendent_term' + 35*SPACE + 'another_slim_term\n')
    dosanityFile.write('---------------' + 35*SPACE + '-----------------\n\n')

    # pairs where one slim term is a closure descendant of another
    results = db.sql('''
        select tt.term as descendent_term, ss.term as another_slim_term, t._SetMember_key
        from MGI_SetMember t, DAG_Closure dc, MGI_SetMember s, VOC_Term tt, VOC_Term ss
        where t._Set_key = 1048
        and t._Object_key = dc._DescendentObject_key
        and dc._AncestorObject_key = s._Object_key
        and s._Set_key = 1048
        and t._Object_key != s._Object_key
        and t._Object_key = tt._Term_key
        and s._Object_key = ss._Term_key
        ''', 'auto')

    for r in results:
        dosanityFile.write('%-50s %-50s\n' % (r['descendent_term'], r['another_slim_term']))
        deleteSQL = DELETE_SLIM % (r['_SetMember_key'])
        #dosanityFile.write(deleteSQL + '\n\n')
        db.sql(deleteSQL, None)

    dosanityFile.close()
    db.commit()
    return 0
def _updateExpressionCache(assayKey, results):
    """ Do live update on results for assayKey.

    Deletes all cache rows for the assay, then inserts the new rows,
    assigning fresh sequential keys starting past the current max.
    NOTE(review): each `result` list is mutated in place (key inserted at
    position 0), and db.commit() is called inside the explicit
    begin/commit transaction pair — confirm the db wrapper tolerates this.
    """
    db.sql('begin transaction', None)

    # delete all cache records for assayKey
    deleteSql = 'delete from %s where _assay_key = %s' % (TABLE, assayKey)
    db.sql(deleteSql, None)
    db.commit()

    maxKey = _fetchMaxExpressionKey()

    # insert new results
    for result in results:
        maxKey += 1
        result.insert(0, maxKey)
        insertSql = INSERT_SQL % tuple([_sanitizeInsert(c) for c in result])
        db.sql(insertSql, None)
    db.commit()
    db.sql('commit transaction', None)
def import_db_dump(archive_path):
    """Import data from .tar.xz archive into the database.

    Streams the archive through an external `pxz` decompressor, verifies
    the dump's schema version against db.SCHEMA_VERSION, and COPYies each
    recognized table file (names in _TABLES) into its table.

    archive_path -- path to the .tar.xz dump
    Raises Exception on schema-version mismatch.
    """
    pxz_command = ["pxz", "--decompress", "--stdout", archive_path]
    pxz = subprocess.Popen(pxz_command, stdout=subprocess.PIPE)

    table_names = _TABLES.keys()

    with create_cursor() as cursor:
        # "r|" = non-seekable stream mode: members must be read in order
        with tarfile.open(fileobj=pxz.stdout, mode="r|") as tar:
            for member in tar:
                file_name = member.name.split("/")[-1]

                if file_name == "SCHEMA_SEQUENCE":
                    # Verifying schema version
                    schema_seq = int(tar.extractfile(member).read().strip())
                    if schema_seq != db.SCHEMA_VERSION:
                        raise Exception("Incorrect schema version! Expected: %d, got: %d."
                                        "Please, get the latest version of the dump."
                                        % (db.SCHEMA_VERSION, schema_seq))
                    else:
                        logging.info("Schema version verified.")

                else:
                    if file_name in table_names:
                        logging.info(" - Importing data into %s table..." % file_name)
                        cursor.copy_from(tar.extractfile(member), '"%s"' % file_name,
                                         columns=_TABLES[file_name])

    commit()
    pxz.stdout.close()
def drop_tables(self):
    """Drop all database tables.  Currently a stub: the DROP statements
    are not yet implemented, so only commit() runs."""
    with create_cursor() as cursor:
        # TODO(roman): See if there's a better way to drop all tables.
        # FIXME: Need to drop all tables that we have there.
        #cursor.execute('DROP TABLE IF EXISTS i_am_a_table CASCADE;')
        pass
    commit()
def update(dataset_id, dictionary, author_id):
    """Update a dataset row and fully replace its classes/members.

    dataset_id -- id of the dataset to update
    dictionary -- dataset data validated against BASE_JSON_SCHEMA
                  (keys: name, public, classes; description optional)
    author_id  -- id of the user recorded as author
    Raises jsonschema.ValidationError on bad input.
    """
    # TODO(roman): Make author_id argument optional (keep old author if None).
    jsonschema.validate(dictionary, BASE_JSON_SCHEMA)
    with create_cursor() as cursor:
        if "description" not in dictionary:
            dictionary["description"] = None
        cursor.execute("""UPDATE dataset
                          SET (name, description, public, author) = (%s, %s, %s, %s)
                          WHERE id = %s""",
                       (dictionary["name"], dictionary["description"],
                        dictionary["public"], author_id, dataset_id))

        # Replacing old classes with new ones
        cursor.execute("""DELETE FROM dataset_class WHERE dataset = %s""", (dataset_id,))
        for cls in dictionary["classes"]:
            if "description" not in cls:
                cls["description"] = None
            cursor.execute("""INSERT INTO dataset_class (name, description, dataset)
                              VALUES (%s, %s, %s) RETURNING id""",
                           (cls["name"], cls["description"], dataset_id))
            cls_id = cursor.fetchone()[0]

            for recording_mbid in cls["recordings"]:
                cursor.execute("INSERT INTO dataset_class_member (class, mbid) "
                               "VALUES (%s, %s)", (cls_id, recording_mbid))

    # If anything bad happens above, it should just rollback by default.
    commit()
def create_from_dict(dictionary, author_id):
    """Creates a new dataset from a dictionary.

    dictionary -- dataset data validated against BASE_JSON_SCHEMA
                  (keys: name, public, classes; description optional)
    author_id  -- id of the user recorded as author

    Returns:
        ID of the newly created dataset.
        (NOTE(review): the previous docstring claimed an (id, error) tuple;
        the code returns only the id and lets exceptions propagate.)
    """
    jsonschema.validate(dictionary, BASE_JSON_SCHEMA)

    with create_cursor() as cursor:
        if "description" not in dictionary:
            dictionary["description"] = None
        cursor.execute("""INSERT INTO dataset (id, name, description, public, author)
                          VALUES (uuid_generate_v4(), %s, %s, %s, %s) RETURNING id""",
                       (dictionary["name"], dictionary["description"],
                        dictionary["public"], author_id))
        dataset_id = cursor.fetchone()[0]

        for cls in dictionary["classes"]:
            if "description" not in cls:
                cls["description"] = None
            cursor.execute("""INSERT INTO dataset_class (name, description, dataset)
                              VALUES (%s, %s, %s) RETURNING id""",
                           (cls["name"], cls["description"], dataset_id))
            cls_id = cursor.fetchone()[0]

            for recording_mbid in cls["recordings"]:
                cursor.execute("INSERT INTO dataset_class_member (class, mbid) "
                               "VALUES (%s, %s)", (cls_id, recording_mbid))

    # If anything bad happens above, it should just rollback by default.
    commit()
    return dataset_id
def bcpFiles():
    """BCP the accumulated load files into the database.

    Closes all output files (flushing buffered rows), then runs one bcp
    command per table.  No-op when DEBUG is set or bcp loading is
    disabled (bcpon false).  Returns nothing.
    """
    if DEBUG or not bcpon:
        return

    # close files so buffered rows are flushed before bcp reads them
    primerFile.close()
    markerFile.close()
    refFile.close()
    aliasFile.close()
    accFile.close()
    accRefFile.close()
    noteFile.close()
    db.commit()

    bcp1 = bcpCommand % (primerTable, primerFileName)
    bcp2 = bcpCommand % (markerTable, markerFileName)
    bcp3 = bcpCommand % (refTable, refFileName)
    bcp4 = bcpCommand % (aliasTable, aliasFileName)
    bcp5 = bcpCommand % (accTable, accFileName)
    # BUG FIX: bcp6 previously repeated (accTable, accFileName), loading the
    # accession file twice and never loading the accession-reference file
    # that accRefFile writes.  (TODO confirm accRefTable/accRefFileName are
    # the module globals paired with accRefFile.)
    bcp6 = bcpCommand % (accRefTable, accRefFileName)
    bcp7 = bcpCommand % (noteTable, noteFileName)

    for bcpCmd in [bcp1, bcp2, bcp3, bcp4, bcp5, bcp6, bcp7]:
        diagFile.write('%s\n' % bcpCmd)
        os.system(bcpCmd)

    db.commit()
    return
def get_related_labels(labels, not_labels, unlimited=True):
    """Find labels co-occurring with `labels` but not `not_labels`.

    Builds a temporary `matches` table of images that have every label in
    `labels` and none in `not_labels`, then counts the other labels on
    those images.

    labels / not_labels -- label ids (assumes numeric ids; they are
        interpolated into SQL unescaped — TODO confirm callers validate)
    unlimited -- when False, cap the result at 20 rows
    Returns rows of (id, name, count) ordered by count desc, name asc.
    """
    current_app.logger.debug(labels)
    if not labels and not not_labels:
        return []
    limit = '' if unlimited else ' LIMIT 20'
    sql_in = ','.join(labels + not_labels)

    # HAVING clause: every wanted label present, every excluded label absent
    having = []
    if labels:
        having.extend(map(lambda x: 'SUM(CASE WHEN label=%s THEN 1 ELSE 0 END) > 0' % x, labels))
    if not_labels:
        having.extend(map(lambda x: 'SUM(CASE WHEN label=%s THEN 1 ELSE 0 END) = 0' % x, not_labels))
    having = ' AND '.join(having)

    db.execute("CREATE TEMPORARY TABLE matches (image INTEGER NOT NULL PRIMARY KEY)")
    db.execute("INSERT INTO matches SELECT image FROM " + db.tbl_image_label +
               " GROUP BY image HAVING " + having)
    related = db.fetch("SELECT id, name, COUNT(A.image) AS count FROM matches INNER JOIN " +
                       db.tbl_image_label + " A ON matches.image=A.image INNER JOIN " +
                       db.tbl_label + " ON (A.label = id) WHERE A.label NOT IN (" + sql_in +
                       ") GROUP BY id HAVING COUNT(A.image) > 0 ORDER BY count DESC, name ASC" + limit)
    db.execute("DROP TABLE matches")
    db.commit()
    return related
def bcpFiles():
    '''
    # requires:
    #
    # effects:
    #	BCPs the data into the database
    #	(note and note-chunk files via the PG_DBUTILS bcpin.csh script)
    #	No-op (after closing files) when DEBUG is set.
    #
    # returns:
    #	nothing
    #
    '''
    db.commit()
    db.useOneConnection()

    # close files so buffered rows are flushed before bcp reads them
    noteFile.close()
    noteChunkFile.close()
    sqlFile.close()

    if DEBUG:
        return

    bcpCommand = os.environ['PG_DBUTILS'] + '/bin/bcpin.csh'
    currentDir = os.getcwd()

    bcpNote = '%s %s %s %s %s %s "\\t" "\\n" mgd' \
        % (bcpCommand, db.get_sqlServer(), db.get_sqlDatabase(), noteTable, currentDir, noteFileName)
    diagFile.write('%s\n' % bcpNote)
    os.system(bcpNote)

    bcpNote = '%s %s %s %s %s %s "\\t" "\\n" mgd' \
        % (bcpCommand, db.get_sqlServer(), db.get_sqlDatabase(), noteChunkTable, currentDir, noteChunkFileName)
    diagFile.write('%s\n' % bcpNote)
    os.system(bcpNote)
def signin(db):
    """Bottle handler: register a new user from the posted form.

    Validates username/email/password, and when no user with the same name
    or email exists, creates the user plus a default notebook containing a
    welcome note, stores the user id in the beaker session, and returns an
    {'ok': bool, 'msg': unicode} result dict.
    """
    session = bottle.request.environ.get('beaker.session')
    username = bottle.request.forms.username.strip().lower()
    email = bottle.request.forms.email.strip().lower()
    username_re = re.compile(r'^[_0-9A-Za-z]{5,20}$')
    # NOTE(review): the dot before the TLD is unescaped, so this regex is
    # looser than intended — confirm before tightening.
    email_re = re.compile(r'^[a-zA-Z0-9._%-+]+@[a-zA-Z0-9._%-]+.[a-zA-Z]{2,6}$')
    password = bottle.request.forms.password
    if not username_re.match(username):
        return {'ok':False,'msg':u"Имя пользователя должно быть не менее 5 и \
не более 20 символов, и содержать латинские буквы и цифры и знак подчеркивания"}
    if not email_re.match(email):
        return {'ok':False, 'msg': u"Неправильный формат e-mail"}
    # NOTE(review): check enforces a 6-char minimum but the message says 7
    # ("не менее 7 символов") — one of the two is wrong; confirm intent.
    if len(password) < 6:
        return {'ok':False,'msg':u"Пароль должен содержать не менее 7 символов"}
    user = db.query(User).filter(or_(User.name==username,User.email==email)).first()
    if not user:
        # NOTE(review): hashlib.sha256 requires bytes on Python 3 — this
        # passes the raw form value; confirm the runtime is Python 2.
        new_user = User(name = username, password=hashlib.sha256(password).hexdigest(), email = email)
        # every new user gets a default notebook with a welcome note
        new_user_notebook = Notebook(name=u"Блокнот", deflt=True)
        new_user_notebook.user = new_user
        new_note = Note(text=u"Добро пожаловать в Notebook!", title=u"Первая заметка");
        new_user_notebook.notes.append(new_note);
        db.add(new_user)
        db.commit()
        session['user_id'] = new_user.id
        return {'ok':True, 'msg':u"Пользователь {0} создан.".format(new_user.name)}
    else:
        return {'ok':False,'msg':u"Пользователь с таким именем или адресом e-mail уже существует"}
def send_message(textmessage, db):
    """Persist a text message and attach its endpoint Employee objects.

    Saves (or updates) the message, resolves src/dst ids to Employee rows
    onto src_object/dst_object, and commits.  Network delivery is not yet
    implemented (see trailing commented-out call).
    Python 2 source (print statement).
    """
    print "Send_request"
    db.add_or_update(textmessage)
    textmessage.src_object = db.get_one_by_id(Employee, textmessage.src)
    textmessage.dst_object = db.get_one_by_id(Employee, textmessage.dst)
    db.commit()
    #networkcomponent.send(SETTINGS.)
def bcpFiles(
    recordsProcessed	# number of records processed (integer)
    ):
    """BCP the image/pane/accession load files into the database, then
    advance the MGI accession-ID counter by recordsProcessed.

    No-op when DEBUG is set or bcp loading is disabled (bcpon false).
    Returns nothing.
    """
    global referenceKey

    if DEBUG or not bcpon:
        return

    # close files so buffered rows are flushed before bcp reads them
    outImageFile.close()
    outPaneFile.close()
    outAccFile.close()
    outCopyrightFile.close()
    outCaptionFile.close()
    db.commit()

    bcp1 = bcpCommand % (imageTable, iFileName)
    bcp2 = bcpCommand % (paneTable, pFileName)
    bcp3 = bcpCommand % (accTable, aFileName)

    for bcpCmd in [bcp1, bcp2, bcp3]:
        diagFile.write('%s\n' % bcpCmd)
        os.system(bcpCmd)

    # update the max Accession ID value
    db.sql('''select * from ACC_setMax (%d)''' % (recordsProcessed), None)
    db.commit()
    return
def executeBCP():
    '''
    # requires:
    #
    # effects:
    #	BCPs the data into the database
    #	(MGI_Synonym via the PG_DBUTILS bcpin.csh script), then resyncs
    #	the mgi_synonym_seq sequence with the table's max key
    #
    # returns:
    #	nothing
    #
    '''
    # flush buffered rows before bcp reads the file
    synFile.close()
    db.commit()

    bcpCommand = os.environ['PG_DBUTILS'] + '/bin/bcpin.csh'
    bcp1 = '%s %s %s %s %s %s "|" "\\n" mgd' % \
        (bcpCommand, db.get_sqlServer(), db.get_sqlDatabase(), 'MGI_Synonym', datadir, 'MGI_Synonym.bcp')
    diagFile.write('%s\n' % bcp1)
    os.system(bcp1)

    # update mgi_synonym_seq auto-sequence
    db.sql(''' select setval('mgi_synonym_seq', (select max(_synonym_key) from MGI_Synonym)) ''', None)
    db.commit()
def set_labels(self, label_names): assert hasattr(self, 'id'), "id must be set before fetching data" # select ids for each label labels = [] for label_name in label_names: label = db.fetch("SELECT id FROM " + db.tbl_label + " WHERE name=%s", [label_name], one=True, as_list=True) if label is None: label = db.execute("INSERT INTO " + db.tbl_label + "(name) VALUES(%s) RETURNING id", [label_name]) db.commit() labels.append(label) # get current labels current_labels = db.fetch("SELECT label FROM " + db.tbl_image_label + " WHERE image=%s", [self.id], as_list=True) # update database to_be_added = diff(labels, current_labels) to_be_deleted = diff(current_labels, labels) for label in to_be_added: db.execute("INSERT INTO " + db.tbl_image_label + "(image, label) VALUES(%s,%s)", [self.id, label]) for label in to_be_deleted: db.execute("DELETE FROM " + db.tbl_image_label + " WHERE image=%s AND label=%s", [self.id, label]) # if label is not used anymore, delete it permanently count = db.fetch("SELECT COUNT(image) FROM label_image WHERE label=%s", [label], one=True, as_list=True) if not count: db.execute("DELETE FROM " + db.tbl_label + " WHERE id=%s", [label]) db.commit() return labels
def updateMarkerType():
    """Apply the queued marker-type changes.

    For every MGI ID in markersToUpdateDict, resolve the new type term to
    its key (mkrTypeToKeyDict), look up the marker key via the MARKER_KEY
    query, and run the UPDATE statement stamped with updatedByKey.
    Commits once at the end.
    """
    for mgiID, typeTerm in markersToUpdateDict.items():
        newTypeKey = mkrTypeToKeyDict[typeTerm]
        keyRows = db.sql(MARKER_KEY % mgiID, 'auto')
        markerKey = keyRows[0]['_Marker_key']
        db.sql(UPDATE % (newTypeKey, updatedByKey, markerKey), None)
    db.commit()
def log(image_id, status):
    """Record one image-view/status event in the image log.

    Uses the authenticated user id, or the client IP for anonymous
    visitors.  Events from admin users are deliberately not logged.
    """
    user_id = session.get('auth', None)
    if user_id is None:
        user_id = request.remote_addr
    # skip logging for admins
    if user_id in config.ADMINS:
        return
    db.execute("INSERT INTO " + db.tbl_image_log + " VALUES(%s, %s, %s, NOW())",
               (image_id, user_id, status))
    db.commit()
def _create_new_inc_dump_record():
    """Creates new record for incremental dump and returns its ID and
    creation time (the RETURNING row: (id, created))."""
    with create_cursor() as cursor:
        cursor.execute("INSERT INTO incremental_dumps (created) VALUES (now()) RETURNING id, created")
        # Fetch the RETURNING row before committing — consistent with
        # _create_job, and avoids relying on driver behavior of reading
        # results after commit().
        row = cursor.fetchone()
        commit()
        logging.info("Created new incremental dump record (ID: %s)." % row[0])
        return row
def create(musicbrainz_id):
    """Insert a new user row and return its generated id.

    musicbrainz_id -- MusicBrainz username to store
    """
    # TODO(roman): Do we need to make sure that musicbrainz_id is case insensitive?
    with create_cursor() as cursor:
        cursor.execute(
            'INSERT INTO "user" (musicbrainz_id) VALUES (%s) RETURNING id',
            (musicbrainz_id,),
        )
        commit()
        return cursor.fetchone()[0]
def createExperimentMaster(): ''' # requires: # # effects: # preparing for new/existing MLD_Expts, MLD_Expt_Marker # # returns: # nothing # ''' # # only run this once after the input file is ready to pick up the J: # global exptDict, seqExptDict global exptTag results = db.sql('''select _Expt_key, chromosome, tag from MLD_Expts where _Refs_key = %d order by tag''' % (referenceKey), 'auto') # experiment records exists if len(results) > 0: # if 'full', then delete existing MLD_Expt_Marker records if mode == 'full': # delete the existing *details*..... db.sql('delete MLD_Expt_Marker from MLD_Expt_Marker m, MLD_Expts e ' + \ ' where e._Refs_key = %d and e._Expt_key = m._Expt_key ' % (referenceKey), \ 'auto', execute = not DEBUG) # set seqExptDict to save the next max(sequenceNum) for each _Expt_key/chromosome for r in results: exptDict[r['chromosome']] = r['_Expt_key'] s = db.sql('''select max(sequenceNum) + 1 as maxKey from MLD_Expt_Marker where _Expt_key = %d''' % (r['_Expt_key']), 'auto') if s[0]['maxKey'] is None: seqExptDict[r['_Expt_key']] = 1 else: seqExptDict[r['_Expt_key']] = s[0]['maxKey'] exptTag = r['tag'] + 1 # if no experiment records exist....create them else: for c in chromosomeList: if c in inputChrList: createExperimentBCP(c) # Update the AccessionMax value db.sql('select * from ACC_setMax (%d)' % (exptTag), None) db.commit()
def set_job_result(job_id, result):
    """Store the evaluation result for a job and refresh its updated
    timestamp.

    job_id -- id of the dataset_eval_jobs row
    result -- result payload to store
    """
    update_query = (
        "UPDATE dataset_eval_jobs "
        "SET (result, updated) = (%s, current_timestamp) "
        "WHERE id = %s"
    )
    with create_cursor() as cursor:
        cursor.execute(update_query, (result, job_id))
        commit()
def car_brand_map_insert(car_brand):
    """Insert one 273↔168 brand mapping row.

    car_brand -- dict with keys '273_brand_id', '273_brand_name',
                 '168_brand_id', '168_brand_name'
    """
    db = car_type_pool.connection()
    cursor = db.cursor()
    # Parameterized query instead of %-interpolation: brand names with
    # quotes no longer break the statement / allow SQL injection.
    sql = ("INSERT INTO `car_brand_map`"
           "(`273_brand_id`, `273_brand_name`, `168_brand_id`, `168_brand_name`) "
           "VALUES (%s, %s, %s, %s)")
    params = (car_brand['273_brand_id'], car_brand['273_brand_name'],
              car_brand['168_brand_id'], car_brand['168_brand_name'])
    print(sql)
    result = cursor.execute(sql, params)
    db.commit()
    cursor.close()
    db.close()
def get_note(notebook_id, db):
    """Bottle handler: delete a non-default notebook owned by the session
    user.

    NOTE(review): despite the name `get_note`, this DELETEs the notebook —
    confirm the route/name mismatch is intentional.
    Returns {} whether or not a matching notebook was found.
    """
    session = bottle.request.environ.get('beaker.session')
    user_id = session.get('user_id')
    try:
        # deflt=False guards the user's default notebook from deletion
        notebook = db.query(Notebook).filter_by(user_id = user_id, id = notebook_id, deflt=False).one()
        db.delete(notebook)
        db.commit()
        return {}
    except NoResultFound:
        return {}
def __on_add(self, evt):
    """wx event handler: show the add-data dialog and, on OK, persist the
    new record and insert '<col0> - <col1>' at the top of the title list.
    """
    add_data_frame = AddDataFrame(self)
    if add_data_frame.ShowModal() == wx.ID_OK:
        data = add_data_frame.GetData()
        db.add_data(data[0], data[1], data[2])
        # mirror the new record in the list control and select it
        self.list_judul.Insert(str(data[0] + ' - ' + data[1]), 0)
        self.list_judul.Refresh()
        self.list_judul.SetStringSelection(str(data[0] + ' - ' + data[1]), True)
        db.commit()
    # dialog is destroyed regardless of OK/Cancel
    add_data_frame.Destroy()
def update_db(self, **kwargs):
    """write **kwargs to db, also update 'last_checked'

    Builds `UPDATE sources SET urlhash=MD5(url), k1=%s, k2=%s ... WHERE
    source_id = %s` from the kwarg names; values are passed as query
    parameters.  No-op when self.source_id is unset.
    NOTE(review): kwarg *names* are interpolated into SQL unescaped —
    callers must not pass untrusted key names.
    """
    if self.source_id:
        cur = db.cursor()
        kwargs['last_checked'] = time.strftime('%Y-%m-%d %H:%M:%S')
        query = "UPDATE sources SET urlhash=MD5(url),{} WHERE source_id = %s".format(
            ",".join(k+"=%s" for k in kwargs.keys()))
        cur.execute(query, tuple(kwargs.values()) + (self.source_id,))
        # cur._last_executed is a MySQLdb-internal attribute (debug only)
        debug(3, cur._last_executed)
        db.commit()
def _create_job(dataset_id):
    """Queue a pending evaluation job for a dataset.

    dataset_id -- id of the dataset to evaluate
    Returns the generated job id.
    """
    insert_query = (
        "INSERT INTO dataset_eval_jobs (id, dataset_id, status) "
        "VALUES (uuid_generate_v4(), %s, %s) RETURNING id"
    )
    with create_cursor() as cursor:
        cursor.execute(insert_query, (dataset_id, STATUS_PENDING))
        new_job_id = cursor.fetchone()[0]
        commit()
        return new_job_id
def delete(self, keep_file=False):
    """Remove this image's database rows (label links, then the image
    row itself).

    keep_file -- when False the on-disk file should also be removed,
                 but that part is still a TODO.
    """
    # db.execute("DELETE FROM " + db.tbl_image_log + " WHERE image=%s", [self.id])
    # db.execute("DELETE FROM " + db.tbl_image_rating + " WHERE image=%s", [self.id])
    # db.execute("DELETE FROM " + db.tbl_image_referrer + " WHERE image=%s", [self.id])
    db.execute("DELETE FROM " + db.tbl_image_label + " WHERE image=%s", [self.id])
    db.execute("DELETE FROM " + db.tbl_image + " WHERE id=%s", [self.id])
    db.commit()
    if not keep_file:
        # TODO: add file deletion, check existence as it can be deleted postfactum
        pass
def add(info):
    """Register a new user.

    info -- sequence of (username, plaintext password, third column value)
    Returns "success" on insert, "username taken" if the name exists.
    """
    conn, c = connect()
    try:
        c.execute("SELECT * FROM users WHERE username=?", (info[0],))
        if c.fetchone() is not None:
            return "username taken"
        # store only the password hash, never the plaintext
        c.execute("INSERT INTO users VALUES (?,?,?)",
                  [info[0], sha256_crypt.encrypt(info[1]), info[2]])
        commit(conn)
        return "success"
    finally:
        # BUG FIX: the connection previously leaked on the
        # "username taken" path (disconnect only ran after a new insert).
        disconnect(conn)
def update_model_id_of_allnet_car_source(model_id, car_sale_id):
    """Best-effort update of one car_allnet_source row's model_id.

    model_id    -- new model id to store
    car_sale_id -- primary key of the row to update
    Errors roll back the transaction and are otherwise swallowed
    (preserving the original best-effort behavior).
    """
    db = wcar_pool.connection()
    cursor = db.cursor()
    # Parameterized query instead of %-interpolation.
    sql = "UPDATE `car_allnet_source` SET `model_id` = %s WHERE `id` = %s"
    try:
        cursor.execute(sql, (model_id, car_sale_id))
        # utils.debug("update %d's model_id to %d." % (car_sale_id, model_id))
        db.commit()
    except Exception:
        # narrowed from a bare `except:` so Ctrl-C still propagates
        db.rollback()
    finally:
        cursor.close()
        db.close()
def create_multiple(conn, assets):
    """
    Create a database record for each asset.
    Returns asset list.
    Asset's is_active field is updated before returning.
    (Each asset dict is mutated in place: any incoming 'is_active' is
    stripped before insert, then recomputed via is_active().)
    """
    # db.commit(conn) is a commit-on-exit cursor context manager here
    with db.commit(conn) as c:
        for asset in assets:
            if 'is_active' in asset:
                asset.pop('is_active')
            c.execute(queries.create(asset.keys()), asset.values())
            asset.update({'is_active': is_active(asset)})
    return assets
def bcpFiles():
    """Run the probe-merge SQL (move assay/reference data from one probe
    to another, delete the source probe), then BCP the ref/alias files.

    Always logs the SQL to diagFile; the execute/BCP steps are skipped
    when DEBUG is set or bcp loading is disabled (bcpon false).
    NOTE(review): execAssaySQL is iterated as a list of statements below
    but written to diagFile in one call here — confirm diagFile.write
    accepts it.
    """
    diagFile.write(execAssaySQL)
    diagFile.write(execRefSQL)
    diagFile.write(execProbeSQL)

    if DEBUG or not bcpon:
        return

    refFile.close()
    aliasFile.close()
    db.commit()

    # execute the sql commands
    # move assay information from fromID to toID
    for r in execAssaySQL:
        db.sql(r, None)
    # move fromID (from) references to toID
    db.sql(execRefSQL, None)
    # delete fromID (from)
    db.sql(execProbeSQL, None)
    db.commit()

    bcp1 = bcpCommand % (refTable, refFileName)
    bcp2 = bcpCommand % (aliasTable, aliasFileName)

    for bcpCmd in [bcp1, bcp2]:
        diagFile.write('%s\n' % bcpCmd)
        os.system(bcpCmd)

    db.commit()
    return
def reference():
    """Flask view: list/add/update/delete the current user's professor
    references.

    GET renders the form pre-filled from Reference.load(); POST handles
    one of three submit buttons (add, per-row save, per-row delete) and
    redirects back to this view after committing.
    """
    Entry = namedtuple('Entry', ['professor_email'])
    ref_value = Reference.load(current_user.id)
    # pre-populate the form with the stored reference list
    if ref_value.ref_list:
        data = {'ref_list': []}
        for professor_email in ref_value.ref_list:
            data['ref_list'].append(Entry(professor_email))
        form = ReferenceListForm(data=data)
    else:
        form = ReferenceListForm()
    if form.validate_on_submit():
        if form.add.data:
            # "add" button: email must be non-empty and belong to a professor
            if bool(form.professor_email.data) and Professor.load(form.professor_email.data) is not None:
                ref_value.add(form.professor_email.data)
                flash('Reference added', 'success')
                commit()
            else:
                flash('Invalid email', 'danger')
            return redirect(url_for('reference.reference'))
        else:
            # per-row save/delete buttons
            for index in range(len(form.ref_list)):
                if form.ref_list[index].save.data:
                    if bool(form.ref_list[index].professor_email.data) and Professor.load(
                            form.ref_list[index].professor_email.data) is not None:
                        ref_value.update(index, form.ref_list[index].professor_email.data)
                        commit()
                        flash('Updated successfully', 'success')
                    else:
                        flash('Invalid email', 'danger')
                    return redirect(url_for('reference.reference'))
                if form.ref_list[index].delete.data:
                    ref_value.delete(index)
                    commit()
                    flash('Deleted successfully', 'success')
                    return redirect(url_for('reference.reference'))
    return render_template('reference.html', form=form)
def init_db():
    """(Re)create the database schema by executing schema.sql."""
    connection = get_db()
    with app.open_resource('schema.sql', mode='r') as schema_file:
        connection.cursor().executescript(schema_file.read())
    connection.commit()
def edit_db(query, args=()):
    """Execute a write statement against the request database and commit.

    query -- SQL with placeholders
    args  -- parameter tuple for the placeholders (default empty)
    """
    connection = get_db()
    connection.execute(query, args)
    connection.commit()
async def response_handler(self, request):
    """aiohttp handler for the Spotify OAuth redirect.

    Validates the state token, exchanges the authorization code for
    access/refresh tokens, stores them in spotify_tokens (replacing any
    existing row for the Discord user), and DMs the user on success.
    NOTE(review): uses the blocking `requests` library inside an async
    handler — this blocks the event loop during the HTTP calls.
    """
    query = request.query
    if 'code' not in query or 'state' not in query:
        raise aiohttp.web.HTTPBadRequest()
    code = query['code']
    state = query['state']
    # state must be one we issued, otherwise reject the callback
    if state not in self.stateMap:
        print("invalid state")
        raise aiohttp.web.HTTPBadRequest()
    formdata = {
        'grant_type': 'authorization_code',
        'client_id': config.SPOTIFY_CLIENT_ID,
        'code': code,
        'redirect_uri': 'http://' + config.SPOTIFY_REDIRECT_HOST + ':' +
        str(config.SPOTIFY_REDIRECT_PORT) + '/spotify'
    }
    auth = config.SPOTIFY_CLIENT_ID + ':' + config.SPOTIFY_CLIENT_SECRET
    headers = {
        'User-Agent': USER_AGENT,
        'Accept': 'application/json',
        'Authorization': 'Basic ' + base64.b64encode(auth.encode('ascii')).decode('ascii')
    }
    r = requests.post('https://accounts.spotify.com/api/token',
                      data=formdata, headers=headers)
    # NOTE: `json` here shadows any imported json module in this scope
    json = r.json()
    if 'error' in json:
        return web.Response(
            status=500,
            text=
            f"Auth server returned error: {json['error']} - {json['error_description']}"
        )
    access_token = json['access_token']
    token_type = json['token_type']
    refresh_token = json['refresh_token']
    expiry = math.floor(time.time()) + json['expires_in']
    discord_user_id = self.stateMap[state]
    cur = db.get_cursor()
    # delete-then-insert keeps exactly one token row per Discord user
    db.execute(
        cur, "SELECT COUNT(*) FROM `spotify_tokens` WHERE discord_user = ?",
        (discord_user_id, ))
    num = cur.fetchone()[0]
    if num > 0:
        db.execute(cur, "DELETE FROM `spotify_tokens` WHERE `discord_user`=?",
                   (discord_user_id, ))
    db.execute(
        cur,
        "INSERT INTO `spotify_tokens` (`discord_user`, `access_token`, `expiry`, `refresh_token`, `token_type`) VALUES (?,?,?,?,?)",
        (discord_user_id, access_token, expiry, refresh_token, token_type))
    db.commit()
    discord_user = self.bot.get_user(discord_user_id)
    try:
        # fetch the display name so the confirmation DM is friendly;
        # failure to DM is non-fatal by design
        headers = {
            'User-Agent': USER_AGENT,
            'Accept': 'application/json',
            'Authorization': token_type + ' ' + access_token
        }
        r = requests.get('https://api.spotify.com/v1/me', headers=headers)
        json = r.json()
        await discord_user.send(
            f"Successfully linked to Spotify account `{json['display_name']}`.\nUse {config.COMMAND_PREFIX}spotifyunlink to unlink."
        )
    except:
        pass
    return web.Response(
        text=
        f'Your Spotify account has been linked. You can close this tab now.'
    )
def log_commit(mess):
    """Write *mess* to the log, then commit pending database changes."""
    log(mess)
    db.commit()
def removerMember(Mem):
    """Delete the membership row whose id matches Mem.getMemId()."""
    db = get_db()
    query_db(
        'DELETE FROM membership WHERE id = (?)', [Mem.getMemId()])
    db.commit()
def logout(db):
    """Destroy the session identified by the 'auth' cookie.

    Returns {} after deleting the session row; returns False when no auth
    cookie is present (NOTE(review): inconsistent return types — confirm
    callers handle both).
    """
    c = request.get_cookie("auth")
    if not c:
        return False
    commit(db, "delete from sessions where session_id = ?", c.encode("utf-8"))
    return {}
async def handler(self, request):
    """aiohttp handler for the Frontier (FDev) OAuth PKCE redirect.

    Validates the state token, exchanges the authorization code (with the
    stored PKCE code_verifier) for tokens, stores them in fdev_tokens
    (replacing any existing row for the Discord user), and DMs the user.
    NOTE(review): uses the blocking `requests` library inside an async
    handler — this blocks the event loop during the HTTP calls.
    """
    query = request.query
    if 'code' not in query or 'state' not in query:
        raise aiohttp.web.HTTPBadRequest()
    code = query['code']
    state = query['state']
    # state must be one we issued, otherwise reject the callback
    if state not in self.stateMap:
        print("invalid state")
        raise aiohttp.web.HTTPBadRequest()
    formdata = {
        'grant_type': 'authorization_code',
        'client_id': config.ED_FDEV_CLIENT_ID,
        'code_verifier': self.stateChallenges[state],
        'code': code,
        'redirect_uri': 'http://' + config.ED_FDEV_REDIRECT_HOST + ':' +
        str(config.ED_FDEV_REDIRECT_PORT) + '/fd'
    }
    headers = {'User-Agent': USER_AGENT, 'Accept': 'application/json'}
    r = requests.post('https://auth.frontierstore.net/token',
                      data=formdata, headers=headers)
    json = r.json()
    if 'message' in json:
        return web.Response(
            status=500,
            text=f"Auth server returned error: {json['message']}")
    access_token = json['access_token']
    token_type = json['token_type']
    refresh_token = json['refresh_token']
    expiry = math.floor(time.time()) + json['expires_in']
    discord_user_id = self.stateMap[state]
    cur = db.get_cursor()
    # delete-then-insert keeps exactly one token row per Discord user
    db.execute(
        cur, "SELECT COUNT(*) FROM `fdev_tokens` WHERE discord_user = ?",
        (discord_user_id, ))
    num = cur.fetchone()[0]
    if num > 0:
        db.execute(cur, "DELETE FROM `fdev_tokens` WHERE `discord_user`=?",
                   (discord_user_id, ))
    db.execute(
        cur,
        "INSERT INTO `fdev_tokens` (`discord_user`, `access_token`, `expiry`, `refresh_token`, `token_type`) VALUES (?,?,?,?,?)",
        (discord_user_id, access_token, expiry, refresh_token, token_type))
    db.commit()
    discord_user = self.bot.get_user(discord_user_id)
    try:
        # fetch the account email for the confirmation DM;
        # failure to DM is non-fatal by design
        headers = {
            'User-Agent': USER_AGENT,
            'Accept': 'application/json',
            'Authorization': token_type + ' ' + access_token
        }
        r = requests.get('https://auth.frontierstore.net/me', headers=headers)
        user_info = r.json()
        await discord_user.send(
            f"Successfully linked to Frontier account `{user_info['email']}`. Use {config.COMMAND_PREFIX}edunlink to unlink."
        )
    except:
        pass
    return web.Response(text='Logged in. You can close this tab now.')
def update(username, new_pass):
    """Replace the stored password hash for *username*.

    username -- account whose password is being changed
    new_pass -- new plaintext password (hashed before storage)
    """
    conn, c = connect()
    hashed = sha256_crypt.encrypt(new_pass)
    c.execute("UPDATE users SET password=? WHERE username=?", (hashed, username))
    commit(conn)
    disconnect(conn)
def project():
    """Flask view: list/add/update/delete the current user's projects.

    GET renders the form pre-filled from Project.load(); POST handles
    one of three submit buttons (add, per-row save, per-row delete).
    Professor emails come one-per-line in a textarea; every email must
    resolve via Professor.load() or the change is rejected.
    Redirects back to this view after committing.
    """
    Entry = namedtuple(
        'Entry', ['title', 'professors', 'start', 'end', 'text_description'])
    project_value = Project.load(current_user.id)
    # pre-populate the form with the stored project list
    if project_value.project_list:
        data = {'project_list': []}
        for project_dict in project_value.project_list:
            data['project_list'].append(
                Entry(project_dict['title'],
                      "\n".join(project_dict['professor_list']),
                      project_dict['start_date'], project_dict['end_date'],
                      project_dict['description']))
        form = ProjectListForm(data=data)
    else:
        form = ProjectListForm()
    if form.validate_on_submit():
        if form.add.data:
            # "add" button: title and professors are required
            if bool(form.title.data) and bool(form.professors.data):
                # split textarea into emails; strip CR, drop trailing blank
                prof_list = [
                    prof.strip("\r")
                    for prof in form.professors.data.split('\n')
                ]
                if prof_list[-1] == '':
                    del prof_list[-1]
                # every email must belong to a known professor
                check = True
                for prof in prof_list:
                    if Professor.load(prof) is None:
                        check = False
                if check:
                    project_dict = {
                        'title': form.title.data,
                        'professor_list': set(prof_list),
                        'start_date': form.start.data,
                        'end_date': form.end.data,
                        'description': form.text_description.data
                    }
                    project_value.add(project_dict)
                    flash('Project added', 'success')
                    commit()
                else:
                    flash('Bad emails', 'danger')
            else:
                flash('Empty field', 'danger')
            return redirect(url_for('project.project'))
        else:
            # per-row save/delete buttons
            for index in range(len(form.project_list)):
                if form.project_list[index].save.data:
                    if bool(form.project_list[index].title.data) and bool(
                            form.project_list[index].professors.data):
                        prof_list = [
                            prof.strip("\r") for prof in
                            form.project_list[index].professors.data.split('\n')
                        ]
                        if prof_list[-1] == '':
                            del prof_list[-1]
                        check = True
                        for prof in prof_list:
                            if Professor.load(prof) is None:
                                check = False
                        if check:
                            project_dict = {
                                'title': form.project_list[index].title.data,
                                'professor_list': set(prof_list),
                                'start_date': form.project_list[index].start.data,
                                'end_date': form.project_list[index].end.data,
                                'description':
                                form.project_list[index].text_description.data
                            }
                            project_value.update(index, project_dict)
                            commit()
                            flash('Updated successfully', 'success')
                        else:
                            flash('Bad emails', 'danger')
                    else:
                        flash('Empty field', 'danger')
                    return redirect(url_for('project.project'))
                if form.project_list[index].delete.data:
                    project_value.delete(index)
                    commit()
                    flash('Deleted successfully', 'success')
                    return redirect(url_for('project.project'))
    return render_template('project.html', form=form)
def add_review(hotel_id, cur_date, rating, user_id, title, details):
    """Insert one review row for a hotel.

    hotel_id -- hotel the review belongs to
    cur_date -- review date
    rating   -- numeric rating
    user_id  -- reviewer's username column value
    title    -- review title
    details  -- review body text
    """
    sql = """
    insert into reviews(hotel_id, review_date, review_rating, review_username, review_title, review_text)
    values (%s, %s, %s, %s, %s, %s)
    """
    params = (hotel_id, cur_date, rating, user_id, title, details)
    # db.commit executes the parameterized statement and commits
    db.commit(sql, params)
def processFile():
    # Purpose: Read the input file, resolve values to keys. Create bcp files
    # Returns: 1 if error, else 0
    # Assumes: file descriptors have been initialized
    # Effects: exits if the line does not have 15 columns
    # Throws: Nothing

    global alleleKey, refAssocKey, accKey, noteKey, mgiKey, annotKey
    global alleleLookup, alleleMutationKey

    lineNum = 0
    # For each line in the input file
    for line in fpInputFile.readlines():

        error = 0
        lineNum = lineNum + 1
        print('%s: %s' % (lineNum, line))

        # Split the line into tokens
        tokens = line[:-1].split('\t')

        # a short line raises IndexError and aborts the load
        try:
            markerID = tokens[0]
            markerSymbol = tokens[1]
            mutationType = tokens[2]	# IMPC allele type
            description = tokens[3]
            colonyID = tokens[4]
            strainOfOrigin = tokens[5]
            alleleSymbol = tokens[6]
            alleleName = tokens[7]
            inheritanceMode = tokens[8]
            alleleType = tokens[9]	# IMPC allele class
            alleleSubType = tokens[10]
            alleleStatus = tokens[11]
            transmission = tokens[12]
            collection = tokens[13]
            jNum = tokens[14]
            createdBy = tokens[15]
        except:
            print('exiting with invalid line')
            exit(1, 'Invalid Line (%d): %s\n' % (lineNum, line))

        print('validating data and getting keys')

        # marker key
        markerKey = loadlib.verifyMarker(markerID, lineNum, fpErrorFile)

        # _vocab_key = 36 (Allele Molecular Mutation)
        # semicolon-delimited list; keep only terms that resolve
        mutationList = str.split(mutationType, ';')
        if len(mutationList) > 1:
            print('mutationList: %s' % mutationList)
        mutationKeyList = []
        for m in mutationList:
            mutationKey = loadlib.verifyTerm('', 36, m, lineNum, fpErrorFile)
            if mutationKey != 0:
                mutationKeyList.append(mutationKey)
        if len(mutationKeyList) > 1:
            print('mutationKeyList: %s' % mutationKeyList)

        # strains
        strainOfOriginKey = sourceloadlib.verifyStrain(strainOfOrigin, lineNum, fpErrorFile)

        # _vocab_key = 35 (Allele Inheritance Mode)
        inheritanceModeKey = loadlib.verifyTerm('', 35, inheritanceMode, lineNum, fpErrorFile)

        # _vocab_key = 38 (Allele Type)
        alleleTypeKey = loadlib.verifyTerm('', 38, alleleType, lineNum, fpErrorFile)

        # _vocab_key = 93 (Allele Subtype)
        # optional semicolon-delimited list
        subTypeList = str.split(alleleSubType, ';')
        if len(subTypeList) > 1:
            print('subTypeList: %s' % subTypeList)
        subTypeKeyList = []
        for s in subTypeList:
            if s != '':	# if we have a subtype, get it's key
                subTypeKey = loadlib.verifyTerm('', 93, s, lineNum, fpErrorFile)
                if subTypeKey != 0:
                    subTypeKeyList.append(subTypeKey)
        if len(subTypeKeyList) > 1:
            print('subTypeKeyList: %s' % subTypeKeyList)

        # _vocab_key = 37 (Allele Status)
        alleleStatusKey = loadlib.verifyTerm('', 37, alleleStatus, lineNum, fpErrorFile)

        # _vocab_key = 61 (Allele Transmission)
        transmissionKey = loadlib.verifyTerm('', 61, transmission, lineNum, fpErrorFile)

        # _vocab_key = 92
        collectionKey = loadlib.verifyTerm('', 92, collection, lineNum, fpErrorFile)

        # _vocab_key = 73 (Marker-Allele Association Status)
        # _term_key = 4268545 (Curated)
        markerStatusKey = 4268545

        # reference
        refKey = loadlib.verifyReference(jNum, lineNum, fpErrorFile)

        # creator
        createdByKey = loadlib.verifyUser(createdBy, lineNum, fpErrorFile)
        if createdByKey == 0:
            continue

        print('checking for missing data')

        # if errors, continue to next record
        # errors are stored (via loadlib) in the .error log
        if markerKey == 0 \
                or mutationKeyList == [] \
                or strainOfOriginKey == 0 \
                or inheritanceModeKey == 0 \
                or alleleTypeKey == 0 \
                or alleleStatusKey == 0 \
                or transmissionKey == 0 \
                or collectionKey == 0 \
                or refKey == 0 \
                or createdByKey == 0:
            print('missing data, skipping this line')
            continue

        # if no errors, process the allele
        print('writing to allele file')

        # allele (isWildType = 0)
        fpAlleleFile.write('%d|%s|%s|%s|%s|%s|%s|%s|%s|%s|0|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s\n' \
            % (alleleKey, markerKey, strainOfOriginKey, inheritanceModeKey, alleleTypeKey, \
            alleleStatusKey, transmissionKey, collectionKey, alleleSymbol, alleleName, \
            isExtinct, isMixed, refKey, markerStatusKey, \
            createdByKey, createdByKey, createdByKey, loaddate, loaddate, loaddate))

        # molecular mutation
        for mutationKey in mutationKeyList:
            fpMutationFile.write('%s|%s|%s|%s|%s\n' \
                % (alleleMutationKey, alleleKey, mutationKey, loaddate, loaddate))
            alleleMutationKey += 1

        # reference associations

        # Original
        fpRefFile.write('%s|%s|%s|%s|%s|%s|%s|%s|%s\n' \
            % (refAssocKey, refKey, alleleKey, mgiTypeKey, origRefTypeKey, \
            createdByKey, createdByKey, loaddate, loaddate))
        refAssocKey = refAssocKey + 1

        # Molecular
        fpRefFile.write('%s|%s|%s|%s|%s|%s|%s|%s|%s\n' \
            % (refAssocKey, refKey, alleleKey, mgiTypeKey, molRefTypeKey, \
            createdByKey, createdByKey, loaddate, loaddate))
        refAssocKey = refAssocKey + 1

        # allele subtype
        for subTypeKey in subTypeKeyList:
            fpAnnotFile.write('%s|%s|%s|%s|%s|%s|%s\n' \
                % (annotKey, annotTypeKey, alleleKey, subTypeKey, \
                qualifierKey, loaddate, loaddate))
            annotKey = annotKey + 1

        # MGI Accession ID for the allele
        alleleID = '%s%s' % (mgiPrefix, mgiKey)
        fpAccFile.write('%s|%s|%s|%s|1|%d|%d|0|1|%s|%s|%s|%s\n' \
            % (accKey, alleleID, mgiPrefix, mgiKey, alleleKey, mgiTypeKey, \
            createdByKey, createdByKey, loaddate, loaddate))

        # storing data in MGI_Note

        # molecular note
        fpNoteFile.write('%s|%s|%s|%s|%s|%s|%s|%s|%s\n' \
            % (noteKey, alleleKey, mgiTypeKey, molecularNoteTypeKey, description,\
            createdByKey, createdByKey, loaddate, loaddate))
        noteKey = noteKey + 1

        # colony ID note
        # NOTE(review): this write has one fewer field than the molecular
        # note write above — confirm the format is intentional.
        fpNoteFile.write('%s|%s|%s|%s|%s|%s|%s|%s\n' \
            % (noteKey, alleleKey, mgiTypeKey, colonyIdNoteTypeKey, colonyID, \
            createdByKey, createdByKey, loaddate, loaddate))
        noteKey = noteKey + 1

        # Print out a new text file and attach the new MGI Allele IDs
        # as the last field
        fpNewAlleleRptFile.write('%s\t%s\t%s\t%s\t%s\t%s\n' \
            % (mgi_utils.prvalue(alleleID), \
            mgi_utils.prvalue(alleleSymbol), \
            mgi_utils.prvalue(alleleName), \
            mgi_utils.prvalue(markerID), \
            mgi_utils.prvalue(markerSymbol), \
            mgi_utils.prvalue(colonyID)))

        accKey = accKey + 1
        mgiKey = mgiKey + 1
        alleleKey = alleleKey + 1

    #
    # Update the AccessionMax value
    #
    print('DEBUG: %s' % DEBUG)
    if DEBUG == 'false':
        db.sql('select * from ACC_setMax(%d)' % (lineNum), None)
        db.commit()

    return 0
def post_db(query, args=(), one=False):
    """Execute a write query against the shared connection and commit it.

    Args:
        query: SQL statement to execute.
        args: bind parameters for the statement.
        one: accepted for signature compatibility with the read helper;
             currently unused for write queries.

    Returns:
        The database connection, after the transaction is committed.
    """
    db = get_db()
    # BUG FIX: the original called db.cursor() and discarded the result —
    # a dead call. sqlite3 connections expose execute() directly.
    db.execute(query, args)
    db.commit()
    return db
def getMap():
    """Generate the marker/cM-offset report written to fpMap.

    Steps:
      1. Reset official/interim marker offsets via updateSQL.
      2. Select official mouse markers (excluding chromosome 'UN' and
         DNA-MIT markers) into a temp table.
      3. Collect genomic start coordinates from MAP_Coord_Feature and
         from SEQ_Marker_Cache/SEQ_Coord_Cache.
      4. Write one tab-delimited report line per marker/offset; markers
         whose genetic and genomic chromosomes disagree get 'None'.

    Returns:
        0 on completion.
    """

    #
    # set any official/interim offsets = -1
    # if they are currently set to -999
    #
    db.sql(updateSQL, None)
    db.commit()

    #
    # Get all official/interim MGI markers
    # ignore DNA-MIT markers
    #
    # note that this is the genetic chromosome we add to #markers
    db.sql('''select m._Marker_key, m.symbol, m.chromosome, a.accid
        into temp markers
        from MRK_Marker m, ACC_Accession a
        where m._Organism_key = 1
        and m._Marker_Status_key = 1
        and m.chromosome not in ('UN')
        and lower(m.symbol) not like 'd%mit%'
        and m._Marker_key = a._Object_key
        and a._MGIType_key = 2
        and a._LogicalDB_key = 1
        and a.preferred = 1
        and a.prefixPart = 'MGI:'
        ''', None)
    db.sql('create index markers_idx1 on markers(_Marker_key)', None)

    #
    # copied from mrkcacheload/mrklocation.py
    #
    hasOffset = {}           # marker key -> list of genomic start coordinates
    genomicChromosome = {}   # marker key -> genomic chromosome

    #
    # offsets for Marker with MAP_Coord_Feature
    #
    results = db.sql('''select distinct m._Marker_key, f.startCoordinate, c.chromosome
        from markers m, MAP_Coord_Feature f, MAP_Coordinate mc, MRK_Chromosome c
        where m._Marker_key = f._Object_key
        and f._MGIType_key = 2
        and f._Map_key = mc._Map_key
        and mc._Object_key = c._Chromosome_key
        and mc._MGIType_key = 27 -- chromosome
        ''', 'auto')
    for r in results:
        key = r['_Marker_key']
        value = r['startCoordinate']
        # BUG FIX: dict.has_key() was removed in Python 3; use setdefault
        hasOffset.setdefault(key, []).append(value)
        genomicChromosome[key] = r['chromosome']

    #
    # offsets for Markers w/ Sequence
    #
    results = db.sql('''select distinct m._Marker_key, c.startCoordinate, c.chromosome
        from markers m, SEQ_Marker_Cache mc, SEQ_Coord_Cache c
        where m._Marker_key = mc._Marker_key
        and mc._Qualifier_key = 615419
        and mc._Sequence_key = c._Sequence_key
        ''', 'auto')
    for r in results:
        key = r['_Marker_key']
        value = r['startCoordinate']
        # only one coordinate per marker
        hasOffset.setdefault(key, []).append(value)
        genomicChromosome[key] = r['chromosome']

    #
    # print out the marker/offsets
    #
    results = db.sql('select * from markers order by _Marker_key', 'auto')
    for r in results:
        key = r['_Marker_key']

        # change "X" to "20"
        chr = r['chromosome']

        # if genetic and genomic chromosomes disagree, then we do not want to
        # generate a cM offset
        chromosomeMismatch = False
        if key in genomicChromosome:
            if genomicChromosome[key] != chr:
                chromosomeMismatch = True

        if chr == 'X':
            chr = '20'

        if key in hasOffset and not chromosomeMismatch:
            for c in hasOffset[key]:
                fpMap.write(str(r['_Marker_key']) + '\t' +
                            r['symbol'] + '\t' +
                            r['accid'] + '\t' +
                            chr + '\t' +
                            str(c) + '\n')
        else:
            fpMap.write(str(r['_Marker_key']) + '\t' +
                        r['symbol'] + '\t' +
                        r['accid'] + '\t' +
                        chr + '\t' +
                        'None' + '\n')

    return 0
# Accumulate the lower-cased set of JSON field names seen across all rows.
# NOTE(review): 'dbo', 'sql', 'keys' and 'now' are defined earlier in this
# script, outside this excerpt.
for row in db.execute(dbo, sql):
    new_keys = json.loads(row[0]).keys()
    new_keys = set(k.lower() for k in new_keys)
    keys = keys.union(new_keys)

# Upsert every discovered field name into the fields table, stamping each
# with the current run timestamp.
tablename = config.settings["db"]["tablename_fields"]
##sql = 'delete from %s' % tablename
##db.execute(dbo, sql)
sql_ins = 'insert or ignore into %s (ts, field) values (?, ?)' % (tablename)
sql_upd = 'update %s set ts=? where field=?' % tablename
for v in list(keys):
    logging.warning("adding to table %s the value '%s'" % (tablename, v))
    values = (now, v,)
    # insert-or-ignore then update keeps ts fresh for pre-existing fields
    db.execute(dbo, sql_ins, values)
    db.execute(dbo, sql_upd, values)
db.commit(dbo)

## remove old records
# NOTE(review): the stale-record purge below is prepared but deliberately
# disabled (the execute call is commented out).
sql = 'delete from %s where ts < (?)' % tablename
values = (now,)
##db.execute(dbo, sql, values)

## export csv
db.sql2csv(dbo, tablename, tablename=tablename)
db.commit(dbo)
db.close(dbo)
def processFile():
    '''
    # requires:
    #
    # effects:
    #   Reads input file
    #   Verifies and Processes each line in the input file
    #
    # returns:
    #   nothing
    #
    '''

    global bcpon
    global biotypeKey
    global biotypeVocab, biotypeVocabKey
    global biotypeTerm, biotypeTermKey
    global mcvTerms, mcvTermKeys
    global markerType, markerTypeKey
    global useMCVchildren
    global primaryMCVTerm, primaryMCVTermKey

    # For each line in the input file
    lineNum = 0
    for line in inputFile.readlines():

        lineNum = lineNum + 1

        # Split the line into tokens
        tokens = line[:-1].split('\t')

        try:
            biotypeVocab = tokens[0]
            biotypeTerm = tokens[1]
            mcvTerms = tokens[2]
            primaryMCVTerm = tokens[3]
            markerType = tokens[4]
            useMCVchildren = tokens[5]
        except IndexError:
            # BUG FIX: narrowed from a bare "except:"; only a short row
            # can raise here, and a bare except also swallowed
            # KeyboardInterrupt/SystemExit.
            errorFile.write('Invalid Line (missing column(s)) (row %d): %s\n' % (lineNum, line))
            continue

        #
        # skip header
        #
        if biotypeVocab == "Source":
            continue

        #
        # sanity checks
        #
        errors = sanityCheck(biotypeVocab, biotypeTerm, mcvTerms, primaryMCVTerm, markerType, lineNum)
        if errors:
            errorFile.write('\n'.join(errors) + '\n')
            errorFile.write(str(tokens) + '\n\n')
            # disable the bcp load but keep scanning for further errors
            bcpon = 0
            continue

        #
        # sanity checks passed...
        #
        if useMCVchildren == 'yes':
            useMCVchildren = '1'
        else:
            useMCVchildren = '0'

        for mcvTermKey in mcvTermKeys:
            outputFile.write('%d|%d|%d|%d|%d|%s|%s|%s|%s|%s|%s\n' \
                % (biotypeKey, biotypeVocabKey, biotypeTermKey, mcvTermKey, primaryMCVTermKey,
                   markerTypeKey, useMCVchildren, createdByKey, createdByKey, cdate, cdate))
            biotypeKey = biotypeKey + 1

    # end of "for line in inputFile.readlines():"

    outputFile.close()
    db.commit()
def delete(conn, asset_id):
    """Delete the asset identified by *asset_id* from the database."""
    # db.commit() yields a cursor and commits the transaction on exit.
    with db.commit(conn) as cursor:
        cursor.execute(queries.remove, [asset_id])
def setUp(self):
    """Open an in-memory database and create the assets table for each test."""
    # Convenience assertion: self.assertEmpty(x) == self.assertEqual([], x)
    self.assertEmpty = functools.partial(self.assertEqual, [])
    self.conn = db.conn(':memory:')
    with db.commit(self.conn) as c:
        c.execute(assets_helper.create_assets_table)
def put_ticket(db, user):
    """Enroll *user* in a class for the block given in the request form.

    Reads block/name/subsection/teacher from the request form, runs a
    series of validation queries, inserts the schedule row, and locks
    the class if this enrollment consumed its final open slot.

    Returns:
        {"ticket": row} where row is the id of the inserted schedule row.

    Raises (via abort):
        400 for any validation failure; 500 if a database invariant is
        violated (duplicate class rows).
    """
    # Check valid block
    block = request.forms.get("block")
    if len(block) != 1 or block not in "ABCDEFGHP":
        abort(400, "Invalid Block")

    # Check that they're not already taking a class at that block
    existing_block = query(
        db,
        "select class_name from student_schedules where student_id = ? and block = ?",
        user, block)
    if len(existing_block) > 0:
        abort(
            400, "You are already taking a class at that block: " +
            existing_block[0].class_name)

    name = request.forms.get("name")
    subsection = request.forms.get("subsection")
    teacher = request.forms.get("teacher")

    # Check that the class exists
    r = query(
        db,
        "select * from classes where block = ? and name = ? and subsection = ? and teacher = ?",
        block, name, subsection, teacher)
    if len(r) == 0:
        abort(400, "That class doesn't exist")
    if len(r) > 1:
        abort(500, "Database invariant violated")

    # Check that the user isn't already taking that class at a different block
    r = query(
        db,
        "select block from student_schedules where class_name = ? and teacher = ? and student_id = ?",
        name, teacher, user)
    if len(r) > 0:
        abort(400,
              "You are already in that class at another block: " + r[0].block)

    # Check if the user isn't in the maximum number of classes
    r = query(
        db,
        "select count(*) as count from student_schedules where student_id = ?",
        user)[0].count
    if r >= 11:
        abort(400, "You are already in the maximum number of classes")

    # Check if adding the block would exceed the limit for the class
    r = query(
        db,
        "select remaining_slots, locked, waitlist from classes_avail where name = ? and teacher = ? and block = ?",
        name, teacher, block)[0]
    if r.remaining_slots <= 0:
        abort(400, "That class is full. Please use the waitlist.")
    if r.locked != 0:
        abort(
            400,
            "That class is closed to enrollment. It has {} remaining slot{} which will be taken from the waitlist"
            .format(r.remaining_slots, '' if r.remaining_slots == 1 else 's'))

    # Insert the schedule row and, if we took the last slot, lock the class —
    # both inside one transaction ("with db:").
    with db:
        row = commit(
            db,
            "insert into student_schedules (student_id, block, class_name, subsection, teacher) values (?, ?, ?, ?, ?)",
            user, block, name, subsection, teacher)
        print("Student {} added class: {}, {}, {}, {} - assigned ticket id {}".
              format(user, block, name, subsection, teacher, row))

        # We're now out of slots
        if r.remaining_slots == 1:
            query(
                db,
                "update classes set locked = 1 where name = ? and teacher = ? and block = ?",
                name, teacher, block)
            print("Class got locked: {}, {}, {}, {}".format(
                block, name, subsection, teacher))

    return {"ticket": row}
def increment_stat(name):
    """Atomically add 1 to the *name* column of the stats table.

    Column names cannot be bound as SQL parameters, so *name* is
    validated as a plain Python identifier before being interpolated
    into the statement — the original interpolated it unchecked, which
    is a SQL-injection risk if *name* ever comes from outside.

    Raises:
        ValueError: if *name* is not a valid identifier.
    """
    if not name.isidentifier():
        raise ValueError('invalid stat column name: %r' % (name,))
    cur = db.get_cursor()
    cur.execute('UPDATE stats SET %s=%s+1' % (name, name))
    db.commit()
def setNodeState(nodeId, state):
    """Persist *state* for the node identified by *nodeId*."""
    import db
    # bind the connection to its own name instead of rebinding the module
    conn = db.get_db()
    conn.execute('UPDATE node SET state = ? WHERE id = ?;', (state, nodeId))
    conn.commit()
def finalize(self):
    """Flush all pending work by committing the current DB transaction."""
    db.commit()
def init():
    """Create the users table in a fresh database."""
    connection, cursor = connect()
    cursor.execute("CREATE TABLE users (username TEXT, password TEXT, role TEXT)")
    commit(connection)
    disconnect(connection)
def create_answer(question_id, answer):
    """Insert a new answer option for a question and commit.

    Args:
        question_id: id of the parent question row.
        answer: display text of the answer option.
    """
    sql = "INSERT INTO answer_option (text, question_id) VALUES (%s, %s)"
    value = [answer, question_id]
    cursor = db.cursor()
    try:
        cursor.execute(sql, value)
        db.commit()
    finally:
        # BUG FIX: the original never closed the cursor (resource leak)
        cursor.close()
def remove(user):
    """Delete the row for *user* from the users table."""
    connection, cursor = connect()
    cursor.execute("DELETE FROM users WHERE username=?", (user,))
    commit(connection)
    disconnect(connection)
def clear():
    """Delete every row from the users table."""
    connection, cursor = connect()
    cursor.execute("DELETE FROM users")
    commit(connection)
    disconnect(connection)
import forest
import db
from datetime import datetime

# Run the forest prediction pipeline and persist plan, comparison and
# similarity rows.
f = forest.forest()
f.setData('2007-1-21', 345)
f.normalize()
f.countSimilarity()
f.predict()

# BUG FIX: the original had a stray "i" prefix before the "datetime" key
# (i"datetime"), which is a syntax error.
predict_plan = {"description": "test",
                "datetime": datetime.today().strftime("%Y-%m-%d %H:%M:%S"),
                "predict_rate": f.predict_rate,
                "minRel": f.minRel,
                "predictNum": f.predictNum}
predict_plan_id = db.insertGetId("predict_plan", **predict_plan)
predict_plan["plan_id"] = predict_plan_id

# One "comparation" row per base entity, each with its similarity rows.
for comparation in f.forestSimilarity:
    base = comparation.base
    base_info = {"pc_id": base.pc_id,
                 "ah_id": base.ah_id,
                 "predict_pc": base.powerConsume['predict'],
                 "date": base.date}
    comparation_id = db.insertGetId("comparation", **base_info)
    for similarity in comparation.similarityEntitys:
        ap = similarity.ap
        ap_info = {"comparation_id": comparation_id,
                   "pc_id": ap.pc_id,
                   "ah_id": ap.ah_id,
                   "similarity": similarity.similarity,
                   "date": ap.date}
        db.insertGetId("similarity", **ap_info)
db.commit()
# print datetime.today().strftime("%Y-%m-%d %H:%M:%S")
def set_personal_information_mobile(user_fbid, input_data):
    '''Upload the user's profile data to the database.

    Args:
        user_fbid: the user's unique Facebook id.
        input_data: dict of profile data; keys read here include
            'education_data', 'work_data', 'gender', 'hometown',
            'relationship', 'birthday', 'ename', 'cname', 'bloodtype',
            'current_city', 'religious', 'political_view', 'other',
            'quotes'.
    '''
    # Resolve the internal primary key for this Facebook user.
    fbuser_pk = db.fetch_one('select id from fbuser where fbid = %s',
                             [user_fbid])
    if not fbuser_pk:
        print('{0} Fb user pk not find'.format(fbuser_pk))
        return

    if input_data['education_data']:
        for data in input_data['education_data']:
            # Create (or look up) the fbpage row for the school.
            if data['type'] and data['type'] != '':
                fbpage_pk = db.fetch_one('call add_fbpage_with_type(%s,%s)',
                                         [data['name'], data['type']])
            else:
                fbpage_pk = db.fetch_one('call add_fbpage(%s)', [data['name']])
            if data['fbid']:
                db.exec(
                    'update fbpage set fbid = %s ,url = %s where id = %s ;',
                    [data['fbid'], data['url'], fbpage_pk])

            # Create the association recording when the user attended.
            fbuser_has_education_exist = db.exec(
                'select 1 from fbuser_has_fbpage '
                'where fbuser_id = %s and fbpage_id = %s',
                [fbuser_pk, fbpage_pk])
            # TODO year_graduation
            if not fbuser_has_education_exist:
                db.exec(
                    'insert into fbuser_has_fbpage(fbuser_id,fbpage_id,type,ctype,begin,end)'
                    ' values (%s,%s,%s,%s,%s,%s)', [
                        fbuser_pk, fbpage_pk, 'edu', '學歷', data['begin'],
                        data['end']
                    ])

    if input_data['work_data']:
        for data in input_data['work_data']:
            fbpage_pk = db.fetch_one('call add_fbpage(%s)', [data['name']])
            if data['fbid']:
                db.exec(
                    'update fbpage set fbid = %s ,url = %s where id = %s ;',
                    [data['fbid'], data['url'], fbpage_pk])
            try:
                # Create the association recording when the user worked there.
                fbuser_has_work_exist = db.exec(
                    'select 1 from fbuser_has_fbpage '
                    'where fbuser_id = %s and fbpage_id = %s',
                    [fbuser_pk, fbpage_pk])
                # TODO year_graduation
                if not fbuser_has_work_exist:
                    # NOTE(review): column name "postion" looks like a typo
                    # for "position" but must match the actual DB schema —
                    # confirm before changing.
                    db.exec(
                        'insert into fbuser_has_fbpage(fbuser_id,fbpage_id,type,ctype,begin,end,postion)'
                        ' values (%s,%s,%s,%s,%s,%s,%s)', [
                            fbuser_pk, fbpage_pk, 'work', '工作經歷',
                            data['begin'], data['end'], data['position']
                        ])
            except:
                # NOTE(review): bare except hides all failures; consider
                # narrowing to the DB driver's error class.
                print(sys.exc_info()[0])
                print(sys.exc_info()[1])
                print('++++++++++++++++++++++++++++++++++++++++')
                print(input_data['work_data'])
                print('++++++++++++++++++++++++++++++++++++++++')

    try:
        # Update the user's core profile columns and extended attributes.
        db.exec(
            "update fbuser set gender = %s"
            ", hometown = %s, relationship = %s, birthday = %s, ename = %s, cname = %s,"
            " bloodtype = %s, last_update = %s, current_city =%s where id = %s",
            [
                input_data['gender'], input_data['hometown'],
                input_data['relationship'], input_data['birthday'],
                input_data['ename'], input_data['cname'],
                input_data['bloodtype'],
                datetime.now(), input_data['current_city'], fbuser_pk
            ])
        db.exec(
            'update fbuser_ext set religious = %s, political_view = %s, other = %s, quotes = %s '
            'where fbuser_id = %s', [
                input_data['religious'], input_data['political_view'],
                input_data['other'], input_data['quotes'], fbuser_pk
            ])
    except:
        # NOTE(review): bare except — failures are logged to stdout only.
        print(sys.exc_info()[0])
        print(sys.exc_info()[1])
        print(input_data)

    db.commit()