def wrapper(self, *args, **kwargs):
    # Decorator inner: relies on ``method`` captured from the enclosing
    # decorator scope (not visible in this chunk).
    # Get the session parameters from the request cookies
    auth_id = self.request.cookies.get('auth_id', '')
    session_id = self.request.cookies.get('session_id', '')
    # Check the db for the session
    session = Session.GetSession(session_id, auth_id)
    if session is None:
        # No valid session: mint fresh ids and persist a new record
        session = Session()
        session.session_id = Session.MakeId()
        session.auth_token = Session.MakeId()
        session.put()
    # Attach the session to the method so the handler can read it
    self.SessionObj = session
    # Call the wrapped handler.
    result = method(self, *args, **kwargs)
    # Refresh both cookies on every response (site-wide, HttpOnly)
    self.response.headers.add_header('Set-Cookie', 'auth_id=%s; path=/; HttpOnly' % str(session.auth_token))
    self.response.headers.add_header('Set-Cookie', 'session_id=%s; path=/; HttpOnly' % str(session.session_id))
    return result
def on_message(msg, server):
    """Answer a ``!status`` chat command with the configured realm's state."""
    text = msg.get("text", "")
    if not re.search(r"!status( .*)?", text):
        return
    db = Session()
    try:
        realm = db.query(Realm).filter(Realm.name == wowconfig["realm"]).one()
    except NoResultFound:
        return u"No status known on *{0}*".format(wowconfig["realm"])
    reply = u"Currently *{0}* is ".format(realm.name)
    if not realm.online:
        reply += "offline (last check: {0}, last seen: {1})".format(
            humanize.naturaltime(realm.lastchecked),
            humanize.naturaltime(realm.lastseen)
        )
        return reply
    reply += "online (last check: {0})".format(
        humanize.naturaltime(realm.lastchecked)
    )
    reply += "\n\n*Battleground status*:\n```"
    for area in realm.areas:
        reply += "{0} : {1} controlled : {2} : next in {3}\n".format(
            area.name,
            area.faction,
            area.status,
            humanize.naturaldelta(area.next),
        )
    reply += "```"
    return reply
def syncSeason(self, serie, season):
    """Synchronise one season folder of a serie into the database.

    ``serie``/``season`` are path fragments; raises Exception when the
    serie is unknown or the folder does not look like a season.
    """
    qry = Session.query(Serie).filter_by(path=serie)
    if qry.count() == 0:
        # BUG fix: the original referenced the undefined name ``saison``
        # here, so the intended error message raised NameError instead.
        raise Exception("No serie linked to %s/%s" % (serie, season))
    serie = qry.one()
    path = season
    num = getSeasonNumFromFoldername(season)
    if num is None:
        raise Exception("This is not a season (%s)" % season)
    # list comprehension (py2/py3 safe) instead of filter() + len()
    matches = [s for s in serie.seasons if s.num == num]
    if len(matches) == 0:
        season = Season(num=num, path=path)
        Session.add(season)
        serie.seasons.append(season)
    else:
        assert 1 == len(matches)
        season = matches[0]
        season.num = num
        season.path = path
    episodes = glob(os.path.join(self.basepath, serie.path, season.path, "*"))
    episodes = [e for e in episodes if os.path.isfile(e)]
    for episode in [os.path.basename(e) for e in episodes]:
        try:
            self.syncEpisode(serie.path, season.path, episode)
        except Exception:
            # best effort: one broken episode must not abort the whole sync
            # (narrowed from a bare except that also ate KeyboardInterrupt)
            pass
def make_account(username, password):
    """Create an Account plus its default test bot and persist both."""
    session = Session()
    a = Account(username, password)
    session.add(a)
    bot = MBotData("testerbot", 0, 0, )
    # BUG fix: the bot object was instantiated but never added to the
    # session, so it was silently dropped at commit time -- confirm that
    # MBotData does not self-register elsewhere.
    session.add(bot)
    session.commit()
def initialize_device():
    """Create the Device row from the single HardwareDefinition and
    SoftwareDefinition entries (which must already exist) and, when the
    grainbin service is enabled, seed the grainbin rows too.
    """
    session = Session()
    hardware = session.query(HardwareDefinition).one()
    software = session.query(SoftwareDefinition).one()

    device = Device(id=hardware.serial_number,
                    interior_sensor=hardware.interior_sensor,
                    exterior_sensor=hardware.exterior_sensor)
    device.hardware_version = hardware.hardware_version
    device.software_version = software.software_version
    device.database_service = True  # database always set to True
    device.device_service = software.device_service
    device.grainbin_service = software.grainbin_service
    session.add(device)

    # seed grainbin info when the service is switched on
    if software.grainbin_service:
        grainbins = initialize_grainbin(device.id, hardware.grainbin_reader_count)
        device.grainbin_count = len(grainbins)
        session.add_all(grainbins)

    session.commit()
    session.close()
    return
def do_command (task_id) :
    # Run a task: upload its group's scripts to the group's servers and
    # record the outcome in a TaskStatus row.  Returns {"status": 0,
    # "val": taskStatus_id} on success, {"status": -1, ...} otherwise.
    try :
        task_res = get_task_by_id(task_id)
        if task_res["status"] != 0 :
            return {"status":-1 , "val":None}
        # NOTE(review): ``task`` is assigned but never used below.
        task = task_res["val"]["task"]
        group_id = script_group = task_res["val"]["script_group"].id
        server_group = task_res["val"]["server_group"]
        scripts = get_scripts_by_group_id(group_id)["val"]
        script_list = []
        for script in scripts :
            script_list.append(str(script.location))
        server_list = []
        for server in server_group.servers :
            server_list.append(server.dump())
        # nothing to do without both scripts and servers
        if script_list == [] or server_list == [] :
            return {"status":-1 , "val":None}
        task_to_do = {"scripts":script_list , "servers":server_list}
        success_array , key_code_list = upload_remotefile.do_task(task_to_do)
        session = Session()
        # create-or-update the TaskStatus row for this task
        taskStatus = session.query(TaskStatus).filter(TaskStatus.task_id == task_id).first()
        if taskStatus == None :
            taskStatus = TaskStatus(task_id , json.dumps(success_array) , json.dumps(key_code_list))
            taskStatus_id = taskStatus.save_return_id(session)
        else :
            taskStatus.success_array = json.dumps(success_array)
            taskStatus.key_code_list = json.dumps(key_code_list)
            taskStatus_id = taskStatus.id
            taskStatus.save(session)
        session.close()
        return {"status":0 , "val":taskStatus_id}
    except Exception , msginfo :
        # on any failure the caught exception object is handed back to the caller
        return {"status":-1 , "val":msginfo}
def draw_table(self):
    # Reload all orders from the database and redraw the 5-column table.
    s = Session()
    self.orders = s.query(Order).all()
    s.close()
    self.ui.orders_table.clear()
    self.ui.orders_table.setRowCount(1)
    self.ui.orders_table.setColumnCount(5)
    self.ui.orders_table.setHorizontalHeaderLabels([QString.fromUtf8('Номер'), QString.fromUtf8('Поломка'), QString.fromUtf8('Дата приемки'), QString.fromUtf8('Клиент'), QString.fromUtf8('Статус')])
    #self.ui.orders_table.resizeColumnsToContents()
    for order in self.orders:
        # build the five cell values for this order row
        data = []
        data.append(str(order.id))
        data.append(QString.fromUtf8(order.device))
        data.append(str(order.get_ordered_date()))
        data.append(QString.fromUtf8(order.get_client().get_fio()))
        data.append(QString.fromUtf8(order.get_status(1).to_string()))
        for i in range(0,5):
            tableitem = QTableWidgetItem()
            tableitem.setText(data[i])
            # NOTE(review): these assign plain Python attributes, not the Qt
            # item font/color (that would be setFont()/setForeground()) --
            # they likely have no visual effect; confirm intent.
            tableitem.font = QFont("Arial", 10)
            tableitem.font.setBold(True)
            tableitem.textcolor = QColor("black")
            self.ui.orders_table.setItem(self.ui.orders_table.rowCount() - 1,i,tableitem)
        # grow the table by one row for the next order
        self.ui.orders_table.setRowCount(self.ui.orders_table.rowCount()+1)
    self.ui.orders_table.resizeColumnsToContents()
def update_position(group_id, json_str):
    """Replace a script group's stored ordering with a '|'-separated id list.

    The new list is accepted only if it is a permutation of the ids already
    stored for the group; returns {"status": 0/-1, "val": ...}.
    """
    try:
        session = Session()
        positions = json_str.split('|')
        # drop the empty tail produced by the trailing '|' (same semantics
        # as the original .remove(last-element) call)
        positions.remove(positions[-1])
        for idx in range(len(positions)):
            positions[idx] = int(str(positions[idx]))
        sorted_script = session.query(Sorted_Script).filter(Sorted_Script.group_id == group_id).first()
        stored = json.loads(sorted_script.sorted_array)
        # valid only when both lists hold exactly the same ids
        is_permutation = (len(positions) == len(stored)
                          and all(item in stored for item in positions))
        if is_permutation:
            sorted_script.sorted_array = json.dumps(positions)
            sorted_script.save(session)
            return {"status": 0, "val": None}
        return {"status": -1, "val": None}
    except Exception as msginfo:
        return {"status": -1, "val": msginfo}
def main_page():
    """Render the landing page with every meal and product."""
    db = Session()
    context = {
        'meals': db.query(Meal).all(),
        'products': db.query(Product).all(),
    }
    db.close()
    return render_template('beta.html', **context)
def main_page():
    """Render the landing page with every book and author."""
    db = Session()
    context = {
        'books': db.query(Book).all(),
        'authors': db.query(Author).all(),
    }
    db.close()
    return render_template('beta.html', **context)
class LogHandlerTests(reader.LogHandler):
    """Collects (test, subtest) pairs from a log stream and bulk-inserts
    the ones not already present in the database at suite end."""

    def __init__(self):
        self.session = Session()
        # pairs already present in the database
        self.tests = {(row.test, row.subtest) for row in self.session.query(Test)}
        self.new_tests = []

    def _insert_test(self, test, subtest):
        # Queue a mapping for any pair we have not seen before.
        if (test, subtest) in self.tests:
            return
        self.new_tests.append({"test": test, "subtest": subtest})
        self.tests.add((test, subtest))

    def test_status(self, data):
        self._insert_test(data["test"], data["subtest"])

    def test_end(self, data):
        self._insert_test(data["test"], None)
        # progress tick on stdout
        sys.stdout.write("-")
        sys.stdout.flush()

    def suite_end(self, data):
        self.session.bulk_insert_mappings(Test, self.new_tests)
        self.session.commit()
        sys.stdout.write(" committing\n")
def delete(self, sessionid):
    # Delete the CoachingSession with the given id; respond 404 if absent.
    s = Session()
    # NOTE(review): ``s.transaction`` used as a context manager -- presumably
    # a transactional scope on this Session implementation; confirm.
    with s.transaction:
        session = s.query(CoachingSession).get(sessionid)
        if session:
            s.delete(session)
        else:
            self.response.set_status(404)
def get_stored_range(x_min, x_max, y_min, y_max):
    """Fetch every DataMap tile inside the inclusive rectangle.

    Returns a dict mapping (x, y) -> stored value.
    """
    db = Session()
    tiles = db.query(DataMap).filter(
        and_(DataMap.x >= x_min, DataMap.x <= x_max,
             DataMap.y >= y_min, DataMap.y <= y_max)).all()
    return {(tile.x, tile.y): tile.value for tile in tiles}
def get_service_group_by_id(group_id) :
    # NOTE(review): despite the name and the ``group_id`` parameter, this
    # returns EVERY ServerGroup row and never filters by id -- confirm
    # whether a ``.filter(ServerGroup.id == group_id)`` was intended.
    try :
        session = Session()
        group_list = session.query(ServerGroup).all()
        if group_list == None or len(group_list) == 0 :
            return {"status":-1 , "val":None}
        return {"status":0 , "val":group_list}
    except Exception , msginfo :
        return {"status":-1 , "val":msginfo}
def get_server_by_id(server_id):
    """Look up one Server row; status 0 with the row on hit, -1 otherwise."""
    try:
        db = Session()
        row = db.query(Server).filter(Server.id == server_id).first()
        if row is None:
            return {"status": -1, "val": None}
        return {"status": 0, "val": row}
    except Exception as msginfo:
        return {"status": -1, "val": msginfo}
def get_script_group_list():
    """Return every ScriptGroup row; status -1 when there are none."""
    try:
        db = Session()
        groups = db.query(ScriptGroup).all()
        if not groups:
            return {"status": -1, "val": None}
        return {"status": 0, "val": groups}
    except Exception as msginfo:
        return {"status": -1, "val": msginfo}
def get_script_by_id(script_id):
    """Look up one Script row; status 0 with the row on hit, -1 otherwise."""
    try:
        db = Session()
        row = db.query(Script).filter(Script.id == script_id).first()
        if row is None:
            return {"status": -1, "val": None}
        return {"status": 0, "val": row}
    except Exception as msginfo:
        return {"status": -1, "val": msginfo}
def login(self):
    """Check the entered credentials; on a match open the main window."""
    s = Session()
    matches = s.query(Staff).filter_by(
        login=unicode(self.ui.login.text()),
        passwd=unicode(self.ui.password.text())).all()
    if matches:
        # credentials accepted: hand the staff row to the main window
        self.mv = MainWindow(matches[0])
        self.mv.show()
        self.close()
    s.close()
def delete_task(task_id):
    """Delete the Task with the given id; status -1 when it does not exist."""
    try:
        db = Session()
        row = db.query(Task).filter(Task.id == task_id).first()
        if row is None:
            return {"status": -1, "val": None}
        row.delete(db)
        return {"status": 0, "val": None}
    except Exception as msginfo:
        return {"status": -1, "val": msginfo}
def get_server_group_by_id(group_id):
    """Fetch one ServerGroup with its servers relation eagerly touched."""
    try:
        session = Session()
        group = session.query(ServerGroup).filter(ServerGroup.id == group_id).first()
        # BUG fix: the original accessed group.servers BEFORE this None
        # check, so a missing group raised AttributeError instead of
        # returning the miss status.
        if group is None:
            return {"status": -1, "val": None}
        # touch the lazy relation while the session is still usable
        group.servers = group.servers
        return {"status": 0, "val": group}
    except Exception as msginfo:
        return {"status": -1, "val": msginfo}
def draw_client_combo(self):
    """Repopulate the client combo box from the clients table."""
    combo = self.ui.client
    s = Session()
    clients = s.query(Client).all()
    s.close()
    combo.clear()
    for client in clients:
        combo.addItem('%i %s %s'%(client.id, client.surname, client.name))
def get_servers_by_group_id(group_id):
    """Return the servers belonging to one ServerGroup."""
    try:
        session = Session()
        group = session.query(ServerGroup).filter(ServerGroup.id == group_id).first()
        if group is None:
            return {"status": -1, "val": None}
        for server in group.servers:
            # touch the lazy relation while the session is still usable
            server.groups = server.groups
        # BUG fix: the success path returned status -1; every sibling
        # helper in this module signals success with status 0.
        return {"status": 0, "val": group.servers}
    except Exception as msginfo:
        return {"status": -1, "val": msginfo}
def get_all_service():
    """Return every Service row with its groups relation pre-loaded."""
    try:
        db = Session()
        services = db.query(Service).all()
        if not services:
            return {"status": -1, "val": None}
        for service in services:
            # delay query: force the lazy relation before the session closes
            service.groups = service.groups
        return {"status": 0, "val": services}
    except Exception as msginfo:
        return {"status": -1, "val": msginfo}
def delaytasks():
    """Drop the local celery db file (if any) and restart every user's task."""
    try:
        os.remove("celerydb.data")
    except OSError:
        # file absent -- nothing to clean up
        pass
    db = Session()
    for user in db.query(User).all():
        start(user)
def get_all_scripts():
    """Return every Script row with its groups relation pre-loaded."""
    try:
        db = Session()
        scripts = db.query(Script).all()
        if not scripts:
            return {"status": -1, "val": None}
        for script in scripts:
            # delay query: force the lazy relation before the session closes
            script.groups = script.groups
        return {"status": 0, "val": scripts}
    except Exception as msginfo:
        return {"status": -1, "val": msginfo}
def get_all_servers():
    """Return every Server row with its groups relation pre-loaded."""
    try:
        db = Session()
        servers = db.query(Server).all()
        if not servers:
            return {"status": -1, "val": None}
        for server in servers:
            # force the lazy relation before the session closes
            server.groups = server.groups
        return {"status": 0, "val": servers}
    except Exception as msginfo:
        return {"status": -1, "val": msginfo}
def do_request(task_id):
    """Replay a task's stored upload state and return the remote result."""
    try:
        db = Session()
        status = db.query(TaskStatus).filter(TaskStatus.task_id == task_id).first()
        if status is None:
            return {"status": -1, "val": None}
        result = upload_remotefile.get_request_result(
            json.loads(status.success_array),
            json.loads(status.key_code_list))
        return {"status": 0, "val": result}
    except Exception as msginfo:
        # note: mirrors the original -- the exception itself is discarded
        return {"status": -1, "val": None}
def get_script_content_by_id(script_id):
    """Read a script's file content from disk by its database id."""
    try:
        session = Session()
        script = session.query(Script).filter(Script.id == script_id).first()
        if script is None:
            return {"status": -1, "val": None}
        # FIX: context manager guarantees the handle is closed even when
        # read() raises (the original leaked the handle on that path).
        with open(script.location, "r") as src:
            content = src.read()
        return {"status": 0, "val": content}
    except Exception as msginfo:
        return {"status": -1, "val": msginfo}
def dump_map_to_db(d_map):
    """Upsert every (x, y, value) tuple of d_map into the DataMap table."""
    session = Session()
    for x, y, v in d_map:
        # merge = insert-or-update on the primary key
        session.merge(DataMap(x=x, y=y, value=v))
    session.commit()
def remove_service(service_id, group_id):
    """Detach a Service from a ServerGroup; status 0 on success."""
    try:
        session = Session()
        service = session.query(Service).filter(Service.id == service_id).first()
        group = session.query(ServerGroup).filter(ServerGroup.id == group_id).first()
        # BUG fix: the original tested the undefined name ``server`` (and
        # checked membership in group.servers), raising NameError on every
        # call instead of operating on the ``service`` it just fetched.
        if service is None or group is None:
            return {"status": -1, "val": None}
        # NOTE(review): the original removal used ``group.service`` -- kept
        # here; confirm the relationship attribute name on ServerGroup.
        if service in group.service:
            group.service.remove(service)
            group.save(session)
            return {"status": 0, "val": None}
        # consistency with the sibling remove_script(): explicit miss status
        return {"status": -1, "val": None}
    except Exception as msginfo:
        return {"status": -1, "val": msginfo}
def get_scripts_by_group_id(group_id):
    """Return a ScriptGroup's scripts ordered by its persisted sort array."""
    try:
        session = Session()
        group = session.query(ScriptGroup).filter(ScriptGroup.id == group_id).first()
        order_row = session.query(Sorted_Script).filter(Sorted_Script.group_id == group_id).first()
        if group is None or order_row is None:
            return {"status": -1, "val": None}
        by_id = {}
        for script in group.scripts:
            # touch the lazy relation while the session is still usable
            script.groups = script.groups
            by_id[script.id] = script
        ordered = [by_id[script_id] for script_id in json.loads(order_row.sorted_array)]
        return {"status": 0, "val": ordered}
    except Exception as msginfo:
        return {"status": -1, "val": msginfo}
def update_script(script_id, name, content, desc):
    """Update a Script's metadata and rewrite its backing file on disk."""
    try:
        session = Session()
        ori_script = session.query(Script).filter(Script.id == script_id).first()
        if ori_script is None:
            return {"status": -1, "val": None}
        ori_script.name = name
        ori_script.desc = desc
        # FIX: context manager closes the handle even if a write fails
        # (the original leaked the handle on that path); close() already
        # flushes, so the explicit flush() was redundant.
        with open(ori_script.location, "w") as dst:
            dst.writelines(str(content).replace('\r\n', '\n'))
        ori_script.save(session)
        return {"status": 0, "val": script_id}
    except Exception as msginfo:
        return {"status": -1, "val": msginfo}
def create_record(cls, work, annotator, force_create=False, integration=None):
    """Build a complete MARC record for a given work.

    Uses the record cached on the work (annotator.marc_cache_field) unless
    force_create is set; the cacheable fields are regenerated and re-stored,
    then per-request fields are layered on top without being cached.
    Returns None when the work has no usable license pool.
    """
    if callable(annotator):
        annotator = annotator()
    # materialized works carry the pool directly; regular works resolve it
    if isinstance(work, BaseMaterializedWork):
        pool = work.license_pool
    else:
        pool = work.active_license_pool()
    if not pool:
        return None
    edition = pool.presentation_edition
    identifier = pool.identifier
    _db = Session.object_session(work)
    record = None
    existing_record = getattr(work, annotator.marc_cache_field)
    if existing_record and not force_create:
        # reuse the cached MARC blob
        record = Record(data=existing_record.encode('utf-8'), force_utf8=True)
    if not record:
        # build the cacheable portion of the record from scratch
        record = Record(leader=annotator.leader(work), force_utf8=True)
        annotator.add_control_fields(record, identifier, pool, edition)
        annotator.add_isbn(record, identifier)
        # TODO: The 240 and 130 fields are for translated works, so they can be grouped even
        # though they have different titles. We do not group editions of the same work in
        # different languages, so we can't use those yet.
        annotator.add_title(record, edition)
        annotator.add_contributors(record, edition)
        annotator.add_publisher(record, edition)
        annotator.add_physical_description(record, edition)
        annotator.add_audience(record, work)
        annotator.add_series(record, edition)
        annotator.add_system_details(record)
        annotator.add_ebooks_subject(record)
        data = record.as_marc()
        # store the freshly built blob back on the work
        if isinstance(work, BaseMaterializedWork):
            setattr(pool.work, annotator.marc_cache_field, data)
        else:
            setattr(work, annotator.marc_cache_field, data)
    # Add additional fields that should not be cached.
    annotator.annotate_work_record(work, pool, edition, identifier, record, integration)
    return record
def create_scriptgroup(name):
    """Create a ScriptGroup (plus an empty sort array) if the name is free."""
    try:
        session = Session()
        existing = session.query(ScriptGroup).all()
        if any(group.name == name for group in existing):
            return {"status": -1, "val": "Script group name is not unique!"}
        group = ScriptGroup(name)
        group_id = group.save_return_id(session)
        # every group gets a companion row holding its script ordering
        Sorted_Script(group_id, json.dumps([])).save(session)
        return {"status": 0, "val": group_id}
    except Exception as msginfo:
        return {"status": -1, "val": msginfo}
def remove_script(script_id , group_id):
    # Detach a script from a group and drop its id from the group's
    # persisted sort-order array.  Status 0 only when the script was
    # actually a member of the group.
    try :
        session = Session()
        script = session.query(Script).filter(Script.id == script_id).first()
        group = session.query(ScriptGroup).filter(ScriptGroup.id == group_id).first()
        if script == None or group == None :
            return {"status":-1 , "val":None}
        if script in group.scripts :
            group.scripts.remove(script)
            group.save(session)
            # keep the persisted ordering array in sync with membership
            ss = session.query(Sorted_Script).filter(Sorted_Script.group_id == group_id).first()
            sorted_array = json.loads(ss.sorted_array)
            sorted_array.remove(int(script_id))
            ss.sorted_array = json.dumps(sorted_array)
            ss.save(session)
            return {"status":0 , "val":None}
        return {"status":-1 , "val":None}
    except Exception , msginfo :
        return {"status":-1 , "val":msginfo}
def delete_script(script_id) :
    # Delete a Script row after scrubbing its id from the sort-order array
    # of every group it belongs to and emptying its group memberships.
    try :
        session = Session()
        script = session.query(Script).filter(Script.id == script_id).first()
        if script == None :
            return {"status":-1 , "val": None}
        for group in script.groups :
            # remove this script's id from each group's persisted ordering
            ss = session.query(Sorted_Script).filter(Sorted_Script.group_id == group.id).first()
            sorted_array = json.loads(ss.sorted_array)
            sorted_array.remove(int(script_id))
            ss.sorted_array = json.dumps(sorted_array)
            ss.save(session)
        # detach from all groups before deleting the row itself
        while(len(script.groups)>0) :
            script.groups.pop()
        script.save(session)
        script.delete(session)
        return {"status":0 , "val":script_id}
    except Exception , msginfo :
        return {"status":-1 , "val":msginfo}
def get_coordinates(zipcode, miles):
    """Resolve a zipcode to lat/long and report nearby providers."""
    session = Session()
    try:
        # To fetch the geographical coordinates from Zip_geo table
        z = select([Zip_geo.latitude, Zip_geo.longitude]).where(Zip_geo.zipcode == zipcode)
        geo_data = session.execute(z).fetchone()
        # If zipcode exists in database
        if geo_data:
            patient_loc = geopy.Point(geo_data.latitude, geo_data.longitude)
            find_distance(patient_loc, miles)
        else:
            print('Zipcode not found. Please enter a valid zipcode')
    finally:
        # FIX: always release the session (the original layout only closed
        # it on the zipcode-not-found path).
        session.close()
def create_task(name, script_group_id, server_group_id):
    """Create a Task linking a script group to a server group."""
    try:
        session = Session()
        new_task = Task(script_group_id, server_group_id, name)
        # BUG fix: save_return_id() was called without the session, unlike
        # every other save_return_id(session) call in this module, so the
        # save could not happen on this session.
        task_id = new_task.save_return_id(session)
        if task_id <= 0:
            return {"status": -1, "val": None}
        return {"status": 0, "val": task_id}
    except Exception as msginfo:
        return {"status": -1, "val": msginfo}
def add_purchase(self, user, product, quantity, session=None):
    """Record a purchase; commits and closes only when it owns the session.

    When the caller supplies a session, the purchase is merely staged and
    committing is the caller's responsibility.
    """
    owns_session = not session
    if owns_session:
        session = Session()
    session.add(Purchase(user, product, quantity))
    if owns_session:
        session.commit()
        session.close()
def showAllSongs():
    """Print every stored song as a tab-separated table."""
    session = Session()
    # BUG fix: ``songs`` was never defined in the original; presumably it
    # should be loaded from the database -- confirm the model name ``Song``.
    songs = session.query(Song).all()
    print("-" * 10, "Show all songs.", "-" * 10)
    # BUG fix: the header row listed six column names but the format string
    # only had three placeholders, silently dropping half the header.
    print("{}\t{}\t{}\t{}\t{}\t{}".format("序号", "名字", "艺人", "专辑", "播放列表", "网易云id"))
    for song in songs:
        print(song.to_dict())
        print("{}\t{}\t{}\t{}\t{}\t{}".format(song.id, song.name, song.artist, song.album, song.playlist, song.nid))
    print("")  # trailing blank line for readability
def from_config(cls, library):
    """Instantiate the exporter from the library's MARC export integration.

    Raises CannotLoadConfiguration when no such integration exists.
    """
    _db = Session.object_session(library)
    integration = ExternalIntegration.lookup(
        _db,
        ExternalIntegration.MARC_EXPORT,
        ExternalIntegration.CATALOG_GOAL,
        library=library)
    if not integration:
        raise CannotLoadConfiguration(
            "No MARC export service is configured for this library")
    return cls(_db, library, integration)
def login(account_id):
    """Open a Session row for the account, mark it active, queue a fetch."""
    record = Session(account_id=account_id)
    db.session.add(record)
    db.session.commit()
    # waking the account requires a second commit after the row exists
    record.account.dormant = False
    db.session.commit()
    tasks.fetch_acc.s(account_id).apply_async(routing_key='high')
    return record
class TestModel(unittest.TestCase):
    """Model-level checks against a pre-populated phone/region database."""
    def setUp(self):
        # fresh ORM session per test
        self.session = Session()
    def tearDown(self):
        pass
    def test_phone(self):
        # a known fixture number with its expected type and linked region
        p = self.session.query(Phone).filter_by(number=1761166).first()
        self.assertEqual(p.number, 1761166)
        self.assertEqual(p.type, 2)
        self.assertIsInstance(p.region, Region)
    def test_region(self):
        # a known fixture region row and its reverse phones relation
        r = self.session.query(Region).filter_by(zip_code='100000').first()
        self.assertEqual(r.zip_code, '100000')
        self.assertEqual(r.area_code, '010')
        self.assertEqual(r.city, '北京')
        self.assertEqual(r.province, '北京')
        self.assertIsInstance(r.phones, AppenderQuery)
def cron():
    """Mail a reminder for articles whose posttime fell exactly
    NOTICE_PERIOD days ago (one whole day window)."""
    gt = (round(time.time()/86400) - config.NOTICE_PERIOD) * 86400
    lt = gt + 86400
    # BUG fix: the original filtered lt <= posttime < gt, which is an empty
    # window since lt > gt -- no article could ever match.  The intended
    # window is gt <= posttime < lt.
    article_list = Session.query(ArticleModel).filter(
        gt <= ArticleModel.posttime, ArticleModel.posttime < lt).all()
    if article_list:
        names = "\n".join([article.title for article in article_list])
        subject = u"文章到期提醒"
        content = (u"以下%d篇文章已经到期\n%s" % (len(article_list), names)).replace('\n','<br />\n')
        send_mail(subject, content, config.NOTICE_NAME, config.NOTICE_MAIL)
    else:
        print("no articles to be notify")
def create_test_world():
    # Seed a small fixture world: four locations, the paths between them,
    # and one active test session starting in the hotel.
    world = add(World(name='Test World'))
    portal = add(Location(name='Portal', world=world))
    plaza = add(Location(name='Plaza', world=world))
    hotel = add(Location(name='Old Grand Hotel', world=world))
    basement = add(Location(name='Hotel Basement', world=world))
    # NOTE: Path descriptions describe what the traveller sees on ARRIVAL.
    add(
        Path(
            start=portal,
            destination=hotel,
            description=
            "YOU ARE IN THE HOTEL. THERE'S A DOOR TO THE BASEMENT IN FRONT OF YOU."
        ))
    add(
        Path(
            start=plaza,
            destination=hotel,
            description=
            "YOU ARE IN THE HOTEL. THERE'S A DOOR TO THE BASEMENT IN FRONT OF YOU."
        ))
    add(
        Path(start=hotel,
             destination=plaza,
             description="YOU ARE IN THE PLAZA, FACING THE HOTEL."))
    add(
        Path(
            start=hotel,
            destination=basement,
            description=
            "YOU ARE IN THE BASEMENT. THERE ARE STAIRS UP TO THE HOTEL LOBBY BEHIND YOU."
        ))
    add(
        Path(
            start=basement,
            destination=hotel,
            description=
            "YOU ARE IN THE HOTEL LOBBY. THERE'S AN EXIT TO THE PLAZA IN FRONT OF YOU."
        ))
    # the session tracks where the player is and where they came from
    add(
        Session(code='TestSession1',
                active=True,
                current_location=hotel,
                previous_location=portal))
    db.session.commit()
def get(self):
    """Render one page of weibos together with their authors."""
    page = int(self.get_argument('page', 1))
    per_page_size = 10
    session = Session()
    weibo_query = session.query(Weibo)
    all_pages = ceil(weibo_query.count() / per_page_size)
    wb_list = (weibo_query.filter()
               .order_by(Weibo.created.desc())
               .limit(per_page_size)
               .offset((page - 1) * per_page_size))
    # map author id -> User for the weibos on this page
    author_ids = (wb.user_id for wb in wb_list)
    users = {u.id: u
             for u in session.query(User).filter(User.id.in_(author_ids))}
    self.render('home.html',
                wb_list=wb_list,
                users=users,
                all_pages=all_pages,
                cur_page=page)
def PUT(self, id):
    """Create (id == '0') or update an article from the JSON request body."""
    item = json.loads(web.data())
    if id == '0':
        id = None
    # missing fields fall back to placeholder defaults
    code = item.get('code', 'code')
    title = item.get('title', 'title')
    try:
        posttime = datetime.datetime.strptime(item['posttime'], '%Y-%m-%d')
    except (KeyError, ValueError):
        # absent or malformed date: stamp with "now"
        posttime = datetime.datetime.now()
    remark = item.get('remark', '')
    if id:
        Session.query(ArticleModel).filter(ArticleModel.id == id).update({
            'code': code,
            'title': title,
            'posttime': posttime,
            'remark': remark
        })
    else:
        Session.add(ArticleModel(id, code, title, posttime, remark))
    Session.commit()
def find_distance(patient_location, miles):
    # Count providers within ``miles`` of the patient and print the result.
    # NOTE(review): the session is never closed here -- confirm whether the
    # caller is expected to manage its lifetime.
    session = Session()
    p = select([Provider.latitude, Provider.longitude])
    result = session.execute(p)
    doc_data = result.fetchall()
    doc_count = 0
    # Checks each doctor's location in the database against the radius
    for coordinates in doc_data:
        doc_location = geopy.Point(coordinates.latitude, coordinates.longitude)
        dist = geopy.distance.distance(patient_location, doc_location).miles
        if dist < float(miles):
            doc_count = doc_count + 1
    if doc_count == 0:
        print 'No doctors found near you in ' + str(miles) + ' miles'
    else:
        print str(doc_count) + ' doctors found near you in ' + str(
            miles) + ' miles'
def add_session(self, name, start_time, end_time, deregister_timeframe,
                early_bird, speaker, capacity, venue):
    """Create a Session attached to this event, stage it, and return it.

    Committing is left to the caller.
    """
    new_session = Session(
        name=name,
        start_time=start_time,
        end_time=end_time,
        deregister_timeframe=deregister_timeframe,
        early_bird=early_bird,
        speaker=speaker,
        event=self,
        capacity=capacity,
        venue=venue,
    )
    db.session.add(new_session)
    return new_session
def __init__(self, p_user_id: int):
    """Load the stored session for this user, creating one on first use."""
    existing = db_session.query(Session).filter(
        Session.user_id == p_user_id).first()
    if existing is None:
        # no session on record for this user yet -- create and persist one
        existing = Session(user_id=p_user_id)
        db_session.add(existing)
        db_session.commit()
    self.sess = existing
    self.session_id = self.sess.id
    self.user_id = p_user_id
    # previously found books accumulate here
    self.search_result = []
def create_challenge(uuid, duration):
    """Create a challenge User row lasting ``duration`` months.

    Returns "Success" or the caught exception object (original contract).
    """
    msg = "Success"
    session = None
    try:
        session = Session()
        user = User(greenlist_uuid=uuid,
                    challenge_start_ts=datetime.utcnow(),
                    challenge_end_ts=datetime.utcnow() + timedelta(days=duration * 30),
                    challenge_score=0)
        session.add(user)
        session.commit()
    except Exception as e:
        msg = e
    finally:
        # FIX: close on every path; the original only closed the session on
        # the success path and leaked it on failure.
        if session is not None:
            session.close()
        return msg
def create_script(name, content, desc):
    """Write script content to a new md5-named file and register a Script row."""
    try:
        session = Session()
        # file name derived from the current timestamp's md5
        filename = hashlib.md5(str(datetime.datetime.now())).hexdigest()
        # FIX: context manager closes the handle even when a write fails
        # (the original leaked it); close() flushes, so flush() was redundant.
        with open(BASE_PATH + filename, 'w') as newfile:
            newfile.writelines(content.replace('\r\n', '\n'))
        newscript = Script(name, BASE_PATH + filename, desc)
        script_id = newscript.save_return_id(session)
        return {"status": 0, "val": script_id}
    except Exception as msginfo:
        return {"status": -1, "val": msginfo}
def auth_change(client_ip, user_agent, uid):
    """Generates user session."""
    session_id, salt = generate_session(client_ip, user_agent)
    expires_at = datetime.now() + timedelta(minutes=SESSION_DURATION)
    db.session.add(Session(session_id=session_id,
                           salt=salt,
                           expiration=expires_at,
                           user_id=uid))
    db.session.commit()
    # the cookie-safe form of the id goes back to the caller
    return b64encode(session_id)
def add_zipdata(db_enabled, zipcode, latitude, longitude):
    """Build a Zip_geo row and, when the db is enabled, persist it.

    Always returns the (possibly unpersisted) Zip_geo instance.
    """
    session = Session() if db_enabled else None
    zip_geo = Zip_geo(zipcode=zipcode, latitude=latitude, longitude=longitude)
    if db_enabled:
        session.add(zip_geo)
        session.commit()
        session.close()
    return zip_geo
def weather_to_db(data):
    """Parse an OpenWeatherMap current-weather JSON payload and store it
    as one Weather row."""
    session = Session()
    weather = json.loads(data)
    print(weather)
    print(type(weather), len(weather))
    timestamp_dt = weather.get('dt')
    print(timestamp_dt)
    time_standard_dt = timestamp_convert(timestamp_dt)
    timestamp_sunrise = weather.get('sys').get('sunrise')
    time_standard_surise = timestamp_convert(timestamp_sunrise)
    timestamp_sunset = weather.get('sys').get('sunset')
    time_standard_sunset = timestamp_convert(timestamp_sunset)
    kwargs = {
        'coord_lon': weather.get('coord').get('lon'),
        'coord_lat': weather.get('coord').get('lat'),
        'weather_id': weather.get('weather')[0]['id'],
        'weather_main': weather.get('weather')[0]['main'],
        'weather_description': weather.get('weather')[0]['description'],
        'weather_icon': weather.get('weather')[0]['icon'],
        'base': weather.get('base'),
        'main_temp': weather.get('main').get('temp'),
        'main_feels_like': weather.get('main').get('feels_like'),
        'main_temp_max': weather.get('main').get('temp_max'),
        'main_temp_min': weather.get('main').get('temp_min'),
        'main_pressure': weather.get('main').get('pressure'),
        'main_humidity': weather.get('main').get('humidity'),
        'visibility': weather.get('visibility'),
        'wind_speed': weather.get('wind').get('speed'),
        # BUG fix: wind_deg previously copied wind.speed (copy-paste error)
        'wind_deg': weather.get('wind').get('deg'),
        'clouds_all': weather.get('clouds').get('all'),
        'dt': time_standard_dt,
        'sys_type': weather.get('sys').get('type'),
        'sys_id': weather.get('sys').get('id'),
        'sys_country': weather.get('sys').get('country'),
        'sys_sunrise': time_standard_surise,
        'sys_sunset': time_standard_sunset,
        'timezone': weather.get('timezone'),
        'city_id': weather.get('id'),
        'name': weather.get('name'),
        'cod': weather.get('cod'),
    }
    row_weather = Weather(**kwargs)
    session.add(row_weather)
    session.commit()
    session.close()
    return
def save_data_to_post_data(all_posts_url):
    # Persist scraped posts into the post_data table.
    # Each element is assumed to be (group_id, post_id, post_text) -- TODO confirm.
    session = Session()
    for elements in all_posts_url:
        c1 = Post_data(group_id=elements[0],
                       post_id=elements[1],
                       post_text=elements[2])
        session.add(c1)
    # single commit after staging all rows
    session.commit()
class TestModel(unittest.TestCase):
    """Checks for the detail()/content() serialisation helpers on fixtures."""
    def setUp(self):
        # fresh ORM session per test
        self.session = Session()
    def tearDown(self):
        pass
    def test_phone(self):
        # detail() returns (number, carrier, region-dict) for a known number
        p = self.session.query(Phone).filter_by(number=1761166).first()
        res = p.detail()
        self.assertEqual(res[0], 1761166)
        self.assertEqual(res[1], '联通')
        self.assertEqual(res[2]['zip_code'], '100000')
        self.assertEqual(res[2]['area_code'], '010')
        self.assertEqual(res[2]['city'], '北京')
        self.assertEqual(res[2]['province'], '北京')
    def test_region(self):
        # content() returns the region row as a plain dict
        p = self.session.query(Region).filter_by(zip_code='100000').first()
        res = p.content()
        self.assertEqual(res['zip_code'], '100000')
        self.assertEqual(res['area_code'], '010')
        self.assertEqual(res['city'], '北京')
        self.assertEqual(res['province'], '北京')
def collect_event(self, library, license_pool, event_type, time,
                  old_value=None, new_value=None, **kwargs):
    """Log one circulation event, scoped to this collector's library."""
    if not library and not license_pool:
        raise ValueError(
            "Either library or license_pool must be provided.")
    # resolve a database session from whichever object we were given
    source = library if library else license_pool
    _db = Session.object_session(source)
    # events for other libraries are silently dropped when scoped
    if library and self.library_id and library.id != self.library_id:
        return
    CirculationEvent.log(_db, license_pool, event_type, old_value,
                         new_value, start=time)
class ParsePhone(SourcePhone):
    """Parse phone.dat phone data into Region and Phone database rows."""

    def __init__(self):
        super(ParsePhone, self).__init__()
        self.session = Session()

    def get_data_mapping(self):
        # Walk the '\x00'-terminated region records that start at byte 8 of
        # the buffer and persist each as a Region row.  Returns a mapping of
        # record start offset -> Region.id, used to resolve phone records.
        mapping = {}
        start_offset = 8
        while True:
            # find the terminator of the current record
            end_offset = start_offset + self.buf[start_offset:-1].find(b'\x00')
            if not len(self.buf[start_offset:end_offset]) > 1:
                # empty/one-byte slice marks the end of the region section
                break
            record_content = self.buf[start_offset:end_offset].decode()
            # record layout: province|city|zip_code|area_code
            province, city, zip_code, area_code = record_content.split('|')
            r = Region(province=province,
                       city=city,
                       zip_code=zip_code,
                       area_code=area_code)
            self.session.add(r)
            # commit per record so r.id is populated for the mapping
            self.session.commit()
            mapping[start_offset] = r.id
            start_offset = end_offset + 1
        return mapping

    def get_phone_data(self, mapping):
        # Unpack the fixed-width phone records that follow the region block
        # and persist each as a Phone row linked via the offset mapping.
        current_offset = self.first_phone_record_offset
        while current_offset < len(self.buf):
            buffer = self.buf[current_offset:current_offset + self.phone_fmt_length]
            number, region_offset, phone_type = struct.unpack(
                self.phone_fmt, buffer)
            p = Phone(number=number,
                      type=phone_type,
                      region_id=mapping[region_offset])
            self.session.add(p)
            self.session.commit()
            current_offset += self.phone_fmt_length
        return None

    def main(self):
        # Full import: regions first (to build the offset map), then phones.
        mapping = self.get_data_mapping()
        self.get_phone_data(mapping)
def save_data_to_comment_data(all_comments):
    # Persist scraped comments into the comment_data table.
    # Each element is assumed to be (post_id, comment_id, comment_text,
    # likes) -- TODO confirm against the scraper.
    session = Session()
    for elements in all_comments:
        c1 = Comment_data(post_id=elements[0],
                          comment_id=elements[1],
                          comment_text=elements[2],
                          likes=elements[3])
        session.add(c1)
    # single commit after staging all rows
    session.commit()