def test_16_skills(self):
    """A freshly constructed EventSkills row must match the stored one."""
    session = Session()
    event = session.query(Event).filter_by(name='Race for the Cure').first()
    built = EventSkills("Teaching/Tutoring", event.id)
    stored = session.query(EventSkills).filter_by(event_id=event.id).first()
    self.assertTrue(built.skill == stored.skill)
    session.close()
def test_14_neighborhoods(self):
    """A freshly constructed EventNeighborhoods row must match the stored one."""
    session = Session()
    event = session.query(Event).filter_by(name='Race for the Cure').first()
    built = EventNeighborhoods("Back Bay", event.id)
    stored = session.query(EventNeighborhoods).filter_by(event_id=event.id).first()
    self.assertTrue(built.neighborhood == stored.neighborhood)
    session.close()
def blogindex(page):
    """Render page `page` of the blog index, grouped by catalog.

    Non-admin visitors only see posts with visual == 0.
    """
    try:
        page = int(page)
    except (TypeError, ValueError):
        # Non-numeric page falls back to the first page.
        page = 1
    if page <= 0:
        abort(404)
    s = Session()
    blogs = s.query(Blog).order_by(desc(Blog.date))
    # Only an authenticated admin may see hidden posts; a missing session
    # key or a credential mismatch both fall through to the filter.
    try:
        if session["username"] != config.username or session["password"] != config.password:
            raise KeyError
    except KeyError:
        blogs = blogs.filter(Blog.visual == 0)
    # Group every visible post by its catalog for the sidebar.
    catalog = {}
    for post in blogs:
        catalog.setdefault(post.catalog, []).append(post)
    blog_count = blogs.count()
    # Ceiling division; '//' keeps page_count an int on Python 3 as well.
    page_count = (blog_count + config.blog_per_page - 1) // config.blog_per_page
    blogs = blogs.offset((page - 1) * config.blog_per_page).limit(config.blog_per_page)
    return render_template("index.html", blogs=blogs, config=config, page=page,
                           page_count=page_count, catalog=catalog)
def showcatalog(argument, page):
    """Render page `page` of posts in catalog `argument`.

    Redirects straight to the post when the catalog holds exactly one entry.
    Non-admin visitors only see posts with visual == 0.
    """
    try:
        page = int(page)
    except (TypeError, ValueError):
        page = 1
    if page <= 0:
        abort(404)
    s = Session()
    blogs = s.query(Blog).order_by(desc(Blog.date))
    # Only an authenticated admin may see hidden posts; a missing session
    # key or a credential mismatch both fall through to the filter.
    try:
        if session["username"] != config.username or session["password"] != config.password:
            raise KeyError
    except KeyError:
        blogs = blogs.filter(Blog.visual == 0)
    # Group every visible post by catalog (sidebar) before narrowing.
    catalog = {}
    for post in blogs:
        catalog.setdefault(post.catalog, []).append(post)
    blogs = blogs.filter(Blog.catalog == argument)
    blog_count = blogs.count()
    if blog_count == 1:
        return redirect(url_for("blog.showpost", pid=blogs.first().pid))
    # Ceiling division; '//' keeps page_count an int on Python 3 as well.
    page_count = (blog_count + config.blog_per_page - 1) // config.blog_per_page
    blogs = blogs.offset((page - 1) * config.blog_per_page).limit(config.blog_per_page)
    return render_template("catalog.html", blogs=blogs, title=argument,
                           config=config, page=page, page_count=page_count,
                           catalog=catalog, cat=argument)
def getUserDetails(cookie, uid):
    """Return a dict of details for user `uid`, or None when absent.

    `cookie` is accepted for interface compatibility but unused here.
    """
    session = Session()
    try:
        # .first() returns None for no match; the original used .one(),
        # which raises NoResultFound and made the None-check unreachable.
        user = session.query(User).filter_by(id=uid).first()
        if user is None:
            return None
        return {"id": user.id, "handle": user.handle, "name": user.name,
                "email": user.email, "admin": user.admin}
    finally:
        # Release the session whether or not a row was found.
        session.close()
def get_task_list(self, start_task_id=0, limit=30, q="", t="", a="", order=db.Task.createtime, dis=db.desc, all=False):
    """Return up to `limit` tasks, filtered and keyset-paginated.

    q      -- substring match against taskname or tags (takes precedence over t)
    t      -- exact tag match (tags stored pipe-delimited: |tag|)
    a      -- filter by creator
    order  -- column used for ordering and pagination cursor
    dis    -- sort direction (db.desc or db.asc)
    all    -- include invalid tasks when True
    NOTE: `all` shadows the builtin but is part of the public signature.
    """
    session = Session()
    # Record when the task list was last requested (used elsewhere).
    self._last_get_task_list = self.time()
    # base query
    query = session.query(db.Task)
    # query or tags
    if q:
        query = query.filter(db.or_(db.Task.taskname.like("%%%s%%" % q), db.Task.tags.like("%%%s%%" % q)))
    elif t:
        query = query.filter(db.Task.tags.like("%%|%s|%%" % t));
    # author query
    if a:
        query = query.filter(db.Task.creator == a)
    # next page offset: keyset pagination anchored on the order column of
    # the task identified by start_task_id.
    if start_task_id:
        value = session.query(order).filter(db.Task.id == start_task_id).first()
        if not value:
            # Unknown cursor task: return an empty page.
            return []
        if dis == db.desc:
            query = query.filter(order < value[0])
        else:
            query = query.filter(order > value[0])
        # Tie-break on id so rows sharing the order value aren't repeated.
        query = query.filter(db.Task.id < start_task_id)
    # order or limit
    if not all:
        query = query.filter(db.Task.invalid == False)
    query = query.order_by(dis(order), dis(db.Task.id)).limit(limit)
    return query.all()
def main(notify):
    """Write a gexf graph of subreddits, edged by description cross-links."""
    graph = nx.Graph()
    out_filename = "data/subreddits_edged_by_description_links.gexf"
    parser = HTMLParser()
    session = Session()
    dbi = DBIterator(query=session.query(Subreddit))
    for subreddit in dbi.results_iter():
        sub = subreddit.url.split("/")[2].lower()
        initialize_node(graph, sub)
        if not subreddit.description_html:
            continue
        html = parser.unescape(subreddit.description_html)
        for linked_sub in find_sub_links(html):
            # Repeated links increase the edge weight.
            if graph.has_edge(sub, linked_sub):
                graph[sub][linked_sub]["weight"] += 1
            else:
                graph.add_edge(sub, linked_sub, weight=1)
    nx.write_gexf(graph, out_filename)
def export(self):
    """Dump sessions, members, and per-session attendance to CSV files.

    Files land in a timestamped directory under self.export_base_dir.
    """
    export_dir = join( self.export_base_dir, datetime.now().strftime('%Y-%m-%d_%H-%M-%S') )
    try:
        mkdir(export_dir)
    except OSError:
        # Directory already exists (re-export within the same second); reuse it.
        pass
    dump_csv(Session.select(Session.name, Session.date), join(export_dir, 'sessions.csv'))
    dump_csv(Member.select(Member.tag_id, Member.name), join(export_dir, 'members.csv'))
    # One attendance CSV per session, named after the session (spaces -> underscores).
    for session in Session.select():
        dump_csv(
            SessionAttendance.select(
                SessionAttendance.session,
                SessionAttendance.member
            ).where(
                SessionAttendance.session == session.id
            ),
            join(
                export_dir,
                'session_{}.csv'.format(session.name.replace(' ', '_'))
            )
        )
    toast('Successfully exported to \n{}'.format(export_dir))
    self.update_interface()
def get_deployment(self):
    """Return (session, deployment) for the newest successfully built
    deployment of the application named in self.pargs; the deployment is
    None when nothing matches."""
    session = Session()
    query = (session.query(Deployment)
             .join(Application)
             .filter(Application.name == self.pargs.name)
             .filter(Deployment.state == DEPLOYMENT_STATE_BUILT_OK)
             .order_by(Deployment.id.desc()))
    return session, query.first()
def note_details(note_id):
    """Render the detail page for one note; 404 when it does not exist."""
    viewer = get_session()
    db_session = Session()
    note = db_session.query(Note).get(note_id)
    if note is None:
        raise HTTPError(404)
    return template('note/note.html',
                    note=note,
                    can_submit=viewer.get('authenticated'),
                    site_title=note.title)
class MemberTracker(object):
    """Tracks member identifiers for a repository via persisted RepoDocuments."""

    def __init__(self, repository_name, new_members=True, odd_hours=False):
        # Statement-terminating semicolons removed; behavior unchanged.
        # `odd_hours` is accepted for interface compatibility but unused here.
        self.session = Session()
        self.repository_name = repository_name
        self.new_members = new_members

    @classmethod
    def doc_name(cls, member_identifier):
        """Document name for a member — the identifier rendered as a string."""
        return "%s" % (member_identifier,)

    @classmethod
    def tool_name(cls):
        """Fixed tool namespace under which member documents are stored."""
        return "Detection::Behavior::Members"

    def check_if_new(self, member_identifier, add_if_new=True, alert_function=None):
        """Return True iff `member_identifier` has never been recorded.

        A new member gets an empty RepoDocument created and, when provided,
        `alert_function(member_identifier)` is invoked. `add_if_new` is part
        of the signature but not consulted — the document is always created
        by get_one_or_create.
        """
        if member_identifier is None:
            return False
        name = MemberTracker.doc_name(member_identifier)
        tool = MemberTracker.tool_name()
        repo, did_add = get_one_or_create(self.session, Repo, name=self.repository_name)
        # did_add is deliberately overwritten: only document creation matters.
        members_doc, did_add = get_one_or_create(self.session, RepoDocument, repo=repo, tool=tool, name=name)
        if did_add:
            members_doc.data = {}
            if alert_function:
                alert_function(member_identifier)
            self.session.commit()
            return True
        self.session.commit()
        return False
def _update_tasks(self, tasks):
    """Refresh status/progress for `tasks` in batches of 100, committing once per batch."""
    session = Session()
    while tasks:
        nm_list = []  # ids of normal downloads
        bt_list = []  # ids of bt/magnet downloads
        for task in tasks[:100]:
            if task.task_type in ("bt", "magnet"):
                bt_list.append(task.id)
            else:
                nm_list.append(task.id)
        for res in self.xunlei.get_task_process(nm_list, bt_list):
            task = self.get_task(res['task_id'])
            if not task:
                continue
            task.status = res['status']
            task.process = res['process']
            if task.status == "failed":
                task.invalid = True
            if res['cid'] and res['lixian_url']:
                task.cid = res['cid']
                task.lixian_url = res['lixian_url']
            if task.status in ("downloading", "finished"):
                # If the file list can't be refreshed yet, force the task back
                # to "downloading" so it gets retried on a later pass.
                if not self._update_file_list(task):
                    task.status = "downloading"
            session.add(task)
        tasks = tasks[100:]
        session.commit()
def test_12_interests(self):
    """A freshly constructed EventInterests row must match the stored one."""
    session = Session()
    event = session.query(Event).filter_by(name='Race for the Cure').first()
    built = EventInterests("Youth", event.id)
    stored = session.query(EventInterests).filter_by(event_id=event.id).first()
    self.assertTrue(built.interest == stored.interest)
    session.close()
def num_songs_queued():
    """Returns the number of songs that are queued"""
    session = Session()
    count = session.query(Packet).filter_by(player_name=PLAYER_NAME).count()
    session.commit()
    return count
def on_get(self, req, resp, code):
    """Email-verification endpoint: mark the user holding `code` as verified.

    NOTE(review): this loads every user and compares in Python; filtering the
    query on the code column would avoid a full-table scan — confirm the
    column type before changing. resp.body is only set on a match.
    """
    users = Session.query(User).all()
    for user in users:
        if str(user.code) == str(code):
            user.emailVerified = True
            resp.body = "SUCCESS"
    Session.commit()
def main(notify):
    # Sync reddit liked/disliked posts for users who expose public likes.
    # Python 2 code (print statement). `notify` is accepted but unused here.
    session = Session()
    gen = session.query(User) \
        .join(UserMeta) \
        .filter(User.reddit_id != None, \
                UserMeta.has_public_likes == UserMeta.YES, \
                UserMeta.is_active != UserMeta.NO)
    for user in gen:
        try:
            praw_user = r.get_redditor(user.name)
            user.update_from_praw(praw_user)
            for praw_post in praw_user.get_liked(limit=100):
                update(session, user, praw_post, 1)
            for praw_post in praw_user.get_disliked(limit=100):
                update(session, user, praw_post, -1)
            user.meta.has_public_likes = UserMeta.YES
        except praw.requests.exceptions.HTTPError as e:
            print str(e)
            # 403: likes are private; 404: account gone.
            if '403' in str(e):
                user.meta.has_public_likes = UserMeta.NO
            elif '404' in str(e):
                user.meta.is_active = UserMeta.NO
        # NOTE(review): per-user commit assumed from the flattened source —
        # confirm the add/commit block sits inside the loop in the original.
        session.add(user.meta)
        session.add(user)
        session.commit()
    session.close()
def test_08_interests(self):
    """Build a VolunteerInterests row and fetch the stored one."""
    session = Session()
    volunteer = session.query(Volunteer).filter_by(name='Joey Wood').first()
    built = VolunteerInterests("youth", volunteer.id)
    stored = session.query(VolunteerInterests).filter_by(volunteer_id=volunteer.id).first()
    # Equality assertion deliberately left disabled:
    # self.assertTrue(built.interest == stored.interest)
    session.close()
def on_post(self, req, resp):
    """Geo-fenced sign-in: only allowed inside a fixed lat/lon bounding box.

    NOTE(review): the box constants look hard-coded for one site — confirm.
    Only the latitude checks carry failure (409) branches; a longitude out of
    range — or a missing lat/lon — falls through with no response set.
    Confirm that asymmetry is intentional. else-binding reconstructed from
    flattened source — verify against the original file.
    """
    doc = req.context['doc']
    lat = doc['lat']
    lon = doc['lon']
    date_in = datetime.datetime.utcnow()
    if lat and lon:
        user = get_user(req, resp)
        if lon >= -79.8921061:
            if lon <= -79.8833942:
                if lat <= 36.0984408:
                    if lat >= 36.0903956:
                        signin = Signin(date_in=date_in, user=user)
                        user.signedin = True
                        Session.add(signin)
                        Session.commit()
                        resp.status = falcon.HTTP_201
                        resp.location = '/signins/%s' % (signin.id)
                        req.context['result'] = {"action": "sign in", "result": "success"}
                    else:
                        resp.status = falcon.HTTP_409
                        req.context['result'] = {"action": "sign in", "result": "failure"}
                else:
                    resp.status = falcon.HTTP_409
                    req.context['result'] = {"action": "sign in", "result": "failure"}
def test_12_skills(self):
    """Build a VolunteerSkills row and fetch the stored one."""
    session = Session()
    volunteer = session.query(Volunteer).filter_by(name='Joey Wood').first()
    built = VolunteerSkills("teaching", volunteer.id)
    stored = session.query(VolunteerSkills).filter_by(volunteer_id=volunteer.id).first()
    # Equality assertion deliberately left disabled:
    # self.assertTrue(built.skill == stored.skill)
    session.close()
def test_10_neighborhoods(self):
    """Build a VolunteerNeighborhoods row and fetch the stored one."""
    session = Session()
    volunteer = session.query(Volunteer).filter_by(name='Joey Wood').first()
    built = VolunteerNeighborhoods("backbay", volunteer.id)
    stored = session.query(VolunteerNeighborhoods).filter_by(volunteer_id=volunteer.id).first()
    # Equality assertion deliberately left disabled:
    # self.assertTrue(built.neighborhood == stored.neighborhood)
    session.close()
def test_14_availability(self):
    """Build a VolunteerAvailability row and fetch the stored one."""
    session = Session()
    volunteer = session.query(Volunteer).filter_by(name='Joey Wood').first()
    built = VolunteerAvailability("Monday", volunteer.id)
    stored = session.query(VolunteerAvailability).filter_by(volunteer_id=volunteer.id).first()
    # Equality assertion deliberately left disabled:
    # self.assertTrue(built.day == stored.day)
    session.close()
def on_post(self, req, resp):
    """Create a calendar event for the authenticated user and echo it back."""
    doc = req.context['doc']
    title = doc['title']
    description = doc['description']
    from_date = doc['from_date']
    to_date = doc['to_date']
    # Dates arrive as ISO-style strings and are parsed to datetimes.
    start = datetime.datetime.strptime(from_date, "%Y-%m-%d")
    end = datetime.datetime.strptime(to_date, "%Y-%m-%d")
    user = get_user(req, resp)
    event = Event(from_date=start, to_date=end, title=title,
                  description=description, user=user)
    Session.add(event)
    Session.commit()
    resp.status = falcon.HTTP_201
    req.context['result'] = {
        "action": "add event",
        "result": "success",
        "event": {
            'id': event.id,
            'from_date': from_date,
            'to_date': to_date,
            'title': event.title,
            'description': event.description,
        },
    }
def _initialize_virtual_time(self):
    """Initializes virtual time to the latest packet arrival time"""
    session = Session()
    newest = session.query(Packet).order_by(Packet.arrival_time.desc()).first()
    if newest:
        self.virtual_time = newest.arrival_time
    session.commit()
def addPost():
    """Persist a new Movie from the posted form, then return to the index."""
    session = Session()
    movie = Movie(title=request.form.get('title'),
                  year=request.form.get('year'))
    session.add(movie)
    session.commit()
    return redirect('/')
def log(user, message):
    """Append an audit-log entry for `user`, unless logging is disabled."""
    if not LOGGING_ENABLED:
        return
    session = Session()
    # Distinct local name: the original rebound the `message` parameter to
    # the ORM object, shadowing the text it was constructed from.
    entry = AuditLogMessage(user=user, message=message, player_name=PLAYER_NAME)
    session.add(entry)
    session.commit()
def get_album(album):
    """Return {'query': album, 'results': [...]}, tracks ordered by number then path."""
    songs = []
    if album:
        session = Session()
        rows = (session.query(Song)
                .filter_by(album=album)
                .order_by(Song.tracknumber, Song.path)
                .all())
        session.commit()
        songs = [row.dictify() for row in rows]
    return {'query': album, 'results': songs}
def list_movies():
    """List all movies, optionally filtered by the `year` query parameter."""
    year = request.args.get('year')
    session = Session()
    query = session.query(Movie)
    if year:
        query = query.filter(Movie.year == year)
    movies = query.all()
    return render_template('list-movies.html', movies=movies)
def get_skills(id):
    """Return the list of skill strings recorded for volunteer `id`.

    `id` shadows the builtin but is kept for interface compatibility.
    """
    s = Session()
    try:
        # Comprehension replaces the manual append loop.
        return [row.skill for row in
                s.query(VolunteerSkills).filter_by(volunteer_id=id)]
    finally:
        # Close even if the query raises, so sessions are not leaked.
        s.close()
def get_neighborhoods(id):
    """Return the list of neighborhood strings recorded for volunteer `id`.

    `id` shadows the builtin but is kept for interface compatibility.
    """
    s = Session()
    try:
        # Comprehension replaces the manual append loop.
        return [row.neighborhood for row in
                s.query(VolunteerNeighborhoods).filter_by(volunteer_id=id)]
    finally:
        # Close even if the query raises, so sessions are not leaked.
        s.close()
def get_interests(id):
    """Return the list of interest strings recorded for volunteer `id`.

    `id` shadows the builtin but is kept for interface compatibility.
    """
    s = Session()
    try:
        # Comprehension replaces the manual append loop.
        return [row.interest for row in
                s.query(VolunteerInterests).filter_by(volunteer_id=id)]
    finally:
        # Close even if the query raises, so sessions are not leaked.
        s.close()
class TestStore(TestCase):
    """CRUD and relationship tests for StoreModel / ItemModel."""

    def setUp(self):
        # Fresh schema and session for every test.
        reset_db()
        self.sess = Session()

    def tearDown(self):
        self.sess.close()

    def test_create_store(self):
        # A brand-new store starts with zero items.
        store = StoreModel(name='testStore')
        self.assertEqual(store.items.count(), 0)

    def test_crud(self):
        # Insert then delete a store, checking the row count at each step.
        store = StoreModel(name='testStore')
        self.assertEqual(self.sess.query(StoreModel).filter(StoreModel.name=='testStore').count(), 0)
        self.sess.add(store)
        self.sess.commit()
        self.assertEqual(self.sess.query(StoreModel).filter(StoreModel.name=='testStore').count(), 1)
        self.sess.delete(store)
        self.sess.commit()
        self.assertEqual(self.sess.query(StoreModel).filter(StoreModel.name=='testStore').count(), 0)

    def test_store_relationship(self):
        # An item created with store_id=1 should be retrievable by that id.
        store = StoreModel(name='testStore')
        item = ItemModel(name='testItem', price=12.99, store_id=1)
        self.sess.add(store)
        self.sess.add(item)
        self.sess.commit()
        items = self.sess.query(ItemModel).filter(ItemModel.store_id==1).all()
        self.assertEqual(len(items), 1)
        self.assertEqual(items[0].name, 'testItem')

    def test_store_json(self):
        # json() on an unsaved store reports its name and an empty item list.
        store = StoreModel(name='testStore')
        expected = {
            'name':'testStore',
            'items':[],
        }
        self.assertEqual(store.json(), expected)
class Parse_Data(object):
    """Extracts plot, year, and budget from raw MediaWiki movie responses,
    tokenizes plots, and persists a word -> movie-ids inverted index.

    NOTE(review): structure reconstructed from flattened source — verify
    indentation against the original file.
    """
    # Class-level session shared by all instances — presumably intentional;
    # confirm before reuse across threads.
    session = Session()
    # word -> list of movie ids whose plot contains that word.
    word_movies_dict = {}

    async def start(self):
        for movie in self.session.query(MovieModel).all():
            res = movie.response
            # Grab the text between "== Plot ==" and the next section header.
            tmp = re.search(r"==\s?Plot\s?==.*?[^=]==[^=]", res)
            if tmp:
                tmp = tmp[0]
                tmp = re.sub(r"==\s?Plot\s?==", "", tmp)
                tmp = initial_cleanup_re.sub(" ", tmp)
                movie.plot = tmp
                # Strip citations, file links, URLs, escape sequences,
                # punctuation and digits before tokenizing.
                text = tmp.lower()
                text = re.sub(r"<ref>.*?</ref>", " ", text)
                text = re.sub(r"\[\[File.*?\]\]", " ", text)
                text = re.sub(r"http.*?\s", " ", text)
                text = text.replace("\\r", " ").replace("\\n", " ")
                text = re.sub(r"[^\w\s]", " ", text)
                text = re.sub(r"rt\s", " ", text)
                text = re.sub(r"rt\t", " ", text)
                text = re.sub(r"\d+", " ", text)
                # Drop stopwords (`stop` defined elsewhere in the module).
                text = " ".join(
                    [word for word in text.split() if word not in stop])
                tokenized_text = word_tokenize(text)
                movie.tokenized_plot = ",".join(tokenized_text)
                movie.unique_tokenized_plot = ",".join(
                    list(set(tokenized_text)))
                # Accumulate the inverted index in memory.
                for word in tokenized_text:
                    tmp = self.word_movies_dict.get(word, [])
                    tmp.append(movie.id)
                    self.word_movies_dict[word] = tmp
            else:
                movie.plot = None
                movie.tokenized_plot = None
                movie.unique_tokenized_plot = None
            # Release year: first 4-digit run inside the "released = ..." field.
            tmp = re.search(r"released\s*=\s*.*?\\n", res)
            if tmp:
                tmp = re.search(r"\d{4}", tmp[0])
                if tmp:
                    movie.year = tmp[0]
                else:
                    movie.year = None
            else:
                movie.year = None
            # Budget: raw value with trailing refs/comments/estimates chopped.
            tmp = re.search(r"budget\s*=.*?\\n", res)
            if tmp:
                tmp = tmp[0][tmp[0].find("=") + 1:]
                index = tmp.find("<ref")
                if index > -1:
                    tmp = tmp[:index]
                index = tmp.find("<!--")
                if index > -1:
                    tmp = tmp[:index]
                index = tmp.find("(est")
                if index > -1:
                    tmp = tmp[:index]
                index = tmp.find('("est')
                if index > -1:
                    tmp = tmp[:index]
                movie.budget = tmp.strip()
            else:
                movie.budget = None
        self.session.commit()
        english_words = ""
        with open("words.txt") as word_file:
            english_words = set(word.strip().lower() for word in word_file)
        # Persist the inverted index; keep words longer than two characters
        # or words found in the English word list.
        self.session.add_all([
            WordMoviesModel(
                movie_ids=",".join(
                    [str(num) for num in self.word_movies_dict[word]]),
                word=word,
            )
            for word in self.word_movies_dict.keys()
            if word and ((len(word) > 2) or (word in english_words))
        ])
        self.session.commit()
def main():
    """Print every Interval stored in the database."""
    session = Session()
    intervals = session.query(Interval).all()
    for interval in intervals:
        print(interval)
def remove_user(self, uid):
    """Remove `uid` from this event's participant mapping and persist.

    Returns False when the event does not exist. A uid that was never a
    participant is now ignored (the original raised KeyError). The short
    sleeps are kept from the original and appear to paper over a timing
    issue — TODO confirm before removing them.
    """
    session = Session()
    # Single query; the original ran the identical query twice.
    event = session.query(Event).filter_by(eid=self.eid).first()
    if event is None:
        return False
    tdic = event.participants.copy()
    time.sleep(0.05)
    # pop() with default avoids KeyError for an absent participant.
    tdic.pop(uid, None)
    time.sleep(0.05)
    event.participants = tdic.copy()
    time.sleep(0.05)
    session.commit()
    session.close()
def _db_session(ctx):
    # Attach a fresh database session to the given context object.
    ctx.db = Session()
import ConfigParser import requests from datetime import datetime, date from pprintpp import pprint as pp from db import Session, User, AppSettings import sqlalchemy.orm.exc import time import mandrill # Config Config = ConfigParser.ConfigParser() Config.read("config.ini") MORPHIO_API_KEY = Config.get("MORPH.IO", "API_KEY") MANDRILL_API_KEY = Config.get("MANDRILL", "API_KEY") db = Session() def get_updated_lobbyists(last_run_date): """Get a list of updated lobbyists from all of the available registers based on their last updated date. Parameters ---------- last_run_date : datetime The date of the last time lobbyist-alert ran. """ response = requests.get( "https://api.morph.io/keithamoss/lobbyists-registers/data.json", params={ "key": MORPHIO_API_KEY,
async def on_reaction_add(self, reaction, user):
    """Drive the sidebar-update workflow from emoji reactions.

    Reactions on the currently tracked preview message:
      camera   — replace the image URL
      pencil   — replace the caption
      100      — replace the stats line
      check    — apply the update to the subreddit stylesheet and archive it
      no-entry — cancel the update
    self.update_content layout: [message id, image url, caption, stats, message].
    NOTE(review): structure reconstructed from flattened source — verify
    indentation (especially else/with bodies) against the original file.
    """
    author = reaction.message.author
    if reaction.message.id in self.active_updates and reaction.emoji in self.image_reactions and user != self.bot.user:
        if reaction.emoji == '\U0001F4F7':
            # Camera: prompt for a new image URL.
            await self.update_content[4].delete()
            self.new_url_info = await reaction.message.channel.send('Okay, what\'s the new image url?')
            def check(m):
                # Only accept a URL-looking message from the same user.
                return m.content.startswith('http') and m.author == user
            self.msg = await self.bot.wait_for('message', check=check)
            await self.new_url_info.delete()
            await self.msg.delete()
            self.image = self.image_resize(self.msg.content)
            self.embed = self.embed_message('Image has been changed','Are you sure you want to change the image to this?',self.update_content[2],self.update_content[3])
            with open('image.png', 'rb') as f:
                self.new_url_msg = await self.msg.channel.send(embed=self.embed, file=discord.File(f))
            for emoji in self.image_reactions:
                await self.new_url_msg.add_reaction(emoji)
            self.update_content = [self.new_url_msg.id,self.msg.content,self.update_content[2],self.update_content[3],self.new_url_msg]
            self.active_updates.append(self.new_url_msg.id)
        elif reaction.emoji == '\u270F':
            # Pencil: prompt for a new caption.
            await self.update_content[4].delete()
            self.new_cap_info = await reaction.message.channel.send('Okay, what\'s the new caption?')
            def check(m):
                return m.author == user
            self.msg = await self.bot.wait_for('message', check=check)
            await self.new_cap_info.delete()
            await self.msg.delete()
            self.image = self.image_resize(self.update_content[1])
            self.embed = self.embed_message('Caption has been changed','Are you sure you want to change the caption to this?',self.msg.content,self.update_content[3])
            with open('image.png', 'rb') as f:
                self.new_cap_msg = await self.msg.channel.send(embed=self.embed, file=discord.File(f))
            for emoji in self.image_reactions:
                await self.new_cap_msg.add_reaction(emoji)
            self.update_content = [self.new_cap_msg.id,self.update_content[1],self.msg.content,self.update_content[3],self.new_cap_msg]
            self.active_updates.append(self.new_cap_msg.id)
        elif reaction.emoji == '\U0001F4AF':
            # 100: prompt for a new stats line.
            await self.update_content[4].delete()
            self.new_stats_info = await reaction.message.channel.send('Okay, what are the new stats?')
            def check(m):
                # Stats replies must start with a digit and come from the same user.
                self.content = m.content[0]
                return self.content[0].isdigit() and m.author == user
            self.msg = await self.bot.wait_for('message', check=check)
            await self.new_stats_info.delete()
            await self.msg.delete()
            self.image = self.image_resize(self.update_content[1])
            self.embed = self.embed_message('Stats have been changed','Are you sure you want to change the stats to this?',self.update_content[2],self.msg.content.upper())
            with open('image.png', 'rb') as f:
                self.new_stats_msg = await self.msg.channel.send(embed=self.embed, file=discord.File(f))
            for emoji in self.image_reactions:
                await self.new_stats_msg.add_reaction(emoji)
            self.update_content = [self.new_stats_msg.id,self.update_content[1],self.update_content[2],self.msg.content.upper(),self.new_stats_msg]
            self.active_updates.append(self.new_stats_msg.id)
        elif reaction.emoji == '\u2705':
            # Check mark: apply the pending update.
            await self.update_content[4].delete()
            self.new_confirm_info = await reaction.message.channel.send('Okay, updating now!')
            self.image = self.image_resize(self.update_content[1])
            self.embed = discord.Embed(title='Sidebar Image Updated', description='[Click here to view your changes!](http://reddit.com/r/nba)', color=16753920)
            self.embed.add_field(name="Sidebar caption:", value=self.update_content[2], inline=False)
            self.embed.add_field(name="Sidebar stats:", value=self.update_content[3], inline=False)
            self.embed.add_field(name='Command Invoker', value=user.mention)
            #Update Sidebar Image Code
            stylesheet = self.subreddit.stylesheet
            stylesheet.upload('sb','image.png')
            # Update Sidebar Caption Code
            stylesheet = self.subreddit.wiki['config/stylesheet'].content_md
            lines = stylesheet.splitlines()
            # Stats are split into pairs of words, one pair per stat box.
            if self.update_content[3] == 'none':
                stats = []
            else:
                stats = self.update_content[3].upper().split(' ')
                stats = [" ".join(stats[i:i+2])
                         for i in range(0, len(stats), 2)]
            # Stat boxes fill from the right: offset maps index -> box number.
            offset = (4-len(stats))+1
            if stats:
                for index,stat in enumerate(stats):
                    for i,line in enumerate(lines):
                        if line.startswith('.side a[href^="/stat'+str(index+offset)+'"]:before {'):
                            lines[i] = '.side a[href^="/stat'+str(index+offset)+'"]:before { content: "'+stat+'"; }'
            for i,v in enumerate(lines):
                # Caption
                if v.startswith('.side .spacer:nth-of-type(1):before'):
                    lines[i+1] = ' content: "'+self.update_content[2]+'";'
                # Hide/Show stat boxes
                if v.startswith('.side a[href^="/stat1"] {'):
                    if len(stats) <= 3:
                        lines[i+1] = ' display: none;'
                    elif len(stats) > 3:
                        lines[i+1] = ' display: inline-block;'
                if v.startswith('.side a[href^="/stat2"] {'):
                    if len(stats) <= 2:
                        lines[i+1] = ' display: none;'
                    elif len(stats) > 2:
                        lines[i+1] = ' display: inline-block;'
                if v.startswith('.side a[href^="/stat3"] {'):
                    if len(stats) <= 1:
                        lines[i+1] = ' display: none;'
                    elif len(stats) > 1:
                        lines[i+1] = ' display: inline-block;'
                if v.startswith('.side a[href^="/stat4"] {'):
                    if len(stats) <= 0:
                        lines[i+1] = ' display: none;'
                    elif len(stats) > 0:
                        lines[i+1] = ' display: inline-block;'
            sheet = ""
            for line in lines:
                sheet+=(line+'\n')
            self.subreddit.stylesheet.update(sheet)
            await self.new_confirm_info.delete()
            with open('image.png', 'rb') as f:
                self.final_msg = await reaction.message.channel.send(embed=self.embed, file=discord.File(f))
            # Archive Sidebar Code
            # Move image to archive folder and add timestamp
            #filename = 'archive/image'+datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")+'.png'
            #os.rename('image.png',filename)
            #Create a new SQL object
            sb = SidebarImages(stats=self.update_content[3], caption=self.update_content[2],mod=user.name)
            session = Session()
            session.add(sb)
            session.commit()
            session.close()
            self.update_content = [self.final_msg.id,self.update_content[1],self.update_content[2],self.update_content[3],self.final_msg]
            self.active_updates.append(self.final_msg.id)
        elif reaction.emoji == '\U0001F6AB':
            # No-entry: cancel the pending update.
            await self.update_content[4].delete()
            self.new_cancel_info = await reaction.message.channel.send('Okay, cancelling the update.')
            self.embed = discord.Embed(title='Sidebar Image Update Cancelled', description='Type !image to try again.', color=16753920)
            self.embed.add_field(name='Command Invoker', value=user.mention)
            await self.new_cancel_info.delete()
            self.cancel_msg = await reaction.message.channel.send(embed=self.embed)
            self.update_content = [self.cancel_msg.id,self.update_content[1],self.update_content[2],self.update_content[3],self.cancel_msg]
            self.active_updates.append(self.cancel_msg.id)
def delete(self):
    """Delete this row in a fresh session; always release the session."""
    sess = Session()
    try:
        sess.delete(self)
        sess.commit()
    finally:
        # Close even when delete/commit raises, so sessions are not leaked.
        sess.close()
from csv import reader from db import Session, engine from rebrickable_db.models import Base from rebrickable_db.models.colors import Color from rebrickable_db.models.inventories import Inventory from rebrickable_db.models.inventory_parts import InventoryPart from rebrickable_db.models.inventory_sets import InventorySet from rebrickable_db.models.part_categories import PartCategory from rebrickable_db.models.parts import Part from rebrickable_db.models.sets import Set from rebrickable_db.models.themes import Theme Base.metadata.drop_all(bind=engine) Base.metadata.create_all(bind=engine) session = Session() data = { 'colors.csv': Color, 'inventories.csv': Inventory, 'inventory_parts.csv': InventoryPart, 'inventory_sets.csv': InventorySet, 'part_categories.csv': PartCategory, 'parts.csv': Part, 'sets.csv': Set, 'themes.csv': Theme, } for filename, typ in data.items(): print('Treating %s ...' % filename) csv_reader = reader(open(os.path.join('rebrickable_data', filename), encoding='utf-8'))
import asyncio from readfile import get_file from db import CepSerial, Session from time import time from aiohttp import ClientSession Session = Session() ms = CepSerial() def save_result(result: dict) -> bool: try: cep = ms.load(result, session=Session) Session.add(cep) Session.commit() return True except Exception: Session.rollback() return False async def fetch(session: ClientSession, url: str) -> dict: headers = {"Accept": "application/json"} async with session.get(url, headers=headers) as response: return await response.json() async def get_cep(cep: int = None, number: int = 0) -> None: url = f"http://cep.la/{cep}" async with ClientSession() as client: result = await fetch(session=client, url=url)
def clear():
    """Dispose of the current scoped session (Session appears to be a scoped_session registry — confirm)."""
    Session.remove()
def rollback():
    """Roll back the current session's transaction."""
    Session.rollback()
def commit():
    """Commit the current session's transaction."""
    Session.commit()
def save(self, product: Product):
    """Add `product` to a new session and commit the insert.

    The session is deliberately left open so the committed instance stays
    attached for the caller.
    """
    db = Session()
    db.add(product)
    db.commit()
class TestUser(TestCase):
    """CRUD round-trip test for UserModel."""

    def setUp(self):
        # Fresh schema and session for every test.
        reset_db()
        self.sess = Session()

    def tearDown(self):
        self.sess.close()

    def test_crud(self):
        # Insert then delete a user, checking the row count at each step.
        user = UserModel(name='testUser', password='******')
        self.assertEqual(self.sess.query(UserModel).count(), 0)
        self.sess.add(user)
        self.sess.commit()
        self.assertEqual(self.sess.query(UserModel).count(), 1)
        self.sess.delete(user)
        self.sess.commit()
        self.assertEqual(self.sess.query(UserModel).count(), 0)
class Eastwood(object):
    """
    Everything you need to keep safe in the world wide web
    Retrieves all domains from zonefiles.io - daily, parses them against the
    target monitoring brands, alerts on similar domains.

    NOTE(review): structure reconstructed from flattened source — verify
    indentation and the split log string in monitor_brands against the
    original file.
    """

    def __init__(self):
        logging.basicConfig()
        self.logger = logging.getLogger('Eastwood')
        self.logger.setLevel(logging.INFO)
        # Number of attempts for each DB operation before giving up.
        self.db_max_retries = 3
        self.db = Session()
        """ Load Config and things. """
        with open(getenv('CONFIG_PATH', '/src/config/config.json')) as config_data:
            self.config = json.load(config_data)

    def get_db_entry(self, record):
        # Fetch an existing Domain row by name, retrying on DB errors.
        for i in range(0, self.db_max_retries):
            try:
                existing_domain = self.db.query(Domain).filter(
                    Domain.domain == record['domain']).first()
            except Exception as e:
                self.logger.info("(Attempt {} Error querying db {}".format(
                    i, e))
                continue
            break
        return existing_domain

    def add_db_entry(self, record, similarity):
        # Insert a Domain row tagged with the similarity kind, with retries.
        for i in range(0, self.db_max_retries):
            try:
                self.db.add(Domain(record['domain'], similarity))
            except Exception as e:
                self.logger.info("(Attempt {} Error adding to db {}".format(
                    i, e))
                self.db.rollback()
                continue
            self.db.commit()
            break
        return

    def update_db_entry(self, record):
        # Update the stored row matching record['domain'], with retries.
        for i in range(0, self.db_max_retries):
            try:
                entry = self.db.query(Domain).filter(
                    Domain.domain == record['domain']).first()
                self.db.query(Domain).filter(
                    Domain.id == entry.id).update(record)
            except Exception as e:
                self.logger.info("(Attempt {} Error updating {}".format(i, e))
                self.db.rollback()
                continue
            self.db.commit()
            break
        return

    def send_to_slack(self, record, match=False):
        # Add URL Defanging to prevent slack crawl.
        slack_msg = "Similar brand registration detected {}\n```".format(
            defang(record['domain']))
        if match:
            slack_msg = "*Brand registration detected {}*\n```".format(
                defang(record['domain']))
        # Append each non-domain field as "Key: value" lines.
        for k, v in record.items():
            if k == 'domain':
                continue
            if len(v) >= 1:
                if "," in v:
                    v = v.replace(",", "\n ")
                slack_msg += '\n{}: {}'.format(k.title(), v)
        slack_msg += '```'
        data = {
            'text': slack_msg,
            'username': '******',
            'icon_emoji': ':male-detective:'
        }
        response = requests.post(self.config['SLACK_WEBHOOK'],
                                 data=json.dumps(data),
                                 headers={'Content-Type': 'application/json'})
        self.logger.debug('Response: ' + str(response.text))
        self.logger.debug('Response code: ' + str(response.status_code))

    def monitor_brands(self, updates_only=True):
        # Poll zonefiles.io forever, alerting on exact or near brand matches.
        if updates_only:
            ZF_URL = "{}{}{}{}".format(self.config['ZF_URL'],
                                       self.config['ZF_API_KEY'],
                                       "/updatedata/",
                                       self.config['ZF_ZONE'])
            self.logger.info(
                "Retrieving only new domains <24hrs: {}".format(ZF_URL))
        else:
            ZF_URL = "{}{}{}{}".format(self.config['ZF_URL'],
                                       self.config['ZF_API_KEY'],
                                       "/fulldata/",
                                       self.config['ZF_ZONE'])
            self.logger.info(
                "Retrieving all domains for this zone: {}".format(ZF_URL))
        while True:
            try:
                r = requests.get(ZF_URL, verify=False, stream=True)
                for chunk in r.iter_lines(chunk_size=8096):
                    decoded_content = chunk.decode('utf-8')
                    self.logger.debug(decoded_content)
                    cr = csv.reader(decoded_content.splitlines(),
                                    delimiter=',')
                    my_list = list(cr)
                    # There's a lot of cleanup to be done here. RE DB Transactions.
                    self.logger.debug("Retrieved {} domains.".format(
                        len(my_list)))
                    for row in my_list:
                        # Generic struct for results to update record.
                        # Since we don't know what we'll actually get back.
                        try:
                            record = {
                                'domain': row[0],
                                'nsrecord': row[1],
                                'ipaddress': row[2],
                                'geo': row[3],
                                'webserver': row[5],
                                'hostname': row[6],
                                'dns_contact': row[7],
                                'alexa_traffic_rank': row[8],
                                'contact_number': row[9],
                            }
                        except IndexError:
                            self.logger.info(
                                "Error parsing result! {}".format(row))
                        # Remove empty results
                        record = {
                            k: v
                            for k, v in record.items() if v is not None
                        }
                        # Strip TLD to compare
                        domain_name = row[0].split('.')[0]
                        for brand in self.config['MONITORED_BRANDS']:
                            """ check if our brands are in the domain name, at all """
                            if brand in domain_name:
                                self.logger.info(
                                    "Brand name detected: {}".format(record))
                                self.logger.debug(
                                    "Checking if Entry exists in db")
                                existing_domain = self.get_db_entry(record)
                                if not existing_domain:
                                    self.add_db_entry(record, 'match')
                                    try:
                                        self.send_to_slack(record, True)
                                    except Exception as e:
                                        self.logger.info(
                                            "ERROR Sending to slack! {}".
                                            format(e.message))
                                    record.update({'alerted': 'True'})
                                    self.update_db_entry(record)
                            """ check levenshtein distance """
                            if distance(str(domain_name), str(brand)) < 3:
                                self.logger.info(
                                    "Similar name (distance): {}".format(
                                        record))
                                self.logger.debug(
                                    "Checking if Entry exists in db")
                                existing_domain = self.get_db_entry(record)
                                if not existing_domain:
                                    self.add_db_entry(record, 'similar')
                                    # if we're backfilling we don't want to spam.
                                    if updates_only:
                                        try:
                                            self.send_to_slack(record)
                                        except Exception as e:
                                            self.logger.info(
                                                "Slack exception {}".format(
                                                    e.message))
                                    record.update({'alerted': 'True'})
                                    self.update_db_entry(record)
            except Exception as e:
                self.logger.info("Error Downloading File!: {}".format(e))
                continue
            self.logger.info("Sleeping for {}..".format(
                self.config['SLEEP_TIME']))
            time.sleep(self.config['SLEEP_TIME'])
def save(self):
    """Persist this object using a short-lived session.

    Adds the instance and commits.  The session is always closed —
    the original leaked it (and its DB connection) whenever
    ``add``/``commit`` raised.
    """
    sess = Session()
    try:
        sess.add(self)
        sess.commit()
    finally:
        # Release the connection even on failure.
        sess.close()
def setUp(self):
    """Start each test from a clean database with a fresh session."""
    reset_db()
    self.sess = Session()
def find_by_name(self, name: str):
    """Return the first Product whose name matches the LIKE pattern *name*.

    Returns ``None`` when nothing matches.
    """
    session = Session()
    match = session.query(Product).filter(
        Product.name.like(name)).first()
    # Commit ends the transaction; the session is intentionally left
    # open so the (commit-expired) instance can still refresh lazily.
    session.commit()
    return match
def get_traffic_usage(period, verbose=False, domain_filter_fns=None):
    """Aggregate per-spider traffic/request/product stats over *period*.

    :param period: passed to ``get_crawl_dates_from_period`` to pick the
        crawl dates to aggregate
    :param verbose: print a diagnostic when a spider has no known domain
    :param domain_filter_fns: optional predicates; a spider is skipped when
        any predicate rejects its (fixed) domain
    :returns: dict keyed by spider name with summed ``traffic``,
        ``requests`` and ``products`` counters plus spider metadata
    """
    if domain_filter_fns is None:
        domain_filter_fns = []
    db_session = Session()
    domains = get_all_spider_domains(db_session, verbose)
    crawl_dates = get_crawl_dates_from_period(period)
    accounts = db_session.query(Account).filter(Account.enabled == True)
    accounts = {a.id: a for a in accounts}
    spiders = db_session.query(Spider)\
        .filter(Spider.enabled == True)\
        .filter(Spider.account_id.in_(accounts.keys()))
    res = {}
    for spider in spiders:
        if spider.name in SPIDER_EXCEPTIONS:
            continue
        spider_domain = domains.get(spider.name)
        if not spider_domain:
            if verbose:
                # BUGFIX: was a Python 2 `print` statement — a syntax
                # error under Python 3, which the rest of the file targets.
                print("ERROR: couldn't find domain for spider %s" % spider.name)
            continue
        spider_domain = fix_domain(spider_domain)
        filter_out = False
        for fn in domain_filter_fns:
            if not fn(spider_domain):
                filter_out = True
                break
        if filter_out:
            continue
        for crawl_date in crawl_dates:
            crawl = db_session.query(Crawl)\
                .filter(Crawl.spider_id == spider.id)\
                .filter(Crawl.status == 'upload_finished')\
                .filter(Crawl.crawl_date == crawl_date)\
                .first()
            if not crawl:
                continue
            stats = db_session.query(CrawlStats).filter(
                CrawlStats.crawl_id == crawl.id).first()
            if not stats:
                continue
            if spider.name not in res:
                res[spider.name] = {
                    'name': spider.name,
                    'domain': fix_domain(spider_domain),
                    'parse_method': spider.parse_method,
                    'run_period': get_spider_run_period(spider),
                    'requests': [],
                    'traffic': [],
                    'products': []
                }
            # Prefer per-domain (BSM) stats from the stats JSON; fall back
            # to the crawl-level totals when the domain key is missing.
            stats_dict = json.loads(stats.stats_json)
            try:
                domain = get_spider_domain_stats_key(stats_dict, spider_domain)
            except KeyError:
                res[spider.name]['traffic'].append(float(stats.response_bytes))
                res[spider.name]['requests'].append(stats.request_count)
            else:
                res[spider.name]['traffic'].append(
                    float(stats_dict['downloader/%s/response_bytes' % domain]))
                res[spider.name]['requests'].append(
                    stats_dict['downloader/%s/request_count' % domain])
            res[spider.name]['products'].append(stats.item_scraped_count)
    for domain in res:
        res[domain]['traffic'] = sum(
            res[domain]['traffic']) if res[domain]['traffic'] else 0
        res[domain]['products'] = sum(
            res[domain]['products']) if res[domain]['products'] else 0
        res[domain]['requests'] = sum(
            res[domain]['requests']) if res[domain]['requests'] else 0
    return res
def clear_schedule_cache():
    """Delete every row of the schedule_cache table in the database."""
    db = Session()
    db.execute("DELETE FROM schedule_cache")
    db.commit()
    db.close()
# Import the Declarative objects from the db module.
from db import Session, Artists, Albums, Tracks
from db import engine
import pandas as pd

# A Session instance serves as the interface to the database.
sess = Session()

# Select Artists.name, Albums.title and Tracks.name for "Ozzy Osbourne".
# Joining the artists, albums and tracks tables is documented at:
# http://docs.sqlalchemy.org/en/latest/orm/query.html#sqlalchemy.orm.query.Query.join
query = sess.query(Artists.name, Albums.title,
                   Tracks.name.label('track')).join(
                       Albums, Tracks).filter(Artists.name == 'Ozzy Osbourne')

"""The equivalent SQL for this query is:
"SELECT artists.name, albums.title, tracks.name
FROM artists
JOIN albums ON artists.artistid = albums.artistid
JOIN tracks ON albums.albumid = tracks.albumid
WHERE artists.name = 'Ozzy Osbourne'"

Because SQLAlchemy knows which tables the fields come from, no FROM clause
is needed.  As long as the relationships are set up properly in the
Declarative objects, joining tables is as simple as naming the Declaratives.
"""
# Print the results.
from flask import Flask from flask import render_template, session, request, redirect, send_file, send_from_directory from db import Session from db import set_query, set_user, get_user, get_queries, get_coord, get_query, get_polygon_coords, get_path_coords from functional import clear_errors, init_session from TLogParser import csv_parser, is_tlog, parser from photo_processing.functional import clear_folder, save_img_list, load_img_list from photo_processing.upgrade_qual import photo_page_solution from photo_processing.mapper import MapCreator from photo_processing.water import watering from photo_processing.obj_detection import obj_detection from cv2 import imwrite import datetime db_session = Session() app = Flask(__name__) app.secret_key = '1234567' @app.route('/', methods=['POST', 'GET']) def main_page(): if 'is_logged' in session: pass else: init_session(session) return render_template('main.html', session=session) @app.route('/new-polygon', methods=['POST', 'GET']) def add_task(): if not 'is_logged' in session:
def test_empty_table(self):
    """Reading an empty backing table must still produce a sheet object."""
    result = pe.get_sheet(session=Session(), table=Pyexcel)
    assert result is not None
def join_event(self, eid, availability="default"):
    """Register this user for event *eid* with the given availability."""
    db = Session()
    # NOTE(review): `db` is opened but never used directly —
    # FunctionEvents appears to manage its own persistence; confirm
    # before removing this session.
    event = FunctionEvents.from_db(eid)
    event.add_user(self.uid, availability)
    db.close()
def show_ticket_information(message, *ticket_ids):  # NOQA: R701, C901
    """Look up Redmine ticket information and post it to Slack.

    :param message: slackbot object holding the request parameters
    :param ticket_ids: Redmine ticket numbers, optionally suffixed with
        ``#note-N`` to reference a specific journal comment
    """
    s = Session()
    channel = message.channel
    channel_id = channel._body['id']
    user = user_from_message(message, s)
    if not user:
        return
    # Only respond in channels that are mapped to a Redmine project.
    channels = s.query(ProjectChannel.id).filter(
        ProjectChannel.channels.contains(channel_id))
    if not s.query(channels.exists()).scalar():
        return
    redmine = Redmine(REDMINE_URL, key=user.api_key)
    for ticket_id in ticket_ids:
        if not ticket_id:
            continue
        noteno = None
        note_suffix = ""
        if '#note-' in ticket_id:
            ticket_id, noteno = ticket_id.split('#note-')
            note_suffix = "#note-{}".format(noteno)
        try:
            ticket = redmine.issue.get(ticket_id)
        except (ResourceNotFoundError, ForbiddenError):
            botsend(message, RESPONSE_ERROR)
            return
        proj_id = ticket.project.id
        proj_room = s.query(ProjectChannel).filter(
            ProjectChannel.project_id == proj_id).one_or_none()
        if not proj_room or channel_id not in proj_room.channels.split(','):
            botsend(
                message,
                NO_CHANNEL_PERMISSIONS.format(ticket_id, channel._body['name']))
            return
        if noteno:
            description = None
            # Journal values carry no "#note-n" id of their own, so sort
            # by id to recover the note ordering Redmine displays.
            notes = sorted(ticket.journals.values(), key=lambda d: d['id'])
            for i, v in enumerate(notes, start=1):
                if str(i) == noteno:
                    # Use the comment body if it exists.
                    if v.get('notes'):
                        description = v['notes']
                    # Empty comment bodies get a placeholder text.
                    if not description:
                        description = NO_TEXT
            # BUGFIX: when no journal entry matched `noteno`, description
            # stayed None and None was posted to Slack; use the
            # placeholder instead.
            if description is None:
                description = NO_TEXT
        else:
            # Default to the ticket's description field.
            description = ticket.description or NO_TEXT
        text = "#{ticketno}{noteno}: [{assigned_to}][{priority}][{status}] {title}".format(
            ticketno=ticket_id,
            noteno=note_suffix,
            assigned_to=getattr(ticket, "assigned_to", "担当者なし"),
            priority=getattr(ticket, "priority", "-"),
            status=getattr(ticket, "status", "-"),
            title=ticket.subject,
        )
        url = "{}{}".format(ticket.url, note_suffix)
        sc = message._client.webapi
        res = sc.chat.post_message(channel_id, "<{}|{}>".format(url, text),
                                   as_user=True)
        # Post the body as a thread reply under the link message.
        sc.chat.post_message(channel_id, description, as_user=True,
                             thread_ts=res.body['ts'])
def __init__(self):
    """Grab the event loop and open a DB session.

    Requires the class-wide ``semaphore`` attribute to be initialized
    before instantiation.
    """
    self.loop = asyncio.get_event_loop()
    # Equivalent to the original any(map(...)) over [self.semaphore].
    if self.semaphore is None:
        raise AttributeError("Initialize Class-wide variables!")
    self.session = Session()
def find_by_name(name):
    """Fetch the StoreModel with the given exact name, or None."""
    db = Session()
    result = db.query(StoreModel).filter(StoreModel.name == name).first()
    db.close()
    return result
class TestStore(testing.TestCase):
    """Integration tests for the /store and /stores endpoints."""

    def setUp(self):
        super(TestStore, self).setUp()
        reset_db()
        self.app = app
        self.sess = Session()
        # Register a user and grab a bearer token for authenticated calls.
        self.simulate_post('/register',
                           params={'name': 'test', 'password': '******'})
        auth = self.simulate_post(
            '/auth',
            params={'name': 'test', 'password': '******'},
            headers={'Content-Type': 'application/json'})
        self.headers = {
            'Authorization': 'Bearer {}'.format(auth.json['access_token'])
        }

    def tearDown(self):
        self.sess.close()

    def _create_store(self, name='testStore'):
        # Persist a StoreModel row and return it.
        store = StoreModel(name=name)
        self.sess.add(store)
        self.sess.commit()
        return store

    def _create_item(self, store):
        # Persist an ItemModel row attached to *store*.
        item = ItemModel(name='testItem', price=12.99, store_id=store.id)
        self.sess.add(item)
        self.sess.commit()

    def test_store_not_found(self):
        resp = self.simulate_get('/store/test', headers=self.headers)
        self.assertEqual(resp.status_code, 404)

    def test_store_found(self):
        self._create_store()
        resp = self.simulate_get('/store/testStore', headers=self.headers)
        self.assertEqual(resp.status_code, 200)
        self.assertDictEqual({'name': 'testStore', 'items': []}, resp.json)

    def test_store_with_items_found(self):
        store = self._create_store()
        self.sess.flush()
        self._create_item(store)
        resp = self.simulate_get('/store/testStore', headers=self.headers)
        self.assertEqual(resp.status_code, 200)
        self.assertDictEqual(
            {
                'name': 'testStore',
                'items': [{'name': 'testItem', 'price': 12.99}]
            }, resp.json)

    def test_delete_store(self):
        self._create_store()
        resp = self.simulate_delete('/store/testStore', headers=self.headers)
        self.assertEqual(resp.status_code, 200)
        self.assertDictEqual({'message': 'Store deleted'}, resp.json)

    def test_create_store(self):
        resp = self.simulate_post('/store/test', headers=self.headers)
        self.assertEqual(resp.status_code, 201)
        self.sess.query(StoreModel).filter(StoreModel.name == 'test').first()
        self.assertDictEqual({'name': 'test', 'items': []}, resp.json)

    def test_create_duplicate_store(self):
        self.simulate_post('/store/test', headers=self.headers)
        resp = self.simulate_post('/store/test', headers=self.headers)
        self.assertEqual(resp.status_code, 400)

    def test_list_store(self):
        self._create_store()
        resp = self.simulate_get('/stores', headers=self.headers)
        self.assertDictEqual(
            {'stores': [{'name': 'testStore', 'items': []}]}, resp.json)

    def test_list_store_with_item(self):
        store = self._create_store()
        self.sess.flush()
        self._create_item(store)
        resp = self.simulate_get('/stores', headers=self.headers)
        self.assertDictEqual(
            {
                'stores': [{
                    'name': 'testStore',
                    'items': [{'name': 'testItem', 'price': 12.99}]
                }]
            }, resp.json)
def update_active(_id):
    """Publish a poll and send out SMS/email passwords for taking it.

    :param _id: poll id
    """
    issued = 0
    session = Session()
    payload = request.json
    all_records = session.query(Poll).filter_by(id=_id).all()
    converter = PollSchema(many=True, only=[
        'id', 'name', 'created', 'count_of_complete', 'total_count',
        'status', 'description'
    ])
    result = converter.dump(all_records).data
    for entry in payload['email']:
        for _ in range(0, entry['copy']):
            issued += 1
            password = id_generator()
            dep = from_dep(entry['email'])
            if dep == []:
                # Fall back to a blank department/city when lookup fails.
                dep = [{'departmentUa': '', 'cityUa': ''}]
            list_count_poll = session.query(Statistics) \
                .filter(Statistics.department == dep[0]['departmentUa']) \
                .filter(Statistics.city == dep[0]['cityUa']) \
                .filter(Statistics.fk_poll == _id).all()
            if list_count_poll == []:
                # First password for this department/city: create the row.
                stat_row = Statistics(total_count=1,
                                      fk_poll=_id,
                                      department=dep[0]['departmentUa'],
                                      city=dep[0]['cityUa'])
                session.add(stat_row)
            else:
                # Otherwise just bump the existing counter.
                session.query(Statistics) \
                    .filter(Statistics.department == dep[0]['departmentUa']) \
                    .filter(Statistics.city == dep[0]['cityUa']) \
                    .filter_by(fk_poll=_id) \
                    .update({"total_count": Statistics.total_count + 1})
            pw_record = Password(password=password,
                                 department=dep[0]['departmentUa'],
                                 city=dep[0]['cityUa'],
                                 fk_poll=_id)
            session.add(pw_record)
            send_mail(entry['email'], password)
            send_sms(payload['mobile'], _id)
    for _ in payload['mobile']:
        issued += 1
    if int(_id) > 2:
        # NOTE(review): polls with id <= 2 are never marked active —
        # looks intentional, but confirm.
        session.query(Poll).filter_by(id=_id).update({"status": 'active'})
    session.query(Poll).filter_by(id=_id).update(
        {"total_count": issued + int(result[0]['total_count'])})
    session.commit()
    session.close()
    return "ok"