def load_seed_data():
    """Seed the database with the questionnaire and benefit programs.

    Builds one question model per entry in ``questions_data`` (list index
    doubles as display order), registers the fixed program list, commits,
    and releases the scoped session.
    """
    # Map each type string to its model class. Previously an unrecognised
    # type silently fell through and the raw dict itself was added to the
    # session; now we fail loudly.
    type_map = {
        'yesnoquestion': YesNoQuestion,
        'rangequestion': RangeQuestion,
        'freeresponsequestion': FreeResponseQuestion,
    }

    # load questions
    for indx, q in enumerate(questions_data):
        q_cls = type_map.get(q['type'])
        if q_cls is None:
            raise ValueError('Unknown question type: %r' % q['type'])
        question = q_cls(key=q['key'],
                         question_text=q['question_text'],
                         order=indx,
                         clarification_text=q['clarification_text'])
        question_set.append(question)
        db_session.add(question)

    # load programs
    programs = [Calfresh(), Medical(), HealthySF(), FreeSchoolMeals(), CAP(), WIC()]
    for p in programs:
        db_session.add(p)

    # commit seed data
    db_session.commit()
    db_session.remove()
def report_weather():
    """Check the inReach mailbox and reply to unanswered messages.

    Ingests new emails via Digester, then — when INREACH_RESPOND_TO_MESSAGES
    is 'YES' — sends each unanswered message a forecast, marking it answered
    only after Garmin confirms success. The scoped session is always
    released, even on error (the original leaked it when an exception
    bubbled up).
    """
    log('Checking emails.')
    try:
        Digester(db_session(),
                 host=environ['INREACH_MAIL_HOST'],
                 username=environ['INREACH_MAIL_USERNAME'],
                 password=environ['INREACH_MAIL_PASSWORD']).check_emails()
        messages = db_session.query(Message).filter(
            Message.response_sent == False).all()  # noqa: E712 — SQLAlchemy needs ==
        if environ['INREACH_RESPOND_TO_MESSAGES'] == 'YES':
            for message in messages:
                log(f'Responding to {message.text_msg_extid} @ {message.latitude}, {message.longitude}')
                query_params = message.query_params()
                response = Weatherman(message).send_forecast()
                if response.status_code == 200 and json.loads(
                        response.text)['Success'] is True:
                    # NB: json.loads may throw an error when the POST request was unsuccessful,
                    # since inreach.garmin.com returns an html "Error page", along with status code 200.
                    message.response_sent = True
                    db_session.commit()
                    log('Response sent.')
        else:
            # (was a pointless f-string with no placeholders)
            log('Not responding to new emails.')
    finally:
        db_session.remove()
def start(port):
    """Initialise the database and run the Flask app on the given port.

    The scoped session is released when the server loop exits.
    """
    init_db()
    try:
        app.run(debug=True, host='0.0.0.0', port=port)
    finally:
        print("Closing DB...")
        db_session.remove()
def get_by_id(id, fresh=False):
    """Return the Sensor with the given primary key, or None.

    :id: primary key to look up
    :fresh: when True, reset the scoped session first so the row is
        re-read from the database rather than served from the identity map
    """
    if fresh:  # `== True` comparison was redundant
        db_session.remove()
    return db_session.query(Sensor).filter(Sensor.id == id).one_or_none()
def on_success(self, data):
    """Stream callback: geocode a backslash-delimited report and upsert a Feed.

    Expects data['text'] of the form "<junk>\\<tag>\\<name>\\<address>";
    texts that don't split into exactly four parts raise ValueError and are
    silently ignored.
    """
    if 'text' in data:
        try:
            (_, tag, name, address) = data['text'].split('\\', 4)
            result = Geocoder.geocode(address)
            (lat, lng) = result[0].coordinates
            formatted_address = result[0].formatted_address
            feed = self.get_feed(name.upper(), lat, lng)
            if feed:  # Filter dupes
                logging.debug('Feed exists: %s' % (feed))
                # A reply containing one of the configured tags closes the feed.
                if tag.upper() in self.reply_tags.upper():
                    feed.state = 'closed'
                    feed.last_modified = datetime.now()
                    db_session.merge(feed)
                    db_session.commit()
                    logging.debug('Feed updated: %s' % (feed))
            else:
                f = Feed(name=name.upper(), lat=lat, lng=lng,
                         address=formatted_address)
                db_session.add(f)
                db_session.commit()
                logging.debug('Feed created: %s' % (f))
        except (SQLAlchemyError, DatabaseError) as e:
            logging.error(e)
        except ValueError as e:
            # Malformed text (wrong number of fields) — deliberately ignored.
            pass
        except (DBAPIError, Exception) as e:
            # NOTE(review): including `Exception` makes this a catch-all.
            logging.error(e)
        finally:
            db_session.remove()
def get_all(fresh=False):
    """Returns all the nodes in the database.

    :fresh: when True, reset the scoped session first so rows are re-read
        from the database instead of the identity map
    """
    if fresh:  # `== True` comparison was redundant
        db_session.remove()
    return db_session.query(Node).all()
def on_success(self, data):
    """Stream callback: geocode a backslash-delimited report and upsert a Feed.

    Near-duplicate of the other on_success variant; texts that don't split
    into exactly four parts raise ValueError and are silently ignored.
    """
    if 'text' in data:
        try:
            (_, tag, name, address) = data['text'].split('\\', 4)
            result = Geocoder.geocode(address)
            (lat, lng) = result[0].coordinates
            formatted_address = result[0].formatted_address
            feed = self.get_feed(name.upper(), lat, lng)
            if feed:  # Filter dupes
                logging.debug('Feed exists: %s' % (feed))
                # A reply containing one of the configured tags closes the feed.
                if tag.upper() in self.reply_tags.upper():
                    feed.state = 'closed'
                    feed.last_modified = datetime.now()
                    db_session.merge(feed)
                    db_session.commit()
                    logging.debug('Feed updated: %s' % (feed))
            else:
                f = Feed(name=name.upper(), lat=lat, lng=lng,
                         address=formatted_address)
                db_session.add(f)
                db_session.commit()
                logging.debug('Feed created: %s' % (f))
        except (SQLAlchemyError, DatabaseError) as e:
            logging.error(e)
        except ValueError as e:
            # Malformed text — deliberately ignored.
            pass
        except (DBAPIError, Exception) as e:
            # NOTE(review): including `Exception` makes this a catch-all.
            logging.error(e)
        finally:
            db_session.remove()
def close_session(exception=None):
    """
    Close the database connection.

    Teardown hook: releases the scoped session back to the registry.

    :param exception: whether an exception was thrown or not during the request
    """
    db_session.remove()
def shutdown_session(exception=None):
    """
    shuts down the database session

    Teardown hook: releases the scoped session back to the registry.

    :param exception: exception raised during the request, if any (unused)
    :return: None
    """
    db_session.remove()
def main():
    """Consume ids from Kafka and run each through the Keras model.

    Auto-commit is disabled and offsets are committed only after a message
    is fully processed, so a crash re-delivers the in-flight id. The DB
    session and the Kafka consumer are released on exit.
    """
    consumer = None
    try:
        consumer = KafkaConsumer(KAFKA_TOPIC,
                                 bootstrap_servers=KAFKA_BOOTSTRAP_SERVERS,
                                 auto_offset_reset='earliest',
                                 enable_auto_commit=False,
                                 group_id=KAFKA_CONSUMER_GROUP)
        session = tf.Session(graph=tf.Graph())
        with session.graph.as_default():
            keras.backend.set_session(session)
            check_model_files()
            model = load_model(MODEL_JSON_PATH, MODEL_WEIGHTS_PATH)
            logger.info("Start listing")
            for msg in consumer:
                logger.info(msg)
                msg_id = msg.value.decode()  # renamed: `id` shadowed the builtin
                logger.info("Start processing id : {}".format(msg_id))
                process_msg(msg_id, model)
                consumer.commit()
                logger.info("Finish processing id : {}".format(msg_id))
    finally:
        logger.info("closing db")
        db_session.remove()
        if consumer is not None:
            consumer.close()  # release Kafka sockets too, not just the DB
def close_spider(self, _):
    """
    Close the file object as the spider ends crawling

    Dumps the accumulated graph (when an output path is configured) and
    releases the scoped DB session.

    :param _: reference to the spider object (unused)
    """
    if JSON_OUTPUT_FILE is not None:
        self.graph.dump(JSON_OUTPUT_FILE)
    db_session.remove()
def get_printers():
    """Retrieve all printers in the database.

    :returns: list of Printer rows; the scoped session is reset first so
        stale identity-map state is discarded
    """
    db_session.remove()
    # list() drains the query directly — the manual append loop (and the
    # builtin-shadowing name `l`) were unnecessary.
    return list(db_session.query(Printer))
def get_by_id(id, fresh=False):
    """Returns a job based on id

    :id: ID of job to be found
    :fresh: when True, reset the scoped session first so the row is
        re-read from the database
    :returns: Job if id is found, None if didn't exist
    """
    if fresh:  # `== True` comparison was redundant
        db_session.remove()
    return db_session.query(Job).filter(Job.id == id).one_or_none()
def get_by_webid(webid, fresh=False):
    """Returns a job based on webid

    :webid: WebID of job to be found
    :fresh: when True, reset the scoped session first so the row is
        re-read from the database
    :returns: Job if webid is found, None if didn't exist
    """
    if fresh:  # `== True` comparison was redundant
        db_session.remove()
    return db_session.query(Job).filter(Job.webid == webid).one_or_none()
def get_by_id(id, fresh=False):
    """Returns a printer based on id

    :id: ID of printer to be found
    :fresh: when True, reset the scoped session first so the row is
        re-read from the database
    :returns: Printer if id is found, None if didn't exist
    """
    if fresh:  # `== True` comparison was redundant
        db_session.remove()
    return db_session.query(Printer).filter(Printer.id == id).one_or_none()
def get_by_webid(webid, fresh=False):
    """Returns a printer based on webid

    :webid: ID that the web interface uses
    :fresh: when True, reset the scoped session first so the row is
        re-read from the database
    :returns: Printer if webid is found, None if didn't exist
    """
    if fresh:  # `== True` comparison was redundant
        db_session.remove()
    return db_session.query(Printer).filter(Printer.webid == webid).one_or_none()
def get_by_id(id, fresh=False):
    """Returns a node based on id

    :id: ID of node to be found
    :fresh: when True, reset the scoped session first so the row is
        re-read from the database
    :returns: Node if id is found, None if didn't exist
    """
    if fresh:  # `== True` comparison was redundant
        db_session.remove()
    return db_session.query(Node).filter(Node.id == id).one_or_none()
def init_db():
    """Create and persist the seed user.

    NOTE(review): credentials are hard-coded; they should come from
    configuration.
    """
    from models import User
    from database import db_session
    d = User('neo91', '102030')
    # Bug fix: the original re-wrapped the already-scoped session with
    # scoped_session(db_session), which is wrong — scoped_session expects a
    # session *factory* (sessionmaker), not a session registry.
    db_session.add(d)
    db_session.commit()
    db_session.remove()
def db(app, request):
    """
    Returns session-wide initialised database.

    Pytest fixture: creates the schema before the tests run and tears it
    down afterwards (session released before the drop so no connection is
    still checked out).
    """
    from myapp.models import Base
    from database import engine
    # Guard against running destructive DDL on a non-test database.
    assert engine.url.database.split("_")[0] == "test"
    Base.metadata.create_all(engine)
    yield
    from database import db_session
    db_session.remove()
    Base.metadata.drop_all(engine)
def set_log(number):
    """Store the (possibly adjusted) number and return the running total.

    POST stores ``number + 1`` (action 1); GET stores ``number`` as-is
    (action 0). The response is the sum of every number stored so far,
    wrapped by ``json_result``.
    """
    logging.basicConfig(
        filename='../log/fizzbuzz_' + time.strftime("%Y%m%d") + '.log',
        level=logging.DEBUG)
    # start log
    logging.info('Started')

    request_verb = request.method
    send_number = None
    action_verb = None
    request_sum = None
    if request_verb == 'POST':
        send_number = number + 1  # add 1 to the posted number
        action_verb = 1
    elif request_verb == 'GET':
        send_number = number
        action_verb = 0

    # DB setup
    try:
        init_db()
    except Exception as e:
        # Bug fix: the original concatenated the `Exception` class itself
        # (a TypeError at log time); log the actual error instead.
        logging.warning('DB Connection Error|MSG:' + str(e))

    try:
        # insert the row
        host = Host(send_number, action_verb)
        db_session.add(host)
        db_session.commit()
        # query the sum of every number stored so far
        query = db_session.query(func.sum(Host.send_number))
        scalar = query.scalar()
        if scalar is None:
            # Bug fix: the original compared int(...) to '' (never true),
            # and int(None) would have raised before the check anyway.
            raise RuntimeError('DB Select Error')
        request_sum = int(scalar)
        db_session.remove()
    except Exception as e:
        logging.warning('DB Select Error|MSG:' + str(e))
        db_session.remove()
    return json_result(request_sum)
def populate_db():
    """
    Populates the database with the contents of the 'podcast_data.csv' file
    """
    # NOTE(review): row values are str.decode'd, so this is Python 2 code.
    with open("./db/podcast_data_dump.csv", "r") as csvfile:
        reader = csv.DictReader(csvfile)
        for row in reader:
            p = Podcast(row["name"].decode('utf8'), row["author"],
                        row["title"].decode('utf8'), row["image"],
                        row["url"])
            # if data has remote_id property then add to db
            if "remote_id" in row:
                p.remote_id = row["remote_id"]
            db_session.add(p)
    # commit changes to db
    db_session.commit()
    # close db session
    db_session.remove()
def post(self):
    """Return every message in the requested channel, with sentiment.

    Expects a JSON body with ``channelid``; responds with a JSON list of
    message dicts.
    """
    data = request.get_json()
    channel = data.get("channelid")
    messages = Message.query.filter(Message.channel_id == channel).all()
    payload = [
        {
            "id": message.id,
            "message": message.message,
            "to_user": message.to_user,
            "channel_id": message.channel_id,
            "from_user": message.from_user,
            "timestamp": message.timestamp,
            "sentiment": getSentiment(message.message),
        }
        for message in messages
    ]
    # Bug fix: build the payload *before* removing the session — accessing
    # ORM attributes after remove() risks DetachedInstanceError.
    db_session.remove()
    return make_response(jsonify(payload))
def products_outer_parser(outer_text):
    """Parse a product listing page and persist one row per product.

    Each product's detail page is fetched and merged into the row; a
    failure on one product is printed and the batch continues. The scoped
    session is reset after every product. (Python 2 code.)
    """
    # print outer_text
    outer_datas = products_outer_pattern.findall(outer_text)
    for products_id, url, name, cas_id, suppliers_url_id in outer_datas:
        try:
            products_infos = {}
            resp = _http_call(BASE_URL + url)
            products_infos['products_id'] = products_id
            products_infos['name'] = name
            products_infos['cas_id'] = cas_id
            products_infos['suppliers_url_id'] = suppliers_url_id
            products_infos.update(products_inner_parser(resp.text))
            products_info = tb_20130112_products_info(**products_infos)
            print url
            db_session.add(products_info)
            db_session.commit()
            print '已经抓取 ' + products_id
        except Exception as e:
            print traceback.format_exc()
        finally:
            db_session.remove()
def post(self):
    """Find or create the private chat channel between two users.

    Responds with JSON containing both participants and the channel name.
    """
    request_data = request.get_json()
    from_user = request_data.get('from_user', '')
    to_user = request_data.get('to_user', '')
    #to_user_channel = "private-notification_user_%s" % (to_user)
    #from_user_channel = "private-notification_user_%s" % (from_user)
    # check if there is a channel that already exists between this two user
    channel = Channel.query.filter(Channel.from_user.in_([from_user, to_user])) \
        .filter(Channel.to_user.in_([from_user, to_user])) \
        .first()
    if not channel:
        # Generate a channel...
        chat_channel = "private-chat_%s_%s" % (from_user, to_user)
        new_channel = Channel()
        new_channel.from_user = from_user
        new_channel.to_user = to_user
        new_channel.name = chat_channel
        db_session.add(new_channel)
        db_session.commit()
        db_session.remove()
    else:
        # Use the channel name stored on the database.
        # Bug fix: the original returned channel.id (an integer PK) here,
        # while the create path returns the channel *name* string.
        chat_channel = channel.name
    data = {
        "from_user": from_user,
        "to_user": to_user,
        #"from_user_notification_channel": from_user_channel,
        #"to_user_notification_channel": to_user_channel,
        "channel_name": chat_channel,
    }
    # Trigger an event to the other user
    #pusher.trigger(to_user_channel, 'new_chat', data)
    return make_response(jsonify(data))
def on_success(self, data):
    """Stream callback: geocode a slash-delimited report and upsert a Feed.

    Expects data['text'] of the form "<junk>/<tag>/<name>/<address>"; texts
    that don't split into four parts raise ValueError and are ignored.
    (Python 2 code — `print e` statements.)
    """
    if 'text' in data:
        try:
            (_, tag, name, address) = data['text'].split('/')
            result = Geocoder.geocode(address)
            (lat, long) = result[0].coordinates
            formatted_address = result[0].formatted_address
            feed = self.get_feed(name.upper(), lat, long)
            if feed:  # Filter dupes
                logging.debug('Feed exists: %s' % (feed))
                if tag.upper() in self.reply_tags.upper():
                    # The flask SQLAlchemy doesn't seem to have
                    # a method for updating. Weird.
                    # (Workaround: delete the row and insert a replacement.)
                    db_session.delete(feed)
                    db_session.commit()
                    new_feed = Feed(name=name.upper(), lat=lat, long=long,
                                    tag='safe', address=formatted_address)
                    db_session.add(new_feed)
                    db_session.commit()
                    logging.debug('Feed updated.')
            else:
                f = Feed(name=name.upper(), lat=lat, long=long,
                         address=formatted_address)
                db_session.add(f)
                db_session.commit()
                logging.debug('Feed created name=%s, lat=%s, long=%s.'\
                              'address=%s' % (name.upper(), lat, long,
                                              formatted_address))
        except DisconnectionError as e:
            # Reset the scoped session so the next callback reconnects.
            print e
            logging.error(e)
            db_session.remove()
            db_session.init()
        except ValueError as e:
            # Malformed text — deliberately ignored.
            pass
        except (DBAPIError, Exception) as e:
            # NOTE(review): including `Exception` makes this a catch-all.
            logging.error(e)
def deepgram_upload():
    """
    Uploads any new podcasts to Deepgram API.
    New podcasts are the ones where the 'remote_id' field is empty
    """
    dg = Deepgram(DEEPGRAM_API_KEY)
    podcasts_to_upload = Podcast.query.filter(Podcast.remote_id == "").all()
    for podcast in podcasts_to_upload:
        upload_response = dg.upload(podcast.url, ["podcast"])
        # check status
        while True:
            status_response = dg.check_status(upload_response["contentID"])
            if status_response["status"] == "fetch":
                # sleep 1 sec
                sleep(1)
                continue
            elif status_response["status"] in [
                    "transcode", "chunk", "awaiting_gen_lattice"
            ]:
                #
                # TODO: periodically check the server until is done. (5 secs)
                #
                break
            elif status_response["status"] == "done":
                break
            # NOTE(review): any status not listed above (e.g. an error
            # state) loops here with no sleep — a busy-wait. Confirm the
            # full status vocabulary and add a fallback.
        # update podcast with new remote_id
        podcast.remote_id = upload_response["contentID"]
        db_session.add(podcast)
    # commit changes to db
    db_session.commit()
    # close db session
    db_session.remove()
def shutdown_session(exception=None):
    """Teardown hook: release the scoped session after each request."""
    from database import db_session
    db_session.remove()
def post(self):
    """Handle the registration form POST.

    Two modes: when action == 'add_comment' the form page is re-rendered
    with the posted comment; otherwise the submission is validated and a
    new User row is created. All validation outcomes redirect to 'thanks'.
    """
    action = request.form.get('action')
    comment = request.form.get('comment')
    if action == 'add_comment':
        data = {'departments': Department.query.all(), 'comment': comment}
        if settings.XSRF_COOKIES:
            data['csrf_token'] = generate_csrf_token('register')
        return self.render_template('form.html', **data)
    else:
        """
        if 'user_id' in session:
            logging.info('have user')
            return redirect(url_for('thanks'))
        if not 'comment' in session:
            logging.info('not comment')
            return redirect(url_for('home'))
        """
        #comment = request.form['comment']
        """
        if not comment:
            logging.error('not comment')
            return redirect(url_for('home'))
        if len(comment) > 140:
            logging.error('not size comment')
            return redirect(url_for('home'))
        """
        if settings.XSRF_COOKIES:
            csrf_protect('register')
        form = RegisterForm(request.form)
        # Pre-compute uniqueness flags the form's validators consume.
        form.email_exists.data = bool(User.query.filter_by(
            email=form.email.data).count())
        form.dni_exists.data = bool(User.query.filter_by(
            dni=form.dni.data).count())
        form.cod_dpto.query = Department.query.all()
        #form.comment.data = session.get('comment')
        #form.comment.data = comment
        if form.validate():
            user = User()
            form.populate_obj(user)
            user.cod_dpto = form.cod_dpto.data.id
            user.enabled = True
            db_session.add(user)
            try:
                db_session.commit()
            except Exception as exc:
                # Commit failed: roll back and still send the user on.
                logging.error(exc)
                db_session.rollback()
                db_session.remove()
                return redirect(url_for('thanks'))
            else:
                db_session.remove()
                session['user_id'] = str(user.id)
                return redirect(url_for('thanks'))
        else:
            logging.error(form.errors)
            return redirect(url_for('thanks'))
def main():
    """Crawl the course catalog, resolve each course's instructor to a User
    row (creating one when missing), and attach an Event per course section
    that doesn't already exist."""
    db_init()
    link_counter = 0
    course_counter = 0
    links = get_category_links()
    for link in links:
        link_counter += 1
        print("########## PROCESSING COURSE CATALOG ENTRY {0} ##########".format(link_counter))
        courses = get_course_mappings(link)
        for course in courses:
            course_counter += 1
            print("########## PROCESSING COURSE {0} ##########".format(course_counter))
            courseinfo = parse_courseinfo(course)
            #print("COURSE INFO: {0}".format(courseinfo))
            # Skip courses without instructors
            if(courseinfo.get('instructor')) is None:
                continue
            inames = courseinfo.get('instructor')
            #print("INSTRUCTOR: {0}".format(inames))
            #print("DEPARTMENT: {0}".format(courseinfo.get('dept')))
            # Match by last name + first initial; fall back to department
            # filtering when that alone is ambiguous.
            instructor_query = User.query.filter(
                User.lname == inames.get('lname')).filter(
                User.fname.startswith(inames.get('fname')[0]))
            if len(instructor_query.all()) == 1:
                instructor = instructor_query.first()
            else:
                instructor = instructor_query.filter(User.dept.like(courseinfo.get('dept'))).first()
            #print("INSTRUCTOR QUERY BY NAME/DEPT: {0}".format(instructor))
            if instructor is None:
                # No local match: scrape instructor info and try ONID lookup.
                idict = get_instructor_info(courseinfo)
                print("COURSE INFO: {0}".format(courseinfo))
                print("INSTRUCTOR INFO: {0}".format(idict))
                if not idict:
                    continue
                #print("ONID: {0}".format(idict.get('ONID Username')))
                instructor = User.query.filter(User.onid == idict.get('ONID Username')).first()
                #print("INSTRUCTOR QUERY BY ONID: {0}".format(instructor))
                if instructor is None:
                    instructor = instructor_dict_to_model(idict)
                    db_session.add(instructor)
            # An event is unique per (instructor, crn, term, sec).
            event = Event.query.filter(
                Event.user.any(onid=instructor.onid)).filter(
                Event.crn == courseinfo.get('crn')).filter(
                Event.term == courseinfo.get('term')).filter(
                Event.sec == courseinfo.get('sec')).first()
            if event is None:
                #print("COURSEINFO: {0}".format(courseinfo))
                event = courseinfo_to_model(courseinfo)
                instructor.events.append(event)
                db_session.commit()
    db_session.commit()
    db_session.remove()
from database.models import Reminder

# Connect to Twilio; bail out quietly if the credentials are rejected.
try:
    client = TwilioRestClient(TWILIO_ACCOUNT, TWILIO_SECRET)
except TwilioRestException:
    exit(0)

hour = datetime.now().hour
today = date.today()

# Reminders scheduled for this hour that haven't been called yet today
# (never called, or last called on an earlier date).
try:
    reminders = db_session.query(Reminder).filter(
        Reminder.time == hour,
        or_(Reminder.last_called == None,
            cast(Reminder.last_called, Date) != today)).all()
except NoResultFound:
    exit(0)

for reminder in reminders:
    try:
        client.calls.create(to=reminder.phone_number,
                            from_=choice(TWILIO_NUMBERS),
                            url=CALLBACK_BASE_URL + "/act/gather-reminder-call/",
                            if_machine="Hangup")
        reminder.last_called = datetime.now()
        reminder.times_called = reminder.times_called + 1
    except TwilioRestException:
        # One failed call shouldn't abort the remaining reminders.
        pass
    # Throttle outgoing calls.
    sleep(2)

db_session.commit()
db_session.remove()
def after_request(response):
    """Closes the database again at the end of the request."""
    db_session.remove()
    return response
def shutdown_session(_=None):
    """
    Close db_session when closing the server
    (releases the scoped session back to the registry)
    """
    db_session.remove()
db_session.commit() logging.debug('Feed created name=%s, lat=%s, long=%s.'\ 'address=%s' % (name.upper(), lat, long, formatted_address)) except DisconnectionError as e: print e logging.error(e) db_session.remove() db_session.init() except ValueError as e: pass except (DBAPIError, Exception) as e: logging.error(e) # Want to disconnect after the first result? # self.disconnect() def on_error(self, status_code, data): logging.error(status_code, data) if __name__ == '__main__': s = SalvaVida() try: s.run() except KeyboardInterrupt: db_session.remove() # sv_daemon = runner.DaemonRunner(s) # sv_daemon.do_action()
def shutdown_session(exception=None):
    """Teardown hook: commit on a clean request, then always release the
    scoped session.

    Bug fix: the original removed the session only when an exception
    occurred and committed *without* removing on success, leaking the
    session on every normal request.
    """
    if exception is None:
        db_session.commit()
    db_session.remove()
def teardown(exception=None):
    """Release the scoped database session at the end of the request."""
    db_session.remove()
def shutdown_session(exception=None):
    """Teardown hook: release the scoped database session."""
    db_session.remove()
def shutdown_session(exception=None):
    '''
    Automatically removes database sessions at the end of the request
    or when the application shuts down.
    '''
    # NOTE: this module's scoped session is named `session`, not db_session.
    session.remove()
def call_after_request_callbacks(response):
    """Best-effort session cleanup after each request.

    A cleanup failure is printed rather than allowed to mask the response.
    """
    try:
        db_session.remove()
    except Exception as e:
        print(e)
    return response
def shutdown_session(response):
    """after_request hook: release the session and pass the response through."""
    db_session.remove()
    return response
def tearDownClass(cls):
    """Release the scoped session once after the whole test class has run."""
    db_session.remove()
def shutdown_session(exception=None):
    """Automatically closes the databases connection."""
    db_session.remove()
def shutdown_session(exception=None):
    """Release the scoped session, but only when a database was attached to
    the app context (g._database) during this request."""
    db = getattr(g, '_database', None)
    if db:
        db_session.remove()
def get_all(fresh=False):
    """Return all Printer rows.

    :fresh: when True, reset the scoped session first so rows are re-read
        from the database instead of the identity map
    """
    if fresh:  # `== True` comparison was redundant
        db_session.remove()
    return db_session.query(Printer).all()
def remove_db_session(exception=None):
    """Teardown hook: release the scoped database session."""
    db_session.remove()
def shutdown(error):
    """Teardown hook: release the scoped session (error is ignored)."""
    db_session.remove()
def see_trand():
    """Render the 'trand_gye' page with every Gye row.

    NOTE(review): the session is removed before the template renders;
    already-loaded attributes remain readable, but a lazy load inside the
    template would fail — confirm the template only touches loaded columns.
    """
    gye_list = Gye.query.all()
    db_session.remove()
    return render_template("trand_gye.html", list=gye_list)
def tearDown(self):
    """Release the scoped session after each test."""
    db_session.remove()
def shutdown_session(exception=None):
    """Teardown hook: release the scoped database session."""
    db_session.remove()
def after_request(response):
    """after_request hook: release the session and pass the response through."""
    db_session.remove()
    return response