def push():
    config = {'GCM_API_KEY': variables.get("gcm_api_key", "")}
    app.config.update(config)
    client = FlaskGCM()
    client.init_app(app)

    with app.app_context():
        tokens = variables.get("gcm_tokens", [])
        if not tokens:
            log("No devices registered")
            return
        playing = variables.get("playing", [])
        alert = {"artist": playing[0],
                 "album": playing[1],
                 "song": playing[2],
                 "duration": variables.get("song_duration", 0)}

        # Send to single device.
        # NOTE: Keyword arguments are optional.
        res = client.send(tokens, alert,
                          collapse_key='collapse_key',
                          delay_while_idle=False,
                          time_to_live=600)

def dbshell():
    from server import app
    with app.app_context():
        from database import db
        import models
        print("models and db modules are available")
        IPython.embed()

def backup_cycle(self):
    lasttime = self.lastBackupTime
    now = datetime.datetime.now()
    compare_time = lasttime.replace(hour=4, minute=35, second=0, microsecond=0)
    if compare_time > lasttime:
        next_time = compare_time
    else:
        next_time = compare_time + datetime.timedelta(days=1)
    if now > next_time or self.forceBackup:
        self.forceBackup = False
        print('Doing a backup (' + str(datetime.datetime.now()) + ')')
        with app.app_context():
            flask_alchemydumps.autoclean(True)
            flask_alchemydumps.create()
            if config.BACKUP_ENABLE_FTP:
                print('Doing an additional FTP backup')
                # Blank out the FTP settings so this second dump is written
                # locally, then restore them from the config module.
                app.config['ALCHEMYDUMPS_FTP_SERVER'] = ''
                app.config['ALCHEMYDUMPS_FTP_USER'] = ''
                app.config['ALCHEMYDUMPS_FTP_PASSWORD'] = ''
                app.config['ALCHEMYDUMPS_FTP_PATH'] = ''
                flask_alchemydumps.create()
                app.config['ALCHEMYDUMPS_FTP_SERVER'] = config.ALCHEMYDUMPS_FTP_SERVER
                app.config['ALCHEMYDUMPS_FTP_USER'] = config.ALCHEMYDUMPS_FTP_USER
                app.config['ALCHEMYDUMPS_FTP_PASSWORD'] = config.ALCHEMYDUMPS_FTP_PASSWORD
                app.config['ALCHEMYDUMPS_FTP_PATH'] = config.ALCHEMYDUMPS_FTP_PATH
            print('Next backup @' + str(next_time) + ' (' + str(datetime.datetime.now()) + ')')
            logentry = Action(datetime.datetime.utcnow(), config.NODE_NAME,
                              'Sync Master', '*****@*****.**', 'Backup database',
                              'Backup', 'L1', 0, 'Internal')
            db.session.add(logentry)
            db.session.commit()
        self.lastBackupTime = now

def index_csv_2_db(dataset):
    with app.app_context():
        topogram = get_topogram(dataset)

        # update state machine
        d = Dataset.query.filter_by(id=dataset["id"]).first()
        d.index_state = "processing"
        db.session.commit()

        for i, row in enumerate(topogram.process()):
            # if i == 10: break
            try:
                mongo.db[dataset["index_name"]].insert(row)  # write row to db
            except ValueError:
                d.index_state = "error line %s" % i
                db.session.commit()
                return "error line %s" % i

        # change the state to done
        d.index_state = "done"
        db.session.commit()
        db.session.close()
        job_done("parsing csv")

def setUp(self):
    app.config.from_object('config.TestingConfig')
    self.client = app.test_client()
    with app.app_context():
        init_app()
        user_datastore.create_user(email='test', password=encrypt_password('test'))
        db.session.commit()

def test_get_qrcode_svg(self):
    p = Poster(title='Hello', source_url='http://example.org', download_url='')
    with app.app_context():
        db.session.add(p)
        db.session.commit()
        r = self.client.get('/posters/{}.svg'.format(p.id))
        assert r.status_code == 200

def client():
    db_fd, app.config['DATABASE'] = tempfile.mkstemp()
    app.config['TESTING'] = True

    with app.test_client() as client:
        with app.app_context():
            db.init_app(app)
        yield client

    os.close(db_fd)
    os.unlink(app.config['DATABASE'])

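# A minimal usage sketch for the `client` fixture above, assuming it is
# registered with @pytest.fixture and that the app exposes a route at '/'
# (the route is a placeholder, not something the fixture defines).
def test_index_returns_ok(client):
    response = client.get('/')
    assert response.status_code == 200
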
def setup_method(self):
    # Replace server functions with mocks
    server.preprocess_str2vec = mock_preprocess_str2vec

    # Push app context for every test
    ctx = app.app_context()
    ctx.push()

    # Create fake resources
    self.q_classes = 12
    self.mock_data = pd.Series(["text{}".format(_) for _ in range(10)])
    self.mock_ids = np.array(range(10))
    self.mock_database_connection = UnifiedAlchemyMagicMock()
    self.mock_model = MockCombinedClassifierWrapper(q_classes=self.q_classes)

    # Assign to app context global
    g.con = self.mock_database_connection
    g.data = self.mock_data
    g.ids = self.mock_ids
    g.multilabel_model = self.mock_model

    self.not_dict_input = "this is a string"
    self.sent = {
        "GenSendID": 0,
        "Overskrift": "arbejde med strøm",
        "NoteProjektBeskriv": "strømarbejde",
        "NoteForudsaet": "hz",
        "NoteSafetyK": "strøm",
    }
    self.junk = {
        "GenSendID": 0,
        "Overskrift": "a",
        "NoteProjektBeskriv": "b",
        "NoteForudsaet": "c",
        "NoteSafetyK": "d",
    }
    self.empty = {
        "GenSendID": 0,
        "Overskrift": " ",
        "NoteProjektBeskriv": "",
        "NoteForudsaet": "\n",
        "NoteSafetyK": "\n\n",
    }
    self.half_empty = {
        "GenSendID": 0,
        "Overskrift": "abc",
        "NoteProjektBeskriv": " ",
        "NoteForudsaet": " ",
        "NoteSafetyK": "",
    }

def test_get_poster(self):
    p = Poster(title='Hello', source_url='http://example.org', download_url='')
    with app.app_context():
        db.session.add(p)
        db.session.commit()
        r = self.client.get('/posters/{}'.format(p.id))
        assert r.status_code == 200
        assert b'<h1 class="title">Hello</h1>' in r.data

def get(email):
    from server import app
    with app.app_context():
        result = getUserFromEmail(email)
        if not result:
            return None
        password = result['pass']
        return User(email, password, result['username'], result['nom'],
                    result['prenom'], result['telephone'], result['solde'],
                    result['profileImage'], result['preferencesBird'],
                    result['preferencesCat'], result['preferencesDog'])

def client():
    app.config['TESTING'] = True
    test_client = app.test_client()
    with app.app_context():
        db.drop_all()
        db.create_all()
    yield test_client

def reset_db():
    """Drop existing database and create new one with current model"""
    os.system('dropdb photos')
    os.system('createdb photos')
    with app.app_context():
        connect_to_db(app, echo=False)
        db.create_all()
    print('Reset db complete!')

def db_get_task(list_id, task_id):
    '''Queries the db for a task with the specified id'''
    query = '''
        SELECT * FROM Tasks
        WHERE id = ? AND list = ?
        ORDER BY id ASC
    '''
    with app.app_context():
        cur = get_db().cursor()
        cur.execute(query, [task_id, list_id])
        task = Task.fromDict(dict_from_row(cur.fetchone()))
    return task

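# The db_* helpers in this collection call get_db() and dict_from_row()
# without defining them. This is a hedged sketch of the usual Flask/sqlite3
# pattern they imply; the sqlite3 backend and the app.config['DATABASE'] key
# (set by the client fixture above) are assumptions, not confirmed here.
import sqlite3
from flask import g

def get_db():
    """Open (or reuse) a sqlite3 connection cached on the app context."""
    if not hasattr(g, 'db'):
        g.db = sqlite3.connect(app.config['DATABASE'])
        g.db.row_factory = sqlite3.Row
    return g.db

def dict_from_row(row):
    """Convert a sqlite3.Row (or None, for an empty result) into a dict."""
    return dict(row) if row is not None else None
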
def db_delete_list(id):
    '''Deletes the list and all its tasks with the specified id'''
    query = '''
        DELETE FROM lists WHERE id = ?;
    '''
    with app.app_context():
        db = get_db()
        cur = db.cursor()
        cur.execute(query, [id])
        db.commit()

def db_delete_file(task_id, filename):
    '''Deletes the file with the task_id and filename'''
    query = '''
        DELETE FROM Uploads WHERE task = ? AND filename = ?;
    '''
    with app.app_context():
        db = get_db()
        cur = db.cursor()
        cur.execute(query, [task_id, filename])
        db.commit()

def db_create_file(task_id, filename):
    '''Inserts a new file'''
    # Assumed schema: Uploads(task, filename), matching db_delete_file below.
    query = '''
        INSERT INTO Uploads (task, filename) VALUES (?, ?);
    '''
    with app.app_context():
        db = get_db()
        cur = db.cursor()
        cur.execute(query, [task_id, filename])
        db.commit()

def db_remove_collaborator(list_id, user_id):
    '''Deletes a collaborator from a list'''
    # Assumed schema: a collaborators(list, user_id) join table; the real
    # table name is not shown anywhere in this collection.
    query = '''
        DELETE FROM collaborators WHERE list = ? AND user_id = ?;
    '''
    with app.app_context():
        db = get_db()
        cur = db.cursor()
        cur.execute(query, [list_id, user_id])
        db.commit()

def db_add_collaborator(list_id, user_id):
    '''Adds a new collaborator'''
    # Assumed schema: a collaborators(list, user_id) join table, as above.
    query = '''
        INSERT INTO collaborators (list, user_id) VALUES (?, ?);
    '''
    with app.app_context():
        db = get_db()
        cur = db.cursor()
        cur.execute(query, [list_id, user_id])
        db.commit()

def db_get_task(list_id, task_id):
    '''Queries the db for a task with the specified id'''
    query = '''
        SELECT * FROM Tasks
        WHERE (id = ? AND list = ?)
        ORDER BY id ASC
    '''
    with app.app_context():
        cur = get_db().cursor()
        cur.execute(query, [task_id, list_id])
        task = Task.fromDict(dict_from_row(cur.fetchone()))
    return task

def flask_app():
    """A fixture that initializes the flask app."""
    app = create_app(testing_config=test_config)
    db_instance = SQLAlchemy(app)  # binds the app; a second init_app would be redundant
    with app.app_context():
        db_instance.create_all()
        yield app
        db_instance.drop_all()

def client():
    app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///:memory:'
    app.config['TESTING'] = True
    test_client = app.test_client()
    with app.app_context():
        db.drop_all()
        db.create_all()
    yield test_client

def db_delete_task(id):
    '''Deletes the task with the specified id'''
    query = '''
        DELETE FROM Tasks WHERE id = ?
    '''
    with app.app_context():
        db = get_db()
        cur = db.cursor()
        cur.execute(query, [id])
        db.commit()

def db_get_collaborators_for_list(list_id):
    '''Returns a list of all collaborators for a list from the database'''
    # Assumed schema: collaborators(list, user_id), as in db_add_collaborator;
    # only the user_id column is read back. No commit is needed for a SELECT.
    query = '''
        SELECT user_id FROM collaborators WHERE list = ?;
    '''
    with app.app_context():
        cur = get_db().cursor()
        cur.execute(query, [list_id])
        return [dict_from_row(row)['user_id'] for row in cur]

def grab_tweets(tweets, user_id):
    with API.app_context():
        user = User.query.filter_by(id=user_id).first()
        for item in tweets:
            embedding = get_sentence_vector(item.full_text)
            tweet = Tweet(text=item.full_text, embedding=embedding, user_id=user.id)
            user.tweets.append(tweet)
            db.session.commit()
        print(f"{user.screen_name} has {len(user.tweets)} tweets.")

def parse_protocol_async(protocol_id):
    with app.app_context():
        with app.test_request_context("/"):
            protocol = None  # ensure 'protocol' is bound even if the lookup raises
            try:
                protocol = Protocol.first_by_id(protocol_id)
                if protocol is None:
                    raise Exception("No protocol given. Aborting parsing.")
                parse_protocol_async_inner(protocol)
            except Exception as exc:
                stacktrace = traceback.format_exc()
                return _make_error(protocol, "Parsing", "Exception",
                                   "{}\n\n{}".format(str(exc), stacktrace))

def db_get_filenames_for_task(task_id):
    '''Returns a list of all files for a task from the database'''
    # Assumed schema: Uploads(task, filename), matching db_delete_file below.
    query = '''
        SELECT filename FROM Uploads WHERE task = ?;
    '''
    with app.app_context():
        cur = get_db().cursor()
        cur.execute(query, [task_id])
        return [dict_from_row(row)['filename'] for row in cur]

def db_create_list(title, owner_id, inbox=False):
    '''Creates a new list and returns it'''
    query = '''
        INSERT INTO Lists (title, owner, inbox, revision) VALUES (?, ?, ?, 1);
    '''
    with app.app_context():
        db = get_db()
        cur = db.cursor()
        cur.execute(query, [title, owner_id, 1 if inbox else 0])
        db.commit()
        return db_get_list(cur.lastrowid)

def db_get_user(email):
    '''Queries the db for a user with the specified email'''
    # TODO: verify table/column names; a 'users' table keyed by email is assumed here.
    query = '''
        SELECT * FROM users WHERE email = ?;
    '''
    with app.app_context():
        cur = get_db().cursor()
        cur.execute(query, [email])
        user = User.fromDict(dict_from_row(cur.fetchone()))
    return user

def db_get_user_by_id(id):
    '''Queries the db for the user with the specified id'''
    # TODO: verify table/column names; a 'users' table keyed by id is assumed here.
    query = '''
        SELECT * FROM users WHERE id = ?;
    '''
    with app.app_context():
        cur = get_db().cursor()
        cur.execute(query, [id])
        user = User.fromDict(dict_from_row(cur.fetchone()))
    return user

def db_delete_file(task_id, filename):
    '''Deletes the file with the task_id and filename'''
    query = '''
        DELETE FROM Uploads WHERE task = ? AND filename = ?;
    '''
    with app.app_context():
        db = get_db()
        cur = db.cursor()
        cur.execute(query, [task_id, filename])
        db.commit()

def setUp(self):
    self.db_fd, self.db_filename = tempfile.mkstemp()
    # mkstemp() returns an absolute path, so three slashes suffice here.
    app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + self.db_filename
    app.config['TESTING'] = True
    app.testing = True
    app.config['WTF_CSRF_ENABLED'] = False
    self.test_client = app.test_client()
    db.app = app
    db.init_app(app)
    with app.app_context():
        db.create_all()
        seed.load_users()
        seed.load_houses()

def create(app):
    with app.app_context():
        db.create_all()
        print('tables created')
        db.session.add(Carrier('at&t', 'txt.att.net'))
        db.session.add(Carrier('Verizon', 'vtext.com'))
        db.session.add(Carrier('T-Mobile', 'tmomail.net'))
        db.session.add(Carrier('Sprint', 'messaging.sprintpcs.com'))
        db.session.add(Carrier('US Cellular', 'email.uscc.net'))
        db.session.add(Carrier('Metro PCS', 'mymetropcs.com'))
        print('default carriers added')
        db.session.commit()

def db_list_exists(list_id):
    '''Returns whether a certain list exists'''
    query = '''
        SELECT DISTINCT lists.id AS id FROM lists WHERE lists.id = ?
    '''
    with app.app_context():
        cur = get_db().cursor()
        cur.execute(query, [list_id])
        result = dict_from_row(cur.fetchone())
        return result is not None and result.get('id') is not None

def update_database_from_json(json_filename):
    with open(json_filename, "r") as file:
        ads = json.load(file)
    with app.app_context():
        for ad in ads:
            tmp_ad = dict2model(ad)
            old_ad = RealtyAd.query.filter_by(id=tmp_ad.id).first()
            if old_ad is None:
                db.session.add(tmp_ad)
            else:
                copy_fields_values(old_ad, tmp_ad)
        db.session.commit()

def db_get_task(list_id, task_id):
    '''Queries the db for a task with the specified id'''
    query = '''
        SELECT id, title, list, status, description, due, revision
        FROM tasks
        WHERE id = ? AND list = ?;
    '''
    with app.app_context():
        cur = get_db().cursor()
        cur.execute(query, [task_id, list_id])
        task = Task.fromDict(dict_from_row(cur.fetchone()))
    return task

def db_create_task(list_id, title):
    '''Inserts a new task and returns it'''
    query = '''
        INSERT INTO Tasks (title, list) VALUES (?, ?)
    '''
    with app.app_context():
        db = get_db()
        cur = db.cursor()
        cur.execute(query, [title, list_id])
        db.commit()
        return db_get_task(list_id, cur.lastrowid)

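# A small end-to-end sketch tying the task helpers together: create a list,
# add a task, then read it back. It assumes db_create_list/db_get_task return
# model objects exposing .id and .title (consistent with Task.fromDict above);
# the owner id 1 and the titles are placeholder values.
def demo_task_roundtrip():
    todo_list = db_create_list('Groceries', owner_id=1)
    task = db_create_task(todo_list.id, 'Buy milk')
    fetched = db_get_task(todo_list.id, task.id)
    assert fetched.title == 'Buy milk'
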
def setUp(self):
    self.db_fd, self.db_filename = tempfile.mkstemp()
    # mkstemp() returns an absolute path, so three slashes suffice here.
    app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///" + self.db_filename
    app.config["TESTING"] = True
    app.testing = True
    app.config["WTF_CSRF_ENABLED"] = False
    self.test_client = app.test_client()
    db.app = app
    db.init_app(app)
    with app.app_context():
        db.create_all()
        seed.load_users()
        seed.load_listings()
        seed.load_messages()

def sendemail():
    """Sends emails and handles scheduling."""
    msg = Message('Your reminder!', sender=config.ADMINS[0], recipients=config.ADMINS)
    msg.body = 'text body'
    msg.html = '''<p>It's time to track your time! Please visit
    http://localhost:5000/response to fill out your activity tracker form :D</p>
    <p>To unsubscribe from these emails, please visit http://localhost:5000/response
    and deselect the "Get Reminder Emails" checkbox</p>'''
    with app.app_context():
        mail.send(msg)

def add_disp(hotel_id, hotel_name, city_id, city_name, disps):
    with app.app_context():
        results = mongo.db.disp.insert_many([
            {
                'hotel_id': hotel_id,
                'hotel_name': hotel_name,
                'city_id': city_id,
                'city_name': city_name,
                'date': datetime.strptime(disp[0], '%d/%m/%Y'),
                'available': disp[1] == '1',
            }
            for disp in disps
        ])
        print('Inserted %d availabilities on %s - %s' % (
            len(results.inserted_ids), hotel_name, city_name))

def process_words_co_occurences(dataset, nodes_count=1000, min_edge_weight=50):
    """
    Extract word co-occurrences from a data set stored in mongo and store the
    resulting graph in another mongo document.

    dataset : a dict representation of a Dataset instance
    nodes_count : maximum number of nodes in the final graph
    min_edge_weight : minimum weight for edges to be kept

    returns : nothing (this function is intended to run on a worker thread)
    """
    with app.app_context():
        topogram = get_topogram(dataset)

        # update state machine
        d = Dataset.query.filter_by(id=dataset["id"]).first()
        d.index_state = "processing"
        db.session.commit()

        # get records in the db
        records = mongo.db[dataset["index_name"]].find()
        for i, record in enumerate(records):
            keywords = set(record["keywords"])  # set() to avoid repetitions
            # compute word graph: pair the words
            for word in permutations(keywords, 2):
                topogram.add_words_edge(word[0], word[1])
        print("computing graph ok")

        print("reducing graph size")
        words_graph = topogram.get_words_network(nodes_count=nodes_count,
                                                 min_edge_weight=min_edge_weight)
        words_graph_json = topogram.export_to_json(words_graph)
        mongo.db["wordGraphs"].insert({"name": dataset["index_name"],
                                       "words": words_graph_json})
        print("graph saved in db")

        # change the state to done
        d.index_state = "done"
        db.session.commit()
        db.session.close()
        job_done("doing nasty stuff")

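# For reference: permutations() yields ordered pairs, so each undirected
# co-occurrence above is added twice ((a, b) and (b, a)). If edges should be
# counted once, itertools.combinations is the alternative.
from itertools import combinations, permutations

keywords = {'cat', 'dog'}
print(list(permutations(keywords, 2)))  # [('cat', 'dog'), ('dog', 'cat')] (order may vary)
print(list(combinations(keywords, 2)))  # [('cat', 'dog')] (order may vary)
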
def backup_cycle(self):
    lasttime = self.lastBackupTime
    now = datetime.datetime.now()
    compare_time = lasttime.replace(hour=4, minute=35, second=0, microsecond=0)
    if compare_time > lasttime:
        next_time = compare_time
    else:
        next_time = compare_time + datetime.timedelta(days=1)
    if now > next_time or self.forceBackup:
        self.forceBackup = False
        print("Doing a backup (" + str(datetime.datetime.now()) + ")")
        with app.app_context():
            flask_alchemydumps.autoclean(True)
            flask_alchemydumps.create()
            if config.BACKUP_ENABLE_FTP:
                print("Doing an additional FTP backup")
                # Blank out the FTP settings so this second dump is written
                # locally, then restore them from the config module.
                app.config["ALCHEMYDUMPS_FTP_SERVER"] = ""
                app.config["ALCHEMYDUMPS_FTP_USER"] = ""
                app.config["ALCHEMYDUMPS_FTP_PASSWORD"] = ""
                app.config["ALCHEMYDUMPS_FTP_PATH"] = ""
                flask_alchemydumps.create()
                app.config["ALCHEMYDUMPS_FTP_SERVER"] = config.ALCHEMYDUMPS_FTP_SERVER
                app.config["ALCHEMYDUMPS_FTP_USER"] = config.ALCHEMYDUMPS_FTP_USER
                app.config["ALCHEMYDUMPS_FTP_PASSWORD"] = config.ALCHEMYDUMPS_FTP_PASSWORD
                app.config["ALCHEMYDUMPS_FTP_PATH"] = config.ALCHEMYDUMPS_FTP_PATH
            print("Next backup @" + str(next_time) + " (" + str(datetime.datetime.now()) + ")")
            logentry = Action(
                datetime.datetime.utcnow(),
                config.NODE_NAME,
                "Sync Master",
                "*****@*****.**",
                "Backup database",
                "Backup",
                "L1",
                0,
                "Internal",
            )
            db.session.add(logentry)
            db.session.commit()
        self.lastBackupTime = now

def main():
    print("Create secret key")
    with open(SECRETKEY_FILE, 'wb') as sk:
        sk.write(os.urandom(24))

    with app.app_context():
        models.DB.create_all()
        user_datastore.create_user(email="root@localhost", password="******")

        # load example data (TODO: will have to be replaced)
        with codecs.open("phrases.csv", "r", "utf8") as datafile:
            datafile.readline()  # skip the header line
            for line in datafile:
                fields = line.strip().split(",")
                suggestion = models.Suggestions(question=fields[1],
                                                foreign_language=u"國語",
                                                sinograms=fields[2],
                                                romanization=fields[3],
                                                source=fields[4],
                                                justification=fields[5])
                models.DB.session.add(suggestion)
                print(suggestion, "added to db")
        models.DB.session.commit()

        resource_instance = save_resource(resource, instance)
        print('Created %s' % resource)
        ids.append(resource_instance.get_reference())
    return ids


def init_superuser():
    superuser = User(email='super')
    db.session.add(superuser)
    global test_resource
    test_resource = partial(Resource, owner_id=superuser.email)


if __name__ == '__main__':
    from server import app
    with app.app_context():
        init_superuser()
        init_practitioner()
        init('Organization')
        init_conditions()
        patient_ids = []
        sequence_ids = []
        gene_names = []
        variant_ids = []
        for example_file in os.listdir(os.path.join(BASEDIR, 'examples/vcf')):
            load_vcf_example(os.path.join(BASEDIR, 'examples/vcf', example_file))
        sequence_amount = len(sequence_ids)
        for _ in range(10):
            ...

def init_db():
    with app.app_context():
        db = get_db()
        with app.open_resource('schema.sql', mode='r') as f:
            db.cursor().executescript(f.read())
        db.commit()

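# One common way to expose init_db() during development is a Flask CLI
# command via the click integration bundled with Flask; the command name
# 'init-db' is a choice made here, not part of the snippet above.
import click

@app.cli.command('init-db')
def init_db_command():
    """Recreate the database tables from schema.sql."""
    init_db()
    click.echo('Initialized the database.')
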
def get_conn(cursorclass=pymysql.cursors.Cursor):
    # NOTE: app.app_context() pushes a fresh context on every call, so the
    # connection cached on flask.g only persists if an outer context is
    # already active; otherwise each call opens a new connection.
    with app.app_context():
        if not hasattr(g, 'db_conn'):
            g.db_conn = _conn(cursorclass)
        return g.db_conn

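# get_conn() above relies on a _conn() helper that is not shown. This is a
# hedged sketch of that helper plus a teardown hook closing the cached
# connection; host, user, password, and database are placeholder values.
def _conn(cursorclass):
    return pymysql.connect(host='localhost', user='app', password='secret',
                           database='appdb', cursorclass=cursorclass)

@app.teardown_appcontext
def close_db_conn(exc):
    conn = g.pop('db_conn', None)
    if conn is not None:
        conn.close()
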
def main():
    """Seed the database"""
    with app.app_context():
        for rating_name in ['Mediocre', 'Aggregious']:
            db.session.add(Rating(name=rating_name))
        db.session.commit()

def delete(app):
    with app.app_context():
        db.drop_all()
        print('tables dropped')

def tearDown(self):
    with app.app_context():
        db.session.remove()
        db.drop_all()