def _generate_grid(self, w, h, use_real_words=False, textfile="texts/words.txt"): """ Generate a grid of random characters. This will be one of the possible solutions to the crossword. """ grid = [] if use_real_words: with current_app.open_resource(textfile) as f: used_words = set() text = f.read().split() s = "" word = random.choice(text).strip().upper() while len(s) < w * h: if word.isalnum() and word not in used_words: s += word used_words.add(word) word = random.choice(text).strip().upper() for i in xrange(h): grid.append(s[:w]) s = s[w:] else: for i in xrange(h): grid.append("".join(random.sample(self.ALLOWED_CHARACTERS, w))) return grid
def load_page(path):
    """Read a page resource and split it into title and content.

    The first line of the file is the title; everything after it is the
    content.  Both parts are decoded as UTF-8.
    """
    with current_app.open_resource(path) as page:
        first_line = page.readline()
        remainder = page.read()
    return first_line.decode('utf-8'), remainder.decode('utf-8')
def load_page(path):
    """Read and parse a page file into a Page object.

    The file consists of ``key=value`` header lines followed by a
    ``CONTENT:`` marker; every line after the marker becomes
    ``page.content``.
    """
    with current_app.open_resource(path) as f:
        page = Page()
        in_content = False
        content = []
        for line in f:
            # Skip blank (CRLF-only) separator lines.
            if line == '\r\n':
                continue
            line = line.decode('utf-8')
            if in_content:
                content.append(line)
                continue
            if line.startswith("CONTENT:"):
                in_content = True
                continue
            # Header line.  Split on the first '=' only, so values may
            # themselves contain '=' (the original split on every '='
            # and silently dropped everything after the second one).
            # The Python-2-only debug `print` statements were removed.
            spl = line.split("=", 1)
            page.__dict__[spl[0].strip()] = spl[1]
        page.content = ''.join(content)
        return page
def create():
    """
    Creates and initializes the SQLite database from the schema file.
    """
    with current_app.app_context():
        connect()
        # Rebuild every table from the schema script.
        with current_app.open_resource(schema_name, mode="r") as f:
            g.sqlite_connection.cursor().executescript(f.read())
        # Seed two starter accounts.  NOTE(review): both use the literal
        # password "password" -- presumably development placeholders;
        # confirm they are replaced outside of development.
        add_user({
            "username": "******",
            "password_hash": generate_password_hash("password"),
            "email": "*****@*****.**",
            "permissions": "administrator"
        })
        add_user({
            "username": "******",
            "password_hash": generate_password_hash("password"),
            "email": "*****@*****.**",
            "permissions": "author"
        })
        # Default category plus one sample post so the site is not empty.
        add_category({"name": "Uncategorized"})
        add_post({
            "author": "author",
            "creation_date": str(datetime.now()).split(".")[0],
            "category": category_id("Uncategorized"),
            "title": "Test Post",
            "body": "<p>Your bones don't break, mine do. That's clear. Your cells react to bacteria and viruses differently than mine. You don't get sick, I do. That's also clear. But for some reason, you and I react the exact same way to water. We swallow it too fast, we choke. We get some in our lungs, we drown. However unreal it may seem, we are connected, you and I. We're on the same curve, just on opposite ends.</p><p>The path of the righteous man is beset on all sides by the iniquities of the selfish and the tyranny of evil men. Blessed is he who, in the name of charity and good will, shepherds the weak through the valley of darkness, for he is truly his brother's keeper and the finder of lost children. And I will strike down upon thee with great vengeance and furious anger those who would attempt to poison and destroy My brothers. And you will know My name is the Lord when I lay My vengeance upon thee.</p><p>The path of the righteous man is beset on all sides by the iniquities of the selfish and the tyranny of evil men. Blessed is he who, in the name of charity and good will, shepherds the weak through the valley of darkness, for he is truly his brother's keeper and the finder of lost children. And I will strike down upon thee with great vengeance and furious anger those who would attempt to poison and destroy My brothers. And you will know My name is the Lord when I lay My vengeance upon thee.</p>"
        })
        create_about("New Site", "This is a blank description.")
        create_site_avatar("/static/face.png")
        g.sqlite_connection.commit()
def send_mail(snap_id, group_id):
    """Email the pictures of a snap to the group's configured recipients.

    Does nothing unless the group document lists both email recipients
    and the picture source types to include, and skips the send entirely
    when no matching picture could be attached.
    """
    group_document = get_group_document(group_id)
    configured = ('email_recipients' in group_document
                  and 'send_email_contents' in group_document
                  and group_document['email_recipients']
                  and group_document['send_email_contents'])
    if not configured:
        return
    subject = "pictures from snap {0}".format(snap_id)
    recipients = group_document['email_recipients'].split(',')
    sender_addr = os.environ.get('MAIL_USERNAME')
    msg = Message(subject, sender=sender_addr, recipients=recipients)
    msg.body = "this is the image for snap id {0}\n\n".format(snap_id)
    pictures = find_pictures({'snap_id': str(snap_id)})
    picture_types = group_document['send_email_contents'].split(',')
    attached_any = False
    for pic_id, picture in pictures.items():
        # Only attach pictures whose source type is in the send list.
        if picture['source'] not in picture_types:
            continue
        pic_name = build_picture_name(pic_id)
        pic_path = build_picture_path(picture_name=pic_name, snap_id=snap_id)
        with current_app.open_resource(pic_path) as fp:
            msg.attach(pic_name, "image/jpeg", fp.read())
        attached_any = True
    if attached_any:
        mail.send(msg)
def render_template(name, **extra):
    """Load a template file and substitute ``$(key)`` placeholders.

    :param name: template filename under the ``templates`` directory.
    :param extra: placeholder values; every occurrence of ``$(key)`` in
        the template is replaced by the corresponding value.
    :return: the rendered template text.
    """
    path = os.path.join("templates", name)
    with current_app.open_resource(path) as f:
        content = f.read()
    # BUG FIX: iterating a dict yields only keys, so `for k, v in extra`
    # raised on unpacking; iterate .items() instead.
    for k, v in extra.items():
        # BUG FIX: str.replace returns a new string -- the original
        # discarded the result, so no substitution ever happened.
        content = content.replace("$(%s)" % k, v)
    return content
def get_thumbnail(size, posterPath):
    """Serve a JPEG thumbnail of ``posterPath`` at the requested size.

    ``size`` is "<width>x<height>" (e.g. "187x275").  Responds 400 for a
    malformed size, and 304 when the client's If-Modified-Since covers
    the poster file's mtime.
    """
    # width and height are separated by an 'x' (e.g. 187x275)
    splittedSize = size.split('x')
    if len(splittedSize) != 2 or not splittedSize[0].isdigit() or not splittedSize[1].isdigit():
        abort(400)
    width, height = [int(component) for component in splittedSize]
    # NOTE(review): the handle is never closed explicitly; PIL reads from
    # it lazily, so it must stay open until img.save() below runs.
    f = current_app.open_resource(posterPath)
    img = Image.open(f)
    lastModified = int(os.path.getmtime(f.name))
    # caching
    if request.if_modified_since and datetime.fromtimestamp(lastModified) <= request.if_modified_since:
        return Response(status=304)
    img.thumbnail((width, height), Image.ANTIALIAS)
    thumbnail = StringIO()
    try:
        img.save(thumbnail, 'JPEG', quality=95, optimize=True, progressive=True)
    except IOError:
        # http://stackoverflow.com/questions/6788398/how-to-save-progressive-jpeg-using-python-pil-1-1-7
        ImageFile.MAXBLOCK = img.size[0] * img.size[1]
        img.save(thumbnail, 'JPEG', quality=95, optimize=True, progressive=True)
    thumbnail.seek(0)
    # cache_timeout=0: caching is handled by the Last-Modified header set
    # below, not by Cache-Control max-age.
    res = send_file(thumbnail, mimetype='image/jpeg', cache_timeout=0)
    res.last_modified = time.localtime(lastModified)
    return res
def topic(topic):
    """Render a Markdown help topic, or 404 when it cannot be loaded.

    Any failure (missing file, template error) is logged to stdout and
    masked as a 404, matching the original best-effort behaviour.
    """
    try:
        with current_app.open_resource('content/help/%s.md' % topic) as f:
            content = f.read()
            return render_template('help/topic.html', content=content)
    except Exception as e:
        # FIX: `print e` is Python-2-only syntax; the call form works on
        # both Python 2 and 3 for a single argument.
        print(e)
        abort(404)
def get_countries():
    """Load countries from the bundled JSON resource file."""
    decode_utf8 = codecs.getreader('utf-8')
    with current_app.open_resource('resources/countries.json') as f:
        data = json.load(decode_utf8(f))
    return [Country(**entry) for entry in data]
def create_db():
    """Create the database tables from the bundled SQL script."""
    with current_app.app_context():
        db = get_db()
        with current_app.open_resource('database.sql', mode='r') as script:
            statements = script.read()
        db.cursor().executescript(statements)
        db.commit()
def import_blacklist():
    """Flag aerodromes listed in the bundled blacklist file."""
    with app.open_resource('aerodb/blacklist.txt') as blacklist:
        for raw in blacklist:
            code = raw.strip()
            # Only three-character codes count; '#' lines are comments.
            if code.startswith('#') or len(code) != 3:
                continue
            aerodrome = lookup(code)
            if aerodrome is not None:
                aerodrome.blacklist = True
    db.session.commit()
def render_page(name):
    """Render a Markdown page from the ``pages`` resource directory."""
    resource = os.path.join('pages', name)
    with current_app.open_resource(resource) as fp:
        source = fp.read().decode('utf8')
    md = Markdown(extensions=['meta'])
    # Rewrite internal links through the Flask URL map.
    md.treeprocessors["flask_links"] = MarkdownFlaskUrlProcessor(md)
    body = md.convert(source)
    page_title = ' '.join(md.Meta.get('title', []))
    return render_template('page.html', content=body, title=page_title)
def send(self):
    """Build the email message, send it, then delete this record."""
    msg = Message(self.subject,
                  recipients=list(self.recipients),
                  body=self.text_body,
                  html=self.html_body)
    # A falsy attachments value (None or empty) means nothing to attach.
    for file_name, file_type, file_path in (self.attachments or []):
        with current_app.open_resource(file_path) as fp:
            msg.attach(file_name, file_type, fp.read())
    mail.send(msg)
    self.delete()
def get_asset_path(asset_type, filename):
    """Resolve an asset filename through the gulp rev-manifest.

    Falls back to the unfingerprinted ``dist/<type>/<filename>`` path
    when no manifest file exists.
    """
    manifest_path = os.path.join(
        current_app.static_folder, 'dist', asset_type, 'rev-manifest.json'
    )
    try:
        with current_app.open_resource(manifest_path, 'rb') as manifest:
            mapping = json.load(manifest)
            # KeyError (asset missing from manifest) propagates, exactly
            # as before -- only a missing manifest file is handled.
            return os.path.join('dist', asset_type, mapping[filename])
    except FileNotFoundError:
        return os.path.join('dist', asset_type, filename)
def upgrade_db():
    """Apply the next incremental upgrade script, if one exists.

    Returns the new schema version when the upgrade file was found,
    otherwise the unchanged current version.
    """
    db = get_db()
    current_version = get_db_version()
    new_version = current_version + 1
    script_path = os.path.join(
        current_app.root_path, 'upgrade_db',
        DB_UPGRADE_FILE_NAME_PATTERN.format(current_version, new_version))
    if not os.path.isfile(script_path):
        return current_version
    with current_app.open_resource(script_path, mode='r') as f:
        sql = f.read()
    if sql:
        db.cursor().executescript(sql)
    # Record the new version even when the script body was empty.
    db.execute('update settings set value=? where key=?',
               [new_version, 'version'])
    db.commit()
    return new_version
def send_mail(subject, body, recipients, attachment=None, mimetype=None):
    """Send a formatted HTML email.

    :param subject: The subject of the email
    :param body: The body of the email
    :param recipients: Recipients to send the email to
    :param attachment: Path of the attachment to send
    :param mimetype: Two-part identifier format for attachment.
        Example: application/zip
    """
    msg = Message(subject=subject, recipients=recipients)
    msg.html = replace_for_html(body)
    # Attach only when both the file path and its MIME type are given.
    if attachment and mimetype:
        with current_app.open_resource(attachment) as attachment_file:
            msg.attach(attachment, mimetype, attachment_file.read())
    mailer.send(msg)
def compile_asset(asset_path):
    """Compile a single asset and describe the result.

    Stylus (.styl) sources become CSS and CoffeeScript (.coffee) sources
    become JavaScript; anything else passes through unchanged with a
    guessed MIME type.  Returns a CompiledAsset of (content, md5
    fingerprint, mtime, content type).
    """
    relative_path, absolute_path = get_filesystem_paths(asset_path)
    with current_app.open_resource(relative_path) as fp:
        file_contents = fp.read()
    if asset_path.endswith(".styl"):
        content = stylus_compiler.compile(file_contents)
        content_type = "text/css"
    elif asset_path.endswith(".coffee"):
        content = coffeescript.compile(file_contents)
        content_type = "application/javascript"
    else:
        content = file_contents
        content_tuple = mimetypes.guess_type(asset_path)
        content_type = content_tuple[0] or "text/plain"
    # NOTE(review): .encode() assumes `content` is text; in the
    # pass-through branch it is whatever open_resource returned, which is
    # bytes on Python 3 -- confirm this runs on Python 2 or that the
    # resource is opened in text mode.
    fingerprint = hashlib.md5(content.encode("utf-8")).hexdigest()
    last_modified = path.getmtime(absolute_path)
    return CompiledAsset(content, fingerprint, last_modified, content_type)
def sendmail():
    """Send the composed email with its Excel attachment asynchronously."""
    db = conn()
    download_dir = os.path.join(ROOT_FOLDER, current_app.config['DOWNLOAD'])
    to_addrs = request.form['recipient'].split(',')
    msg = Message(request.form['subject'],
                  sender=current_app.config['MAIL_SENDER'],
                  recipients=to_addrs)
    msg.html = request.form['wysiwyg_html']
    # The attachment name is normalised to its base name plus ".xlsx".
    attachment_path = download_dir + request.form['attachments'].split('.')[0] + '.xlsx'
    with current_app.open_resource(attachment_path.strip()) as fp:
        msg.attach(request.form['subject'], "application/vnd.ms-excel", fp.read())
    print('start:')
    # never forget to get the current_app because of the blueprint
    app = current_app._get_current_object()
    worker = Thread(target=send_async_email, args=[app, msg])
    worker.start()
    data = db['ManagerInfo'].find().sort('_id')
    return render_template('upload.html', data=data)
def init_db():
    """Execute main database schema."""
    db = get_db()
    with current_app.open_resource("schema.sql") as f:
        # NOTE(review): this feeds the whole file to a single execute();
        # on a plain sqlite3 connection a multi-statement schema needs
        # executescript() instead -- confirm what get_db() returns.
        db.execute(f.read().decode("utf-8"))
def init_db():
    """Initialise the database from the bundled schema.sql."""
    db = get_db()
    with current_app.open_resource('schema.sql') as f:
        # NOTE(review): execute() only handles a single statement on a
        # plain sqlite3 connection; a multi-statement schema would need
        # executescript() -- verify against get_db()'s return type.
        db.execute(f.read().decode('utf8'))
def init_db():
    """Create the database tables from schema.sql and commit."""
    db = get_db()
    with current_app.open_resource('schema.sql', mode='r') as f:
        # BUG FIX: the original read from an undefined name `r`
        # (NameError at runtime); the open file handle is `f`.
        db.cursor().executescript(f.read())
    db.commit()
def osrc():
    """Render the Open Source Report Card page for the logged-in user.

    Requires an OAuth access token in the session; otherwise redirects to
    the login page.  Derives the user's most used language, most frequent
    event type, closest weekly-schedule archetype and most active time of
    day from the raw OSRC data, then renders osrc.html.
    """
    if not "access_token" in session:
        return redirect("/login")
    osrc_raw = raw_osrc_data()
    # Load the list of adjectives.
    with current_app.open_resource("json/adjectives.json") as f:
        adjectives = json.load(f)
    # Load the list of languages.
    with current_app.open_resource("json/languages.json") as f:
        language_list = json.load(f)
    # Load the list of event action descriptions.
    with current_app.open_resource("json/event_actions.json") as f:
        event_actions = json.load(f)
    # Compute the name of the best description of the user's weekly schedule.
    with current_app.open_resource("json/week_types.json") as f:
        week_types = json.load(f)
    # Load the list of event verbs.
    with current_app.open_resource("json/event_verbs.json") as f:
        event_verbs = json.load(f)
    # most used language (Python 2 dict API: iteritems)
    used_languages = osrc_raw["cumulative_languages"].keys()
    if len(used_languages) > 0:
        most_used_language = max(osrc_raw["cumulative_languages"].iteritems(),
                                 key=operator.itemgetter(1))[0]
    else:
        most_used_language = None
    # events: tally how often each event type occurs.
    events_counter = dict()
    count = 0  # NOTE(review): unused.
    for event in osrc_raw["events"]:
        if not event["type"] in events_counter:
            events_counter[event["type"]] = 1
        else:
            events_counter[event["type"]] += 1
    most_done_event = max(events_counter.iteritems(),
                          key=operator.itemgetter(1))[0]
    # Pick the week archetype whose normalised vector is closest (squared
    # distance) to the user's weekly event vector.
    best_dist = -1
    week_type = None
    # NOTE(review): the key really is spelled "nomralized" upstream.
    user_vector = osrc_raw["nomralized_events_vector"]
    for week in week_types:
        vector = week["vector"]
        norm = 1.0 / math.sqrt(sum([v * v for v in vector]))
        dot = sum([(v*norm-w) ** 2 for v, w in zip(vector, user_vector)])
        if best_dist < 0 or dot < best_dist:
            best_dist = dot
            week_type = week["name"]
    # Figure out the user's best time of day.
    with current_app.open_resource("json/time_of_day.json") as f:
        times_of_day = json.load(f)
    hours = osrc_raw["events_hours_vector"]
    best_time = None
    max_val = 0
    for i in range(len(hours)):
        if hours[i] > max_val:
            max_val = hours[i]
            best_time = i
    # Map the peak hour onto its named time-of-day bucket.
    best_time_description = None
    for tod in times_of_day:
        times = tod["times"]
        if times[0] <= best_time < times[1]:
            best_time_description = tod["name"]
            break
    return render_template("osrc.html",
                           osrc_data=osrc_raw,
                           avatar=osrc_raw["user"]["avatar_url"],
                           user=osrc_raw["name"],
                           first_name=osrc_raw["first_name"],
                           adjectives=adjectives,
                           language_list=language_list,
                           used_languages=used_languages,
                           sorted_cumulative_languages=osrc_raw["sorted_cumulative_languages"],
                           most_used_language=most_used_language,
                           event_actions=event_actions,
                           most_done_event=most_done_event,
                           week_type=week_type,
                           best_time=best_time,
                           best_time_description=best_time_description,
                           latest_repo_contributions=osrc_raw["latest_repo_contributions"][:5],
                           event_verbs=event_verbs,
                           unique_events=osrc_raw["unique_events"].keys(),
                           unique_events_obj=osrc_raw["unique_events"],
                           events_vector=osrc_raw["events_vector"],
                           weekly_unique_events=osrc_raw["weekly_unique_events"],
                           hourly_unique_events=osrc_raw["hourly_unique_events"],
                           )
def raw_json():
    """Return the bundled 3341b info file as a JSON response."""
    # Load 3341b info
    with current_app.open_resource("3341b_info.json") as fp:
        payload = json.load(fp)
    return jsonify(info=payload)
def init_db():
    """Clear existing data and create new tables."""
    database = get_db()
    with current_app.open_resource('schema.sql') as schema:
        script = schema.read().decode('utf8')
    database.executescript(script)
def _fetch_sql_string(file_name):
    """Return the text of a SQL file stored under ``queries/``."""
    resource = os.path.join('queries', file_name)
    with current_app.open_resource(resource, mode='r') as sql_file:
        return sql_file.read()
def init_db():
    """Clear existing data and create new tables."""
    with current_app.open_resource("schema.sql") as schema_file:
        raw = schema_file.read()
    get_db().executescript(raw.decode("utf8"))
def init_db():
    """Run schema.sql against the application database."""
    connection = get_db()
    with current_app.open_resource('schema.sql') as schema:
        connection.executescript(schema.read().decode('utf8'))
def init_db():
    """Initialise the database from the bundled schema file."""
    # open_resource behaves like open() relative to the package root.
    with current_app.open_resource('schema.sql') as schema_file:
        sql_text = schema_file.read().decode('utf8')
    get_db().executescript(sql_text)
def init_db():
    """Create the tables defined in schema.sql."""
    db = get_db()
    # The filename is resolved relative to the flaskr package.
    with current_app.open_resource('schema.sql') as schema:
        db.executescript(schema.read().decode('utf8'))
def init_db():
    """Load and execute the stickersend SQL schema."""
    connection = get_db()
    with current_app.open_resource('stickersend.sql') as sql_file:
        connection.executescript(sql_file.read().decode('utf8'))
def read_template(template_name):
    """Return the UTF-8 text of a template under ``templates/``."""
    with current_app.open_resource("templates/" + template_name) as fp:
        raw = fp.read()
    return raw.decode("utf-8")
def index():
    """Render the landing page from the bundled JSON data files."""
    with current_app.open_resource("people.json") as people_file:
        people = json.load(people_file)
    with current_app.open_resource("projects.json") as projects_file:
        projects = json.load(projects_file)
    return render_template("index.html", people=people, projects=projects)
def init_db():
    """Create the database schema from schema.sql and commit."""
    db = get_db()
    # NOTE(review): using the cursor as a context manager implies a DB-API
    # driver such as psycopg2 (sqlite3 cursors are not context managers);
    # likewise execute() of a multi-statement script only works on drivers
    # that allow it -- confirm what get_db() returns.
    with db.cursor() as cursor, current_app.open_resource('schema.sql') as f:
        cursor.execute(f.read().decode('utf8'))
    db.commit()
def init_db_load():
    """Populate the database from data_load.sql."""
    with current_app.open_resource('data_load.sql') as loader:
        statements = loader.read().decode('utf8')
    get_db().executescript(statements)
def init_db():
    """Build the schema by executing schema.sql."""
    handle = get_db()
    with current_app.open_resource('schema.sql') as source:
        handle.executescript(source.read().decode('utf8'))
def init_db_manual_update():
    """Apply db_update.sql when it ships with the application package."""
    db = get_db()
    # BUG FIX: the existence check used a CWD-relative path while
    # open_resource resolves relative to the application root -- check
    # the same location that will actually be opened.
    if os.path.isfile(os.path.join(current_app.root_path, 'db_update.sql')):
        print("File exist")
        with current_app.open_resource('db_update.sql') as f:
            db.executescript(f.read().decode('utf8'))
def generateMap(self):
    """Build a choropleth GeoJSON for the configured filter.

    Loads district shapes from ``self.file``, counts patient occurrences
    per district polygon according to whichever filter attribute is set
    (capitulo, agrupacion, cie10, or none for total patients), annotates
    each feature with ``density`` and ``normalized`` properties, and
    returns ``(shapes, usedFilter)``.
    """
    with current_app.open_resource(self.file) as f:
        shapes = json.load(f)
    usedFilter = None
    shapesFeatures = shapes['features']
    # District names are upper-cased so later lookups are case-insensitive.
    shapeNames = [feat['properties']['name'].upper() for feat in shapesFeatures]
    # NOTE(review): district_x and district_y are computed but never used.
    district_x = [[x[0] for x in feat["geometry"]["coordinates"][0]] for feat in shapesFeatures]
    district_y = [[y[1] for y in feat["geometry"]["coordinates"][0]] for feat in shapesFeatures]
    district_xy = [[xy for xy in feat["geometry"]["coordinates"][0]] for feat in shapesFeatures]
    polygons = [Polygon(xy) for xy in district_xy]
    # Per-district counts; normalized values default to 100 and are only
    # recomputed when a CIE-10 style filter applies.
    shapeNumbers = [0] * len(shapeNames)
    shapeNormalized = [100] * len(shapeNames)
    if self.capitulo != None:
        self.calculateOcurrences(polygons, shapeNumbers, shapeNames, capitulo=self.capitulo, edad=self.edad)
        self.normalizeCie10(shapeNumbers, shapeNames, shapeNormalized)
        usedFilter = "capitulo"
    elif self.agrupacion != None:
        self.calculateOcurrences(polygons, shapeNumbers, shapeNames, agrupacion=self.agrupacion, edad=self.edad)
        self.normalizeCie10(shapeNumbers, shapeNames, shapeNormalized)
        usedFilter = "agrupacion"
    else:
        if self.cie10 == "all" or self.cie10 == None:
            self.calculateOcurrences(polygons, shapeNumbers, shapeNames, edad=self.edad)
            usedFilter = "Pacientes Totales"
        else:
            self.calculateOcurrences(polygons, shapeNumbers, shapeNames, cie10=self.cie10, edad=self.edad)
            self.normalizeCie10(shapeNumbers, shapeNames, shapeNormalized)
            usedFilter = "cie10"
    # NOTE(review): `sectores` is built but never used after this loop.
    sectores = {}
    for i in range(0, len(shapeNumbers)):
        sectores[shapeNames[i]] = shapeNumbers[i]
        # print shapeNames[i], ",",shapeNumbers[i]
    # Write the computed counts back onto each GeoJSON feature.
    for feature in shapes["features"]:
        index = shapeNames.index(feature["properties"]["name"].upper())
        feature["properties"]["density"] = shapeNumbers[index]
        feature["properties"]["normalized"] = shapeNormalized[index]
    # with current_app.open_resource(self.output, 'w') as outfile:
    # with open(os.path.join(self.app.root_path, self.output), 'w') as outfile:
    # json.dump(shapes, outfile)
    return shapes, usedFilter
def init_db():
    """Initialise the schema through the DataManager connection."""
    connection = DataManager.get_db()
    with current_app.open_resource('schema.sql') as schema:
        connection.executescript(schema.read().decode('utf8'))
def _fetch_sql_string(file_name):
    """Read a named SQL snippet from the ``queries`` resource folder."""
    sql_path = os.path.join('queries', file_name)
    with current_app.open_resource(sql_path, mode='r') as handle:
        contents = handle.read()
    return contents
def init_db():
    """Create the schema, then seed the chapter records."""
    database = get_db()
    with current_app.open_resource('schema.sql') as schema_file:
        database.executescript(schema_file.read().decode('utf8'))
    data_management.init_chapters(database)
def init_db():
    """Execute schema.sql (opened in text mode) and commit."""
    connection = get_db()
    with current_app.open_resource('schema.sql', mode='r') as script:
        connection.cursor().executescript(script.read())
    connection.commit()
import psycopg2
from flask import current_app, Flask

from instance.config import Config, Testing

app = Flask(__name__)

# BUG FIX: this module contained unresolved merge-conflict markers
# (<<<<<<< / ======= / >>>>>>>), which are a syntax error.  The conflict is
# resolved in favour of the side that actually executes the schema, and
# `psycopg2` -- used throughout -- is now imported.
url = "dbname='questioner_app' user='******' host='localhost' password='******'"
test_url = "dbname='questioner_test' user='******' host='localhost' password='******'"


def init_db():
    """ Method to initialize the database """
    with app.app_context():
        conn = psycopg2.connect(url)
        cursor = conn.cursor()
        # Use a context manager so the schema file handle is closed
        # (the original leaked it).
        with current_app.open_resource('tables.sql', mode='r') as sql:
            cursor.execute(sql.read())
        conn.commit()
        return conn


def connect_to(url):
    """Open and return a connection to the given database URL."""
    conn = psycopg2.connect(url)
    return conn


def _init_db():
    """ Initialize database for test """
    with app.app_context():
        conn = psycopg2.connect(test_url)
def email():
    """Generate the analysis PDF report and email it to the submitted address.

    Reads the uploaded CSV referenced by the session, records the contact
    details in the ``emails`` table, runs the VOD analysis, renders the
    result to PDF via pdfkit and mails it as an attachment.  Always ends
    by redirecting back to the analyze page (immediately, with a flash,
    when no email address was supplied).
    """
    location = get_location()
    path = get_file()
    filename = session['original_filename']
    cols = get_columns()
    options = get_options()
    df = load_csv_as_dataframe(path, cols)
    email = request.form.get('email')
    if not email:
        flash("Email is required.", category="danger")
        raise RequestRedirect(url_for('star.analyze'))
    optin = bool(request.form.get('optin', False))
    title = request.form.get('title')
    name = request.form.get('name')
    organization = request.form.get('organization')
    # Record the contact request before doing the heavy analysis work.
    db = get_db()
    c = db.cursor()
    c.execute(
        """ INSERT INTO emails (email, optin, title, name, organization, datetime) VALUES(?, ?, ?, ?, ?, ?) """,
        (email, optin, title, name, organization, datetime.now()))
    db.commit()
    model = VODModel(df, location=location, columns=cols, options=options)
    analysis = Analysis()
    results = analysis.analyze(model.data_frame)
    min_twilight, max_twilight = model.find_twilight_range()
    itp_range = "{} - {}".format(min_twilight.strftime("%H:%M:%S"),
                                 max_twilight.strftime("%H:%M:%S"))
    min_date, max_date = model.find_date_range()
    date_range = "{} - {}".format(min_date.strftime("%x"), max_date.strftime("%x"))
    params = dict(datetime=datetime.now().strftime("%x %X %Z"),
                  location=location,
                  original_filename=filename,
                  cols=cols,
                  options=options,
                  original_record_count=len(df.index),
                  final_record_count=len(model.data_frame.index),
                  date_range=date_range,
                  itp_range=itp_range,
                  light_count=model.light_count(),
                  dark_count=model.dark_count(),
                  results=results,
                  root_dir=current_app.config['ROOT_DIR'])
    pdf_html = render_template("email.html", **params)
    # return pdf_html
    # wkhtmltopdf page options.  NOTE(review): this rebinds the CSV
    # `options` variable above, which is no longer needed at this point.
    options = {
        'page-size': 'Letter',
        'margin-top': '0.75in',
        'margin-right': '0.75in',
        'margin-bottom': '0.75in',
        'margin-left': '0.75in',
        'encoding': "UTF-8"
    }
    # NOTE(review): the temp directory is never cleaned up afterwards.
    directory = tempfile.mkdtemp()
    pdffile = os.path.join(directory, "out.pdf")
    pdf = pdfkit.from_string(pdf_html, pdffile, options=options)
    msg = Message("RTI-STAR Report for {}".format(name or email),
                  recipients=[email])
    msg.body = "Your report is attached."
    # NOTE(review): open_resource expects an app-root-relative path;
    # passing this absolute temp path relies on os.path.join discarding
    # the root when the second component is absolute.
    with current_app.open_resource(pdffile) as fp:
        msg.attach("report.pdf", "application/pdf", fp.read())
    mail.send(msg)
    flash("Your email has been sent.")
    raise RequestRedirect(url_for('star.analyze'))
def init_db():
    """Initialise the schema from the configured schema file."""
    connection = get_db()
    with current_app.open_resource(db_schema_file_name) as schema:
        connection.executescript(schema.read().decode('utf8'))
def load(abs_path):
    """Read a resource and wrap its raw contents in a FlatFile."""
    with current_app.open_resource(abs_path) as handle:
        raw = handle.read()
    return FlatFile(raw)
def init_db(app):
    """Initialise the database within the given app's context."""
    with app.app_context():
        database = get_db()
        with current_app.open_resource('schema.sql') as schema:
            database.executescript(schema.read().decode('utf8'))
def get_file_contents(pic_path):
    """Return the raw bytes of a picture resource."""
    with current_app.open_resource(pic_path) as handle:
        data = handle.read()
    return data
def init_db():
    """Create the tables defined in res/schema.sql."""
    with current_app.open_resource("res/schema.sql") as schema:
        get_db().executescript(schema.read().decode("utf8"))
def init_db():
    """Create the database tables from the packaged schema file."""
    connection = get_db()
    # The schema path is resolved relative to the application package.
    with current_app.open_resource('schema.sql') as schema_file:
        ddl = schema_file.read().decode('utf8')
    connection.executescript(ddl)
def init_db():
    """Build the incart schema on its dedicated connection."""
    connection = get_dbincart()
    with current_app.open_resource('schema/schema_incart.sql') as script:
        connection.executescript(script.read().decode('utf8'))
def drop_db():
    """Drops all database tables and erases all data"""
    connection = get_db()
    with app.open_resource('./database/drop.sql', mode='r') as script:
        connection.cursor().executescript(script.read())
    connection.commit()
def read_template(template_name):
    """Load a template file and return it decoded as UTF-8 text."""
    resource = "templates/" + template_name
    with current_app.open_resource(resource) as template:
        return template.read().decode("utf-8")
def init_db():
    """Creates the tables for the database and inserts some example data into it"""
    connection = get_db()
    with app.open_resource('./database/schema.sql', mode='r') as schema:
        connection.cursor().executescript(schema.read())
    connection.commit()
def init_db():
    """Execute schema.sql against the app database."""
    # The path is resolved relative to the flaskr package.
    with current_app.open_resource('schema.sql') as schema:
        get_db().executescript(schema.read().decode('utf8'))
def init_db():
    """Run the schema script and commit the resulting tables."""
    connection = get_db()
    with current_app.open_resource('schema.sql') as script:
        connection.executescript(script.read().decode('UTF-8'))
    connection.commit()
def init_db():
    """Initialise the database from the packaged schema.sql."""
    database = get_db()
    # open_resource looks the file up relative to the flaskr package.
    with current_app.open_resource('schema.sql') as fh:
        database.executescript(fh.read().decode('utf-8'))
def init_db():
    """Initializes the database."""
    connection = get_db()
    with current_app.open_resource('schema.sql', mode='r') as schema:
        connection.cursor().executescript(schema.read())
    connection.commit()
#!/usr/bin/env python3
import connexion
import json
import logging

from flask import current_app

import database.database as database

logging.basicConfig(level=logging.DEBUG)

if __name__ == '__main__':
    # Build the connexion app from the TAPI swagger specification.
    tapi_app = connexion.App(__name__, specification_dir='swagger/')
    tapi_app.add_api('swagger.yaml', arguments={'title': 'tapi-connectivity API generated from tapi-connectivity.yang'})
    tapi_app.app.config['JSON_SORT_KEYS'] = False
    # Preload the TAPI context document into the database module before
    # serving any requests.
    with tapi_app.app.app_context():
        with current_app.open_resource("database/context.json", 'r') as fh:
            database.context = json.load(fh)
    tapi_app.run(port=8080)
def init_db():
    """Initialize database"""
    with current_app.open_resource('schema.sql') as schema:
        sql_text = schema.read().decode('utf8')
    get_db().executescript(sql_text)
def init_db():
    """Create the application tables from schema.sql.

    open_resource() opens a file relative to the flaskr package, which is
    useful because the deployed location of the package is not known in
    advance; get_db() supplies the connection used to run the script.
    """
    connection = get_db()
    with current_app.open_resource('schema.sql') as schema:
        connection.executescript(schema.read().decode('utf8'))