def main():
    """Notify once per currency when its price moved more than NOTIFY_PERCENTAGE
    within the last NOTIFY_SPAN hours (or since the currency was last notified).
    """
    Session = sessionmaker(bind=get_engine())
    session = Session()
    currencies = session.query(CryptCurrency).all()
    # Fixed UTC+9 timezone; "now" is timezone-aware JST.
    JST = datetime.timezone(datetime.timedelta(hours=9), 'JST')
    now = datetime.datetime.now(JST)
    # Default observation window start.
    from_date = now - datetime.timedelta(hours=NOTIFY_SPAN)
    for currency in currencies:
        # Per-currency window start: never look at prices older than the last
        # notification, to avoid re-notifying on the same movement.
        from_date_for_currncy = from_date  # (sic) original variable name kept
        if (currency.last_notified_at is not None
                and currency.last_notified_at.timestamp() > from_date.timestamp()):
            from_date_for_currncy = currency.last_notified_at
        # Newest price first; the first row seen becomes the reference price.
        prices = session.query(Price).filter(
            Price.crypt_currency_id==currency.id).filter(
            Price.updated > from_date_for_currncy).order_by(
            Price.updated.desc()).all()
        current_price = None
        for price in prices:
            if current_price is None:
                current_price = price.price_usd
            # Percent difference between an older price and the newest one.
            # NOTE(review): the divisor is the *older* price (price.price_usd),
            # and the sign is (old - newest) — confirm this matches the intended
            # "price dropped by NOTIFY_PERCENTAGE" semantics.
            # NOTE(review): price_usd may be a string if seeded straight from the
            # ticker API — verify the column type coerces to a number.
            percentage = (price.price_usd - current_price) / price.price_usd * 100
            if percentage > NOTIFY_PERCENTAGE:
                # Record the notification time before notifying, then stop
                # scanning this currency.
                currency.last_notified_at = now
                session.commit()
                notify(currency, price, percentage)
                break
def refresh_concept_table(self):
    """Copy every concept table from the reference schema into the main db's
    concept table, committing once at the end.
    """
    LOGGER.info('loading reference concept tables into main db')
    engine = get_engine(local_config.NCM_DB_CONN_STR)
    session = make_session()
    manifest = sql.get_reference_concept_manifest(engine)
    # walk each reference-schema table listed in the manifest
    for _, entry in manifest.iterrows():
        source_table = sql.get_reference_concept_table(
            entry.codesystem, entry.table_name, engine)
        # stage one Concept per source row
        for _, record in source_table.iterrows():
            session.add(ncm.Concept(
                uid=record.uid,
                local_concept_code=record.local_concept_code,
                nhs_concept_code=record.nhs_concept_code,
                codesystem=entry.codesystem))
    session.commit()
def init():
    """One-time bootstrap: create tables, built-in roles, the initial admin
    user, page models, and role/model bindings. No-op if users already exist.
    """
    session = models.get_global_session()
    # already initialised -> nothing to do
    if session.query(models.User).count() > 0:
        return
    # create table
    models.Base.metadata.create_all(models.get_engine())
    # create role (same three roles, same order as before)
    for role_name, role_id in ((u"站长", 1), (u"管理员", 2), (u"会员", 3)):
        session.add(models.Role(role_name, role_id))
    session.commit()
    # create init admin user
    if not configs.init_admin_username or not configs.init_admin_password:
        raise InitError("InitError: admin username or password is not found in config.")
    session.add(models.User(configs.init_admin_username,
                            configs.init_admin_password, 1))
    session.commit()
    # create model (one row per registered UI page)
    for url, page in UI.Manager.page_dict.iteritems():
        session.add(models.Model(page.__name__, url))
    session.commit()
    # create roleAndModel (grant every model to role 1)
    for model in session.query(models.Model):
        session.add(models.RoleAndModel(1, model.id))
    session.commit()
def init_application(dbconf=None, cusconf=None, secret=None):
    """Boot the customer-facing web app: logging, secrets, db plugin,
    Mako template context, and the radiusd websocket connection.
    """
    log.startLogging(sys.stdout)
    base.update_secret(secret)
    utils.update_secret(secret)
    log.msg("start init application...")
    TEMPLATE_PATH.append("./customer/views/")
    # install plugins
    engine, metadata = models.get_engine(dbconf)
    sqla_pg = sqla_plugin.Plugin(engine, metadata, keyword='db',
                                 create=False, commit=False, use_kwargs=False)
    session = sqla_pg.new_session()
    _sys_param_value = functools.partial(get_param_value, session)
    _get_member_by_name = functools.partial(get_member_by_name, session)
    _get_account_by_number = functools.partial(get_account_by_number, session)
    _get_online_status = functools.partial(get_online_status, session)
    # expose helpers to every Mako template
    MakoTemplate.defaults.update(
        get_cookie=get_cookie,
        fen2yuan=utils.fen2yuan,
        fmt_second=utils.fmt_second,
        request=request,
        sys_param_value=_sys_param_value,
        system_name=_sys_param_value("2_member_system_name"),
        get_member=_get_member_by_name,
        get_account=_get_account_by_number,
        is_online=_get_online_status,
    )
    # connect to radiusd's websocket admin port
    websock.connect(
        _sys_param_value('3_radiusd_address'),
        _sys_param_value('4_radiusd_admin_port'))
    mainapp.install(sqla_pg)
def init_application(dbconf=None, cusconf=None):
    """Boot the customer-facing web app: db plugin, Mako template context,
    and the radiusd websocket connection.
    """
    log.startLogging(sys.stdout)
    TEMPLATE_PATH.append("./customer/views/")
    # install plugins
    engine, metadata = models.get_engine(dbconf)
    sqla_pg = sqla_plugin.Plugin(engine, metadata, keyword='db',
                                 create=False, commit=False, use_kwargs=False)
    session = sqla_pg.new_session()
    _sys_param_value = functools.partial(get_param_value, session)
    _get_member_by_name = functools.partial(get_member_by_name, session)
    _get_account_by_number = functools.partial(get_account_by_number, session)
    _get_online_status = functools.partial(get_online_status, session)
    # expose helpers to every Mako template
    MakoTemplate.defaults.update(
        get_cookie=get_cookie,
        fen2yuan=utils.fen2yuan,
        fmt_second=utils.fmt_second,
        request=request,
        sys_param_value=_sys_param_value,
        system_name=_sys_param_value("2_member_system_name"),
        get_member=_get_member_by_name,
        get_account=_get_account_by_number,
        is_online=_get_online_status,
    )
    # connect to radiusd's websocket admin port
    websock.connect(
        _sys_param_value('3_radiusd_address'),
        _sys_param_value('4_radiusd_admin_port'))
    mainapp.install(sqla_pg)
def init_application(dbconf=None, consconf=None):
    """Boot the admin web app: db plugin, template context, websocket,
    sub-app mounting, and the xls export directory.
    """
    log.startLogging(sys.stdout)
    TEMPLATE_PATH.append("./admin/views/")
    # install plugins
    engine, metadata = models.get_engine(dbconf)
    sqla_pg = sqla_plugin.Plugin(engine, metadata, keyword='db',
                                 create=False, commit=False, use_kwargs=False)
    _sys_param_value = functools.partial(get_param_value, sqla_pg.new_session())
    MakoTemplate.defaults.update(
        get_cookie=get_cookie,
        fen2yuan=utils.fen2yuan,
        fmt_second=utils.fmt_second,
        request=request,
        sys_param_value=_sys_param_value,
        system_name=_sys_param_value("1_system_name"),
        radaddr=_sys_param_value('3_radiusd_address'),
        adminport=_sys_param_value('4_radiusd_admin_port'),
    )
    # connect radiusd websocket admin port
    websock.connect(
        MakoTemplate.defaults['radaddr'],
        MakoTemplate.defaults['adminport'],
    )
    mainapp.install(sqla_pg)
    ops_app.install(sqla_pg)
    bus_app.install(sqla_pg)
    mainapp.mount("/ops", ops_app)
    mainapp.mount("/bus", bus_app)
    # create dir for exported xls files; ignore if it already exists
    try:
        os.makedirs(os.path.join(APP_DIR, 'static/xls'))
    except:
        pass
def write_worker(self):
    """Insert one random TestTable row into the database, logging entry/exit."""
    tag = self.id_ if self.id_ else ''
    logger.debug('write start {}'.format(tag))
    engine = models.get_engine(**options.as_dict())
    # close the session even if the insert fails
    with contextlib.closing(models.get_session(engine)) as session:
        record = models.TestTable.random(session)
        session.add(record)
        session.commit()
    logger.debug('write stop {}'.format(tag))
def init_application(dbconf=None, consconf=None):
    """Bootstrap the admin web application: logging, db plugin, Mako template
    context, delayed websocket/task startup, operator permissions, sub-app
    mounting, and the xls export directory.
    """
    log.startLogging(sys.stdout)
    log.msg("start init application...")
    TEMPLATE_PATH.append("./admin/views/")
    ''' install plugins'''
    log.msg("init plugins..")
    engine, metadata = models.get_engine(dbconf)
    sqla_pg = sqla_plugin.Plugin(engine, metadata, keyword='db', create=False,
                                 commit=False, use_kwargs=False)
    session = sqla_pg.new_session()
    _sys_param_value = functools.partial(get_param_value, session)
    log.msg("init template context...")
    # Helpers and pre-resolved system parameters exposed to every template.
    MakoTemplate.defaults.update(
        **dict(get_cookie=get_cookie,
               fen2yuan=utils.fen2yuan,
               fmt_second=utils.fmt_second,
               currdate=utils.get_currdate,
               request=request,
               sys_param_value=_sys_param_value,
               system_name=_sys_param_value("1_system_name"),
               radaddr=_sys_param_value('3_radiusd_address'),
               adminport=_sys_param_value('4_radiusd_admin_port'),
               permit=permit,
               all_menus=permit.build_menus(
                   order_cats=[u"系统管理", u"营业管理", u"运维管理"]))))
    # connect radiusd websocket admin port
    log.msg("init websocket client...")
    wsparam = (
        MakoTemplate.defaults['radaddr'],
        MakoTemplate.defaults['adminport'],
    )
    # Delayed so the reactor is running before the connection attempt.
    reactor.callLater(5, websock.connect, *wsparam)
    log.msg("init tasks...")
    reactor.callLater(7, tasks.start_jobs, sqla_pg.new_session)
    log.msg("init operator rules...")
    # operator_type == 0 marks a super operator; grant it every permission.
    for _super in session.query(
            models.SlcOperator.operator_name).filter_by(operator_type=0):
        permit.bind_super(_super[0])
    log.msg("install plugins...")
    mainapp.install(sqla_pg)
    ops_app.install(sqla_pg)
    bus_app.install(sqla_pg)
    mainapp.mount("/ops", ops_app)
    mainapp.mount("/bus", bus_app)
    #create dir
    # best-effort: ignore "already exists" (and any other) failure
    try:
        os.makedirs(os.path.join(APP_DIR, 'static/xls'))
    except:
        pass
def get_db_session():
    """Build an engine from CONFIG, ensure all tables exist, and return a
    fresh session bound to it.
    """
    engine = get_engine(
        CONFIG['DB_USERNAME'],
        CONFIG['DB_PASSWORD'],
        CONFIG['DB_HOST'],
        CONFIG['DB_NAME'],
    )
    # Database creation happens during postgres docker image start,
    # see https://hub.docker.com/_/postgres/
    # Initialize tables
    Base.metadata.create_all(engine)
    factory = sessionmaker(bind=engine)
    return factory()
def populate_database():
    """Load chideoer and host records into the database, printing elapsed time."""
    started = time.perf_counter()
    # same call order as before: uri, host records, then engine
    uri = get_database_uri()
    hosts = get_host_records()
    engine = get_engine(uri, echo=False)
    load_chideoer_records(db_engine=engine)
    load_host_records(db_engine=engine, host_records=hosts)
    elapsed = time.perf_counter() - started
    print(f"{__file__} completed in {elapsed:0.2f} seconds")
def read_worker(self):
    """Scan TestTable rows above the random limit, forcing a full fetch."""
    tag = self.id_ if self.id_ else ''
    logger.debug('read start {}'.format(tag))
    engine = models.get_engine(**options.as_dict())
    with contextlib.closing(models.get_session(engine)) as session:
        rows = (session.query(models.TestTable)
                .order_by(models.TestTable.value)
                .filter(models.TestTable.value > models.TestTable._rand_lim))
        # iterate purely to pull every row from the db; results are discarded
        for row in rows:
            row
    logger.debug('read stop {}'.format(tag))
def return_ident_page(self):
    """Render the identifier page: all definitions and references of the
    identifier passed in the `_i` query argument.
    """
    ident = self.get_argument('_i', '')
    if ident:
        self.detail['title'] = ident
        # resolve identifier text -> symbol id via the per-project cache
        symid = symbolcache.get_symid(self.project_name, ident)
        if not symid:
            # unknown symbol: show an empty page rather than erroring
            defs = []
            refs = {}
        else:
            # collect definitions: (type description, file link, line link)
            with SwitchEngine(get_engine(self.project_name)):
                objs = Definitions.query.filter(Definitions.symid==symid).all()
                defs = []
                for o in objs:
                    lang, desc = LangType.format_lang_type(o.typeid)
                    filename = filecache.get_filename(self.project_name, o.fileid)
                    defs.append(
                        (desc,
                         self._identfile(filename),
                         self._identline(filename, o.line)
                         )
                    )
            # collect references grouped by file id
            with SwitchEngine(get_engine(self.project_name)):
                objs = Ref.query.filter(Ref.symid==symid).all()
                refs = {}
                for o in objs:
                    filename = filecache.get_filename(self.project_name, o.fileid)
                    item = (self._identfile(filename),
                            self._identline(filename, o.line))
                    if o.fileid in refs:
                        refs[o.fileid].append(item)
                    else:
                        refs[o.fileid] = [item]
        self.detail['defs'] = defs
        self.detail['refs'] = refs.values()
        self.detail['ident'] = ident
    # NOTE(review): indentation reconstructed from flattened source — render is
    # assumed to run unconditionally (an empty `_i` renders the bare page).
    self.render("ident.html", **self.detail)
def init():
    """Create tables and, depending on options, truncate and/or seed random rows."""
    engine = models.get_engine(**options.as_dict())
    models.Base.metadata.create_all(engine)
    if options.truncate:
        with contextlib.closing(models.get_session(engine)) as session:
            models.TestTable.truncate(session)
    if options.init_records:
        with contextlib.closing(models.get_session(engine)) as session:
            seed = [models.TestTable.random(session)
                    for i in xrange(options.init_records)]
            session.bulk_save_objects(seed)
            session.commit()
    logger.debug('initiated')
def init_application(dbconf=None,consconf=None,secret=None):
    """Bootstrap the admin application (variant with secret and card sub-app):
    logging, secrets, db plugin, template context, delayed websocket/task
    startup, operator permissions, sub-app mounting, xls export directory.
    """
    log.startLogging(sys.stdout)
    log.msg("start init application...")
    base.update_secret(secret)
    utils.update_secret(secret)
    TEMPLATE_PATH.append("./admin/views/")
    ''' install plugins'''
    log.msg("init plugins..")
    engine,metadata = models.get_engine(dbconf)
    sqla_pg = sqla_plugin.Plugin(engine,metadata,keyword='db',create=False,commit=False,use_kwargs=False)
    session = sqla_pg.new_session()
    _sys_param_value = functools.partial(get_param_value,session)
    _get_product_name = functools.partial(get_product_name,session)
    log.msg("init template context...")
    # Helpers and pre-resolved system parameters exposed to every template.
    MakoTemplate.defaults.update(**dict(
        get_cookie = get_cookie,
        fen2yuan = utils.fen2yuan,
        fmt_second = utils.fmt_second,
        currdate = utils.get_currdate,
        request = request,
        sys_param_value = _sys_param_value,
        get_product_name = _get_product_name,
        system_name = _sys_param_value("1_system_name"),
        radaddr = _sys_param_value('3_radiusd_address'),
        adminport = _sys_param_value('4_radiusd_admin_port'),
        permit = permit,
        all_menus = permit.build_menus(order_cats=[u"系统管理",u"营业管理",u"运维管理"])
    ))
    # connect radiusd websocket admin port
    log.msg("init websocket client...")
    # Delayed so the reactor is running before the connection attempt.
    wsparam = (MakoTemplate.defaults['radaddr'],MakoTemplate.defaults['adminport'],)
    reactor.callLater(3, websock.connect,*wsparam)
    log.msg("init tasks...")
    reactor.callLater(5, tasks.start_online_stat_job, sqla_pg.new_session)
    log.msg("init operator rules...")
    # operator_type == 0 marks a super operator; grant it every permission.
    for _super in session.query(models.SlcOperator.operator_name).filter_by(operator_type=0):
        permit.bind_super(_super[0])
    log.msg("install plugins...")
    mainapp.install(sqla_pg)
    ops_app.install(sqla_pg)
    bus_app.install(sqla_pg)
    card_app.install(sqla_pg)
    mainapp.mount("/ops",ops_app)
    mainapp.mount("/bus",bus_app)
    mainapp.mount("/card",card_app)
    #create dir
    # best-effort: ignore "already exists" (and any other) failure
    try:
        os.makedirs(os.path.join(APP_DIR,'static/xls'))
    except:
        pass
def db_updater(host, **kwargs):
    """Generator-based updater: the first resume wipes and recreates the
    object tables; every subsequent resume applies an incremental update.
    """
    source = DataSource(**kwargs)
    engine = get_engine(host)
    # initial reset, all inside one transaction
    with engine.begin() as conn:
        conn.execute(objects_status.delete())
        conn.execute(objects.delete())
        conn.execute(source.get_create_expression())
    yield
    # incremental refresh on every later resume
    while True:
        engine.execute(source.update_objects())
        yield
def load(self, project_name):
    """Return the fileid<->filename cache for *project_name*, building and
    memoising it on first use.
    """
    try:
        return self.cache_dict[project_name]
    except KeyError:
        pass
    cache = SimpleCache()
    with SwitchEngine(get_engine(project_name)):
        for record in File.query.all():
            # map both directions: id -> name and name -> id
            cache.set(record.fileid, record.filename)
            cache.set(record.filename, record.fileid)
    self.cache_dict[project_name] = cache
    return cache
def create_db():
    """Create all tables, then seed the four main lifts and their default
    weight increments.
    """
    Model.metadata.create_all(get_engine(), checkfirst=True)
    print('Created DB')
    db = get_db()
    main_lifts = ['Press', 'Deadlift', 'Bench press', 'Squat']
    for name in main_lifts:
        db.add(Lift(name=name))
    db.commit()
    print('Added 4 main Lifts')
    for name in main_lifts:
        # pressing movements progress in smaller jumps
        amount = 2.5 if 'press' in name.lower() else 5
        db.add(LiftIncrement(lift=name, amount=amount))
    db.commit()
    print('...and their default increments')
def main():
    """Seed the CryptCurrency table from the CoinMarketCap ticker endpoint."""
    setup_db()
    factory = sessionmaker(bind=get_engine())
    session = factory()
    response = requests.get('https://api.coinmarketcap.com/v1/ticker/')
    # one CryptCurrency row per ticker entry, flushed as we go
    for entry in response.json():
        session.add(CryptCurrency(id=entry['id'],
                                  name=entry['name'],
                                  symbol=entry['symbol']))
        session.flush()
    session.commit()
def dbengine(app_settings, ini_file):
    """Fixture-style generator: yield a freshly-migrated engine, then drop
    everything and clear the alembic stamp on teardown.
    """
    engine = models.get_engine(app_settings)
    alembic_cfg = alembic.config.Config(ini_file)
    # start from a clean slate
    Base.metadata.drop_all(bind=engine)
    alembic.command.stamp(alembic_cfg, None, purge=True)
    # run migrations to initialize the database
    # depending on how we want to initialize the database from scratch
    # we could alternatively call:
    #   Base.metadata.create_all(bind=engine)
    #   alembic.command.stamp(alembic_cfg, "head")
    alembic.command.upgrade(alembic_cfg, "head")
    yield engine
    # teardown: drop tables and clear the stamp again
    Base.metadata.drop_all(bind=engine)
    alembic.command.stamp(alembic_cfg, None, purge=True)
def upload_concept_tables(self):
    """ upload concept tables from concept_data files to reference schema """
    LOGGER.info("uploading the concept tables")
    e = get_engine(local_config.NCM_DB_CONN_STR)
    # raw DBAPI connection/cursor: save_to_table writes through the cursor
    con = e.raw_connection()
    csr = con.cursor()
    # each name in concept_data.__all__ is a module describing one code system
    for c in concept_data.__all__:
        LOGGER.info("processing %s", c)
        curr_m = importlib.import_module("concept_data." + c)
        curr_c = concept.Concept(curr_m, c)
        curr_c.qc_data()  # validate before writing
        curr_c.save_to_table(csr)
    # NOTE(review): commit placement reconstructed from flattened source —
    # confirm whether a per-table commit (inside the loop) was intended.
    con.commit()
def create_ncm_db(self):
    """Create the db tables as defined in models/ncm.py, running the
    before/after SQL scripts around table creation.
    """
    LOGGER.info("creating ncm database")
    engine = get_engine(local_config.NCM_DB_CONN_STR)
    # prepare database
    before_script = os.path.abspath("sql_scripts/before_table_creation.sql")
    sql.run_sql_script(before_script, engine)
    # make the tables
    ncm.BASE.metadata.create_all(engine)
    # amend tables and add other stuff
    after_script = os.path.abspath("sql_scripts/after_table_creation.sql")
    sql.run_sql_script(after_script, engine)
def main():
    """Append one Price row per currency from the CoinMarketCap ticker."""
    factory = sessionmaker(bind=get_engine())
    session = factory()
    response = requests.get('https://api.coinmarketcap.com/v1/ticker/')
    for entry in response.json():
        # 'last_updated' is a unix-timestamp string; converted to a naive datetime
        session.add(Price(
            price_usd=entry['price_usd'],
            updated=datetime.datetime.fromtimestamp(int(entry['last_updated'])),
            crypt_currency_id=entry['id']))
        session.flush()
    session.commit()
def _parse_include(self, frag):
    """Tokenise an #include fragment and rebuild it as linked HTML: reserved
    words, header filenames, and identifiers become links; everything else is
    kept verbatim.
    """
    ss = self.blankre.split(frag)
    kk = []
    for i in ss:
        if i == '':
            continue
        ni = i.strip()
        if not ni:
            # whitespace-only token: keep verbatim
            kk.append(i)
            continue
        if i == '<':
            # NOTE(review): self-assignment — almost certainly an HTML entity
            # ("&lt;") lost in transit; as written this branch is a no-op.
            i = "<"
            kk.append(i)
        elif i == '>':
            # NOTE(review): same — likely "&gt;" originally.
            i = ">"
            kk.append(i)
        elif i == '"':
            kk.append(i)
        elif self.is_reserved(i):
            kk.append(self.get_reserved_link(i))
        elif self.filename and i.lower().endswith('.h'):
            # looks like a header name: resolve it to a real path in src_file
            from models import get_engine
            eg = get_engine(self.project_name)
            # NOTE(review): string-interpolated LIKE pattern — SQL injection
            # risk if `i` can contain untrusted text; should be parameterized.
            sql = "select filename from src_file where filename like '%%%s' limit 1;" % i
            ret = eg.execute(sql)
            obj = ret.fetchone()
            if obj is None:
                kk.append(i)
            else:
                kk.append(self.get_include_link(i, obj[0]))
        elif self.is_ident(i):
            kk.append(self.get_ident_link(i))
        else:
            kk.append(i)
    return ''.join(kk)
def _calc_dir_content(self):
    """Build the HTML directory listing for self.reqfile: a parent-directory
    row, one row per sub-directory, and one row per file (with cached line
    counts), rendered through the htmldir.html template.
    """
    from models import File
    dirs, files = self.files.getdir(self.reqfile)
    # filter ._xxx
    files = [_f for _f in files if not _f.startswith('._')]
    filenames = [os.path.join(self.reqfile, _f) for _f in files ]
    # fetch cached line counts for all files in one query
    linecount_dict = {}
    with SwitchEngine(get_engine(self.project_name)):
        objs = File.query.get_many(filenames)
        for o in objs:
            linecount_dict[o.filename] = o.linecount
    if not dirs and not files:
        return '''<p class="error">\n<i>The directory /%s does not exist, is empty or is hidden by an exclusion rule.</i>\n</p>\n''' % self.reqfile
    res = []
    _count = 0  # running row index; drives alternating dirrow1/dirrow2 classes
    # synthesise a "Parent directory" row unless we're at the root
    if self.reqfile != '/':
        i = {}
        i['name'] = "Parent directory"
        i['class'] = 'dirfolder'
        i['dirclass'] = 'dirrow%d' % (_count%2 + 1)
        i['href'] = "/source/%s%s" % (self.project_name, os.path.dirname(self.reqfile))
        i['img'] = '/icons/back.gif'
        i['linecount'] = '-'
        i['modtime'] = '-'
        i['desc'] = ''
        _count += 1
        res.append(i)
    # one row per sub-directory
    for dir_name in dirs:
        i = {}
        i['name'] = dir_name + "/"
        i['class'] = 'dirfolder'
        i['dirclass'] = 'dirrow%d' % (_count%2 + 1)
        if self.reqfile and self.reqfile != '/':
            i['href'] = "/source/%s%s/%s" % (self.project_name, self.reqfile, dir_name)
        else:
            i['href'] = "/source/%s/%s" % (self.project_name, dir_name)
        i['img'] = '/icons/folder.gif'
        i['linecount'] = '-'
        i['modtime'] = '-'
        i['desc'] = ''
        _count += 1
        res.append(i)
    # one row per file, with its cached line count when known
    for file_name in files:
        i = {}
        i['name'] = file_name
        i['class'] = 'dirfile'
        i['dirclass'] = 'dirrow%d' % (_count%2 + 1)
        if self.reqfile != '/':
            i['href'] = "/source/%s%s/%s" % (self.project_name, self.reqfile, file_name)
        else:
            i['href'] = "/source/%s/%s" % (self.project_name, file_name)
        i['img'] = '/icons/generic.gif'
        i['linecount'] = linecount_dict.get(os.path.join(self.reqfile, file_name), '-')
        i['modtime'] = '-'
        i['desc'] = ''
        _count += 1
        res.append(i)
    loader = template.Loader(self.settings['template_path'])
    html = loader.load('htmldir.html').generate(files=res, desc='')
    return html
def initialize_db(db_name='scatdat.db'):
    """Open (or create) the named database, bind the module-level session,
    and ensure all tables exist.
    """
    global session
    db_engine = models.get_engine(db_name)
    session = models.get_session(db_engine)
    models.create_tables(db_engine)
def db_engine(app_settings):
    """Create the SQLAlchemy engine for *app_settings*.

    Setting the DB_ECHO environment variable to a non-empty, non-zero value
    turns on SQL statement echoing for debugging.
    """
    engine = models.get_engine(app_settings)
    # Bug fix: the original used bool(os.environ.get('DB_ECHO', 0)), which is
    # truthy for ANY set value — including the string "0", since bool("0") is
    # True. Compare against the known "off" spellings instead so DB_ECHO=0
    # keeps echo disabled while DB_ECHO=1 enables it.
    if os.environ.get('DB_ECHO', '0') not in ('', '0'):
        engine.echo = True
    return engine
from flask import Flask
# NOTE(review): in Python 3 neither URLError nor urlparse lives in
# urllib.request (they are in urllib.error / urllib.parse) — this import line
# raises ImportError as written; confirm and split the imports.
from flask import flash, request, jsonify, abort, render_template, session, redirect, url_for
from urllib.request import urlopen, URLError, urlparse
from article import save_article
from models import get_engine, articles
from sqlalchemy.sql import table, column, select
import os

# Module-level Flask app and a single shared DB connection used by all handlers.
contento = Flask(__name__)
engine = get_engine()
conn = engine.connect()


def valid_url(url):
    # A URL counts as valid when it carries a scheme (http, https, ...).
    parsed_url = valid_url_parsed = urlparse(url)
    return bool(parsed_url.scheme)


@contento.route('/api/article', methods=['POST'])
def create_article():
    # Expected JSON body: url, lang, keywords_matching, old_days.
    content = request.json
    url = content['url']
    languange_to_translate = content['lang']  # (sic) original name kept
    keywords_matching = content['keywords_matching']
    old_days = content['old_days']
    # Duplicate check: has this url already been stored in `articles`?
    t = table('articles', column('url'))
    s = select([t]).where(t.c.url == url)
    r = conn.execute(s)
    results = r.fetchall()
    # NOTE(review): the handler is truncated here in the visible source — the
    # body of this `if` (and the rest of the function) continues elsewhere.
    if (len(results) > 0):
def save_article(url, lang, keywords_matching, old_days):
    """Download, filter, translate and publish an article; return a status
    dict with a 'success' flag and either a 'message' or an 'error'.

    NOTE(review): docstring added in review — behaviour described only from
    the visible code; indentation reconstructed from a flattened source.
    """
    errors = []
    try:
        a = Article(url)
        a.download()
        a.parse()
        if a.publish_date is None:
            print("No publish date")
            # fall back to the HTTP Last-Modified header for a publish date
            try:
                with urlopen(url) as f:
                    # NOTE(review): the url is opened a second time here and
                    # `f` is never used — looks like refactor leftover.
                    conn = urlopen(url, timeout=30)
                    publish_date = conn.headers['last-modified']
                    if publish_date is None:
                        errors = {
                            'error': 'No publish date in headers',
                            'success': False
                        }
                        return errors
                    print("Publish date from headers " + str(publish_date))
            except Exception as e:
                errors = {
                    'error': 'Cant get last modified date from headers' + str(e),
                    'success': False
                }
                return errors
        else:
            publish_date = utc_to_local(a.publish_date)
            now = datetime.now(timezone.utc)
            time_between_insertion = now - publish_date
            # reject articles older than `old_days` days
            if time_between_insertion.days > int(old_days):
                errors = {
                    'error': "The insertion date is older than " + str(old_days) + " days",
                    'success': False
                }
                return errors
        text = translate_from_google(a.text, lang, 'text')
        if (len(text) < 500):
            # NOTE(review): message says "400 chars" but the check is < 500
            errors = {'error': "Text is less than 400 chars", 'success': False}
            return errors
        try:
            matches = is_similar_context(text)
            # print(matches)
        except Exception as e:
            # NOTE(review): if this raises, `matches` stays unbound and the
            # next line raises NameError — flagged for follow-up.
            print('Problems with similar context' + str(e))
        if (len(matches) < int(keywords_matching)):
            errors = {
                'error': "The keywords matching are less " + str(keywords_matching),
                'success': False
            }
            return errors
        data = {}
        data['article'] = []
        title = translate_from_google(a.title, lang)
        a.nlp()
        # keep keywords longer than 3 chars, translate them, then de-duplicate
        keywords = [keyword for keyword in a.keywords if len(keyword) > 3]
        keywords = [
            translate_from_google(keyword, lang) for keyword in keywords
        ]
        keywords.extend(get_keywords_from_text(text))
        k = set(keywords)
        unique_keywords = list(k)
        summary = translate_from_google(a.summary, lang)
    except Exception as e:
        errors = {
            'error': 'Some errors trying to parse Article: ' + str(e),
            'success': False
        }
        return errors
    # Commented-out JSON-file persistence kept as-is (no-op string literal).
    '''
    data['article'].append({
        'original_title': a.title,
        'title': title,
        'author': a.authors,
        'original_text': a.text,
        'text': text,
        'top_image': a.top_img,
        'keywords': unique_keywords,
        'summary': summary,
        'url': url,
        'date': publish_date.strftime("%B %d, %Y")
    })
    with open("articles.json", "a", encoding='utf8') as outfile:
        json.dump(data, outfile, ensure_ascii=False)
    '''
    wp = Wordpress()
    # use the top-3 nltk keywords as an image search query
    query_for_images = (" ".join(get_keywords_nltk(text, 3)))
    images_src = get_pics(query_for_images, 1)
    if images_src is None:
        image_src = a.top_image
    else:
        image_src = images_src[0]
    if (wp.publish(title, text, image_src, unique_keywords)):
        # record the published article in the local articles table
        engine = get_engine()
        conn = engine.connect()
        ins = insert(articles).values(original_title=a.title.encode('utf-8'),
                                      title=title,
                                      author=' '.join(a.authors),
                                      original_text=a.text.encode('utf-8'),
                                      text=text,
                                      top_image=a.top_img,
                                      keywords=', '.join(unique_keywords),
                                      summary=summary,
                                      url=url,
                                      date=publish_date)
        r = conn.execute(ins)
        response = {'message': "Publish OK", 'success': True}
        return response
    else:
        errors = {'error': "Error in publishing", 'success': False}
        return errors
    # NOTE(review): unreachable — both branches above return.
    response = {'error': "Parse OK", 'success': False}
    return response
def drop_db():
    """Reflect the current schema from the database and drop every table."""
    engine = get_engine()
    metadata = Model.metadata
    metadata.reflect(engine)
    metadata.drop_all(engine)
    print('Dropped DB')