def Main(): greetings = """ .__ __ _ __ ____ | | ____ ____ _____ ____ \ \/ \/ // __ \| | _/ ___\/ _ \ / \_/ __ \ \ /\ ___/| |_\ \__( <_> ) Y Y \ ___/ \/\_/ \___ >____/\___ >____/|__|_| /\___ > \/ \/ \/ \/ """ print greetings server_settings = raw_input( "Choose 1/2:\n1 -> Default connection settings (loopback:8080)\n2 -> Custumize connection settings\n" ) while not server_settings.isdigit() or int(server_settings) not in [1, 2]: print "Invalid" server_settings = raw_input( "Choose 1/2:\n1 -> Default connection settings (loopback:8080)\n2 -> Custumize connection settings\n" ) if int(server_settings) == 2: port = int(raw_input("Enter port number ->")) host = raw_input("Enter host ip ->") else: host = '127.0.0.1' port = 8080 for dir in important_dirs: if not os.path.exists(dir): os.makedirs(dir) db.create_db() thread = threading.Thread(target=Serve, args=(host, port)) thread.daemon = True thread.start() while True: exit_signal = raw_input('Type "exit" anytime to stop server\n') if exit_signal == 'exit': break
def parameter_sweep(conf_file):
    """Run `settings["sessions"]` training sessions, each with randomly
    sampled hyper-parameters from the "sweep" section of a JSON config.

    :param conf_file: path to the JSON sweep configuration
    """
    with open(conf_file) as f:
        conf = json.load(f)
    settings = conf["settings"]
    if not os.path.isfile(settings["db"]):
        db.create_db(settings["db"])
    sweep = db.insert_sweep(settings["db"], conf_file, settings["description"])
    for i in range(settings["sessions"]):
        model_name = "sweep_{}_session_{}_model.pth".format(sweep, i)
        session = db.insert_session(settings["db"], model_name)
        # Sample one value per swept parameter.
        params = {p: random.choice(values)
                  for p, values in conf["sweep"].items()}
        db.insert_parameters(settings["db"], session, list(params.items()))
        try:
            run_session(settings["db"], settings["max_session_length"], i,
                        session, model_name, params)
            db.update_description(settings["db"], session, "Success")
        except Exception:
            # Was a bare `except:` -- narrowed so SystemExit/KeyboardInterrupt
            # still propagate.  A failed session simply leaves its
            # description unset (deliberate best-effort behaviour).
            pass
def test_0_create_database(self):
    """create_db() must create all five expected tables."""
    db.create_db()
    with closing(sqlite3.connect(db.DB_NAME)) as conn:
        cur = conn.cursor()
        cur.execute(
            '''SELECT name FROM sqlite_master WHERE type='table';''')
        tables = cur.fetchall()
    # The old assertion `('MESSAGES',) and ... and ('SELECTIONS',) in tables`
    # only tested the LAST tuple: non-empty tuples are truthy, so every
    # operand before `in` collapsed to True.  Check each table explicitly.
    for table in ('MESSAGES', 'SPORTS', 'EVENTS', 'MARKETS', 'SELECTIONS'):
        assert (table, ) in tables, "missing table: {}".format(table)
def main():
    """Provision the database (optionally on RDS), build the schema, run ETL."""
    # delete_rds_instance()
    # DB_ON_CLOUD is a module-level flag; when set, stand up an RDS instance
    # before creating the database locally-or-remotely.
    if DB_ON_CLOUD:
        setup_rds_instance()
    create_db()
    create_schema()
    etl()
def client():
    """Pytest fixture: yield a Flask test client backed by a throwaway
    SQLite file, which is closed and deleted on teardown."""
    db_fd, app.config['DATABASE'] = tempfile.mkstemp()
    create_db(app.config['DATABASE'])
    app.config['TESTING'] = True
    test_client = app.test_client()
    yield test_client
    # Teardown: release the fd and remove the temp database file.
    os.close(db_fd)
    os.unlink(app.config['DATABASE'])
def deploy_project(target_dir='tolong'):
    """ Deploys a project for the first time """
    setup_env(target_dir)
    # Refuse to clobber an existing deployment.
    if exists("%s" % env.PROJECT_PATH):
        abort(
            'Project already deployed. Run "fab update_project -H <host>" instead'
        )
    setup_os()
    # Project directory owned by the web-server user.
    require.files.directory(env.PROJECT_PATH, use_sudo=True, owner='www-data', group='www-data')
    update_source_code(deploy=True)
    setup_virtualenv()
    log_dir = os.path.join(env.PROJECT_PATH, 'logs')
    require.files.directory(log_dir, use_sudo=True, owner='www-data', group='www-data')
    setup_webserver()
    create_db()
    # Render settings_local.py from template with the DB credentials,
    # then move it into the project source tree.
    context = {
        'db_name': env.db_name,
        'db_user': env.db_user,
        'db_pass': env.db_pass,
    }
    upload_template('fabfiles/conf_templates/settings_local.py.tpl', env.HOME_PATH, context=context)
    sudo('mv %ssettings_local.py.tpl %s/%s/settings_local.py' % (env.HOME_PATH, env.SRC_PATH, env.PROJECT_NAME))
    install_project_requirements()
    collect_static()
    # compress_static()
    sudo('chown -R www-data:www-data %s' % env.PROJECT_PATH)
    sudo('{0} syncdb'.format(env.MANAGE_BIN))
    migrate()
    # Historical post-deploy checklist, kept disabled as a bare string:
    ''' print("""Finished deployment. Now you need to:\n 1. Edit your %s/settings_local.py file\n 2. Run "fab create_initial_migration -H <host>"\n 3. Connect to server and run "%s manage.py syncdb" from %ssrc\n 4. Run "fab migrate -H <host>"\n 5. Run "fab collect_static -H <host>" """ % (env.SRC_PATH, env.PYTHON_BIN, env.PROJECT_PATH)) '''
    restart_webserver()
def set_up():
    """Create the database, print the welcome banner, and return the newly
    registered user loaded from configuration."""
    db.create_db()
    welcome_text = ''' (\__/) (•ㅅ•) / づ ♥ WELCOME INTO YOUR NEXT LEVEL! -----------------------------'''
    print(Fore.BLUE + Style.BRIGHT + welcome_text + Style.RESET_ALL)
    today = str(datetime.date.today())
    weekday = calendar.day_name[datetime.datetime.today().weekday()]
    print(Fore.CYAN + Style.BRIGHT + today + ", " + weekday + Style.RESET_ALL)
    print(Fore.MAGENTA + Back.WHITE + Style.BRIGHT +
          "\n Oooh! It seems we got a new sweet user ♥‿♥ \n" +
          Style.RESET_ALL)
    # Load user settings from config and persist as a new DB user.
    user = read_configration()
    user.id = db.insert_new_user(user)
    return user
def index():
    """Landing page: GET renders the form; POST routes to login or signup."""
    if not os.path.exists(db_name):
        create_db(db_name)
    if request.method == "GET":
        return render_template("index.html")
    if request.method == "POST":
        choice = request.form["logon_btn"]
        if choice == "yes_logon":
            flash("logon btn clicked")
            return redirect(url_for("login_check"))
        if choice == "no_register":
            flash("register clicked")
            return redirect(url_for("register_user"))
        # Any other button value falls through to logout.
        return redirect(url_for("logout"))
def create_app(armazenamento=None):
    """Build and configure the Flask app: OIDC auth, CORS, compression,
    SQLAlchemy database, and application routes."""
    app = Flask(__name__, static_folder=static_folder)
    settings = {
        'SECRET_KEY': 'MySecretIsMyPasswordBlaBla',
        'TESTING': True,
        'DEBUG': True,
        'SQLALCHEMY_DATABASE_URI': openid_db,
        'OIDC_CLIENT_SECRETS': 'client_secrets.json',
        'OIDC_ID_TOKEN_COOKIE_SECURE': False,
        'OIDC_REQUIRE_VERIFIED_EMAIL': False,
        'OIDC_OPENID_REALM': uri + '/oidc_callback',
    }
    app.config.update(settings)
    CORS(app)
    Compress(app)
    print("app criado.")
    # Local import keeps db construction tied to app creation.
    from db import create_db
    db = create_db()
    db.init_app(app)
    oidc = OpenIDConnect(app, armazenamento)
    add_routes(app, oidc)
    return app
def create_app(config=None):
    """Create a Flask app, optionally configured from a pyfile.

    When no config path is given, fall back to find_config().
    """
    app = Flask(__name__)
    # Make sure the per-user data directory exists.
    distutils.dir_util.mkpath(DIRS.user_data_dir)
    cfg_path = find_config() if config is None else config
    if cfg_path is not None:
        app.config.from_pyfile(cfg_path)
    create_db(app)
    app.client = create_client(app)
    return app
def create_new_db(db_name, javbus_url, stars):
    '''
    Create a new db file and populate it with one row per star.

    :param db_name: path of the database file to create
    :param javbus_url: base URL used to resolve each star's javbus code
    :param stars: iterable of star names to scrape and insert
    :return: None
    '''
    db.create_db(db_name)
    for star_name in stars:
        javbus_code = scrapy.get_star_javbus_code(javbus_url, star_name)
        print(javbus_code)
        star = scrapy.star_content_analyse(javbus_url, javbus_code)
        star.name = star_name
        db.insert_new_star(db_name, star)
def __init__(self, configfile=None):
    """Initialise TV-service state and verify the configuration file.

    :param configfile: path to the configuration file (required)
    :raises Exception: if no config file path is supplied
    """
    global __rootdir__, configdir, databasefile
    self.configfile = configfile
    self.channels = list()
    self.dvbctl = list()
    self.adapters = list()
    self.channelsdir = os.path.join(__rootdir__, 'resources', 'media', 'channels')
    # Pre-ordered channel list is country-specific.
    self.preorderedchannels = os.path.join(
        configdir, 'preorderedchannels-' + get_country() + '.conf')
    if configfile is None:  # was `== None`; identity test is the idiom for None
        raise Exception('TvS Config Error')
    if not os.path.exists(configfile):
        # Config file missing: create the database so later runs can
        # proceed, log the problem, and abort initialisation early.
        db.create_db()
        log_error('Config file missing: ' + configfile)
        return
def define_variables():
    """Read company/period metadata from `a3m.xlsx` into the `db` module
    globals and (re)create the database."""
    from xlrd import open_workbook
    excel_file = open_workbook('a3m.xlsx', encoding_override='utf-8')
    sheet = excel_file.sheet_by_name('Instruction')
    try:
        db.tanimlar['optional'] = sheet.cell_value(7, 2)
    except Exception:
        # Was a bare `except:`; the cell is genuinely optional, so a missing
        # value is ignored -- but system-exiting exceptions now propagate.
        pass
    db.tanimlar['company'] = sheet.cell(6, 2).value
    # Every sheet after the first two is a reporting period.
    for sheet in excel_file.sheets()[2:]:
        db.periodss.append(sheet.name)
    db.Hesaplar = db.make_hesaplar()
    db.create_db()
    db.len_periods = len(db.periodss)
def loop_urls(self, client):
    """Compute tf-idf scores and important-tag words for every crawled URL,
    then persist them via create_db().

    Internal structures:
        tf:  url  -> term -> raw term count in that document
        df:  term -> number of documents containing the term
    """
    tf = {}
    df = dict()
    tf_idf = {}
    important_tags = dict()
    valid_doc_count = 0
    for url in self.url_file_map:
        # Skip already-seen URLs and crawler traps.
        if url not in tf and self.is_not_trap(url, tf):
            valid_doc_count += 1
            file_address = self.get_file_address(url)
            # `with` guarantees the handle is closed even if parsing raises
            # (the original leaked the file on an exception path).
            with open(file_address, "r", encoding="utf8") as f:
                parser = MyHTMLParser()
                parser.feed(f.read())
            text = parser.get_data()
            tf[url] = {}
            word_set = set()  # words already counted in df for this doc
            words = re.split("[^a-z0-9]+", text.lower())
            words = list(filter(None, words))
            for word in words:
                if word not in tf[url]:
                    tf[url][word] = 1
                else:
                    tf[url][word] += 1
                if word not in word_set:
                    if word not in df:
                        df[word] = 1
                    else:
                        df[word] += 1
                    word_set.add(word)
            important_tags[url] = parser.get_important_words()
    # tf-idf = (1 + log10(tf)) * ln(N / df)
    # NOTE(review): tf uses log base 10 but idf uses the natural log --
    # preserved as-is; confirm this asymmetry is intended.
    for doc in tf:
        for term in tf[doc]:
            if term not in tf_idf:
                tf_idf[term] = {}
            tf_idf[term][doc] = (1 + math.log(tf[doc][term], 10)) * \
                math.log(valid_doc_count / df[term])
    create_db(client, tf_idf, important_tags)
def main():
    """Create the DB and its tables on first run, then launch the flush,
    dump and fetch worker threads and wait for all of them."""
    if not os.path.exists(config.db_name):
        db.create_db(config.db_name)
        db.create_table(config.db_name, config.raw_tx_table_name)
        db.create_table(config.db_name, config.delta_tx_table_name)
        db.create_table(config.db_name, config.block_tx_table_name)
    raw_cache = db.DBCache()
    delta_cache = db.DBCache()

    # Background workers that drain the caches into the DB, plus a dumper.
    flush_raw_thread = threading.Thread(target=flash_raw_cache_to_db, args=[raw_cache])
    flush_raw_thread.start()
    flush_block_thread = threading.Thread(target=flash_block_cache_to_db, args=[delta_cache])
    flush_block_thread.start()
    dump_db_thread = threading.Thread(target=dump_db, args=[])
    dump_db_thread.start()

    # Fan out transaction fetchers.
    fetchers = []
    for _ in range(config.get_raw_tx_thread_size):
        worker = threading.Thread(target=get_new_tx, args=[raw_cache, delta_cache])
        fetchers.append(worker)
        worker.start()

    if config.generate_snapshot:
        dump_raw_thread = threading.Thread(target=dump_raw_db, args=[])
        dump_raw_thread.start()
        dump_delta_thread = threading.Thread(target=dump_delta_db, args=[])
        dump_delta_thread.start()

    assign_update_work(raw_cache, delta_cache)

    # Join in the same order the original used.
    if config.generate_snapshot:
        dump_raw_thread.join()
        dump_delta_thread.join()
    for worker in fetchers:
        worker.join()
    dump_db_thread.join()
    flush_raw_thread.join()
    flush_block_thread.join()
def post(self):
    """Create a database named by the `db_name` form argument.

    Returns an error dict when no name was supplied, otherwise whatever
    create_db() returns.
    """
    args = self.reqparser.parse_args()
    db_name = args['db_name']
    if db_name is None:
        return {'Error': 'Please enter a db_name'}
    return create_db(db_name)
def __init__(self):
    """Build the main window: create the DB on first run, then lay out the
    background image, status bar, and login form."""
    super().__init__()
    # First run: no "bus_route" file yet means no database yet.
    if not os.path.isfile("bus_route"):
        db.create_db()
    self.geometry("900x500")
    self.resizable(0, 0)
    self.p = openPhoto.openPhoto(f"{path}background(copy).png", 900, 500)
    Label(image=self.p).place(x=0, y=0)
    status_bar = Label(text="DIU bus route information management system",
                       font="Arial 10 italic", anchor="w",
                       width=150, height=1, bg="lightgreen")
    status_bar.place(x=0, y=480)
    self.login()
def main():
    """Main bot function: wire up handlers, create the DB, start polling."""
    BOT_TOKEN = config('BOT_TOKEN')
    updater = Updater(BOT_TOKEN, use_context=True)
    dispatcher = updater.dispatcher

    # Conversation: create a new reminder (subject -> description -> save).
    set_reminder_handler = ConversationHandler(
        entry_points=[CommandHandler('start_memorizing', start_memorizing)],
        states={
            SUBJECT: [MessageHandler(Filters.text & ~Filters.command, set_subject)],
            DESCRIPTION: [
                CommandHandler('skip', skip_description),
                MessageHandler(Filters.text & ~Filters.command, set_description),
            ],
            SAVE: [CommandHandler('save', save)],
        },
        fallbacks=[CommandHandler('cancel', cancel)],
        allow_reentry=True)

    # Conversation: delete an existing entry.
    stop_memorizing_handler = ConversationHandler(
        entry_points=[CommandHandler('stop_memorizing', stop_memorizing)],
        states={
            DELETE: [MessageHandler(Filters.text & ~Filters.command, delete_entry)],
        },
        fallbacks=[CommandHandler('cancel', cancel)],
        allow_reentry=True)

    # Registration order matters to dispatch priority -- kept as before.
    for handler in (
            CommandHandler(['start', 'help'], start),
            CallbackQueryHandler(show_more),
            set_reminder_handler,
            stop_memorizing_handler,
            CommandHandler('show_my_list', show_my_list)):
        dispatcher.add_handler(handler)

    create_db()
    updater.start_polling()
    updater.idle()
def init():
    """
    Create the database and tables the script needs, and pre-fill them.
    :return: None
    """
    global functions
    config = configparser.ConfigParser()
    config.read('NoticeReminder.ini', 'utf-8')
    db_name = config['Database']['DatabaseName']
    user_table_name = config['Database']['UserTableName']
    department_table_names = config['Database']['DepartmentTableNames'].split(
        ',')
    logging.info('BEGIN INIT')
    print('BEGIN INIT')
    db.create_db(db_name)  # create the database
    if not db.is_table_exist(db_name, user_table_name):
        # Create the user table if it does not exist yet.
        db.create_table(db_name, user_table_name, 'sid text, email text')
    for i in range(len(department_table_names)):  # iterate department names
        if not db.is_table_exist(
                db_name, department_table_names[i]):
            # Create the department's table if it does not exist yet.
            db.create_table(db_name, department_table_names[i],
                            'title text, url text, date text')
        if not db.is_column_exist(
                db_name, user_table_name, department_table_names[i]):
            # Add the department column to the user table if missing.
            db.insert_column(db_name, user_table_name,
                             department_table_names[i] + ' int')
        if not db.fetch_row_all(db_name, department_table_names[i],
                                '*') and i < len(functions):
            # Department table is empty: pre-fill it with the notices
            # currently on the site, so the next run can diff against them.
            new_notice = functions[i]
            # The site lists notices newest-first, so store them in reverse.
            for notice in new_notice[::-1]:
                store_notice_locally(department_table_names[i], notice[0],
                                    notice[1], notice[2])
    logging.info('FINISH INIT')
    print('FINISH INIT')
def deploy_project(target_dir='tolong'):
    """ Deploys a project for the first time """
    setup_env(target_dir)
    # Refuse to clobber an existing deployment.
    if exists("%s" % env.PROJECT_PATH):
        abort('Project already deployed. Run "fab update_project -H <host>" instead')
    setup_os()
    # Project directory owned by the web-server user.
    require.files.directory(env.PROJECT_PATH, use_sudo=True, owner='www-data', group='www-data')
    update_source_code(deploy=True)
    setup_virtualenv()
    log_dir = os.path.join(env.PROJECT_PATH, 'logs')
    require.files.directory(log_dir, use_sudo=True, owner='www-data', group='www-data')
    setup_webserver()
    create_db()
    # Render settings_local.py from template with the DB credentials,
    # then move it into the project source tree.
    context = {
        'db_name': env.db_name,
        'db_user': env.db_user,
        'db_pass': env.db_pass,
    }
    upload_template('fabfiles/conf_templates/settings_local.py.tpl', env.HOME_PATH, context=context)
    sudo('mv %ssettings_local.py.tpl %s/%s/settings_local.py' % (env.HOME_PATH, env.SRC_PATH, env.PROJECT_NAME))
    install_project_requirements()
    collect_static()
    # compress_static()
    sudo('chown -R www-data:www-data %s' % env.PROJECT_PATH)
    sudo('{0} syncdb'.format(env.MANAGE_BIN))
    migrate()
    # Historical post-deploy checklist, kept disabled as a bare string:
    ''' print("""Finished deployment. Now you need to:\n 1. Edit your %s/settings_local.py file\n 2. Run "fab create_initial_migration -H <host>"\n 3. Connect to server and run "%s manage.py syncdb" from %ssrc\n 4. Run "fab migrate -H <host>"\n 5. Run "fab collect_static -H <host>" """ % (env.SRC_PATH, env.PYTHON_BIN, env.PROJECT_PATH)) '''
    restart_webserver()
def init_iata(db_file):
    """Fetch the IATA list and ensure the local database mirrors it.

    :param db_file: IATA database filename
    :return: the IATA list on success; an error string (or the checker's
             error value) otherwise
    """
    iata_list = get_iata_from_url()
    if iata_list is None:
        return "No connection to URL"
    if os.path.isfile(db_file):
        # Database exists: reconcile it with the freshly fetched list.
        status = check_all_iata_in_db(db_file, iata_list)
        if status == 1:
            print("Database was updated")
        elif status != 0:
            return status
    else:
        # First run: create the database and seed it with the fetched list.
        create_db(db_file)
        init_db(db_file, iata_list, 1)
    return iata_list
def main(parsed, actions):
    """Open a DB session, seed the RNG, then run each requested action.

    :raises ValueError: if an action name is not in ACTIONS.
    """
    db = create_db(parsed['db'])
    sess = new_session(db)
    random.seed(parsed['seed'])
    for action in actions:
        handler = ACTIONS.get(action)
        if handler is None:
            raise ValueError("valid actions are {}".format(list(
                ACTIONS.keys())))
        print("--Running '{}'--".format(action))
        handler(sess, parsed)
def insert_to_db(limit=1000):
    # Populate the database with departments, their courses and lecture
    # times scraped from the course catalogue.  Python 2 code (print
    # statement, .decode on byte strings).
    # limit: maximum number of departments to import.
    import db
    db.create_db()
    for departemnt in departemnt_list()[:limit]:
        dep_id = db.add_department(
            departemnt["departemnt_num"].decode("utf8"),
            departemnt["departemnt_name"].decode("utf8"))
        # Negative id signals the insert failed; skip this department.
        if dep_id < 0:
            print "Error adding dep", departemnt["departemnt_num"]
            continue
        for course in departemnt_courses(int(departemnt["departemnt_num"])):
            lec = course["houres"]
            if not lec:
                print "Not found lecture times for course", course[
                    "course_num"]
                continue
            # Course number format: <dept>.<degree>.<4-digit course>.
            course_id = db.add_course(
                _num="%d.%d.%04d" % (
                    course["departemnt"],
                    course["degree_level"],
                    course["course_num"],
                ),
                _name=course["course_name"].decode("utf8"),
                _dep=dep_id)
            db.add_lecture(
                _course_id=course_id,
                _day=lec["day"],
                _start_time=lec["start_time"],
                _end_time=lec["end_time"],
                _location=lec["place"].decode("utf8"),
            )
def create_db_file(self):
    """Ask the user for a path, create a new SQLite DB there, and start a
    matching log file stamped with the creation time."""
    db_file, _ = QFileDialog.getSaveFileName(
        self, "Create DB", "",
        "SQLite files (*.sqlite);;All Files (*)",
    )
    if not db_file:
        return  # user cancelled the dialog
    if not db_file.endswith(db.DB_FILE_EXT):
        db_file += db.DB_FILE_EXT
    db.create_db(db_file)
    log_path = db_file + common.DB_LOG_FILE_EXT
    common.log_file_create(log_path)
    stamp = datetime.today().strftime('%Y-%m-%d %H:%M:%S')
    common.log_file_record_add(log_path, f"Created: {stamp}")
    common.log_file_record_add(
        log_path,
        '-----------------------------------------------------------------'
    )
def fetch_url(job_id, url):
    """Celery task: download `url`, prettify its HTML, and store the result
    on the job row -- or the error text if anything fails.

    :param job_id: id of the job row to update
    :param url: URL to fetch
    """
    # Under test, reuse the module-level db; otherwise open a fresh handle.
    # (was `== True`; truthiness is the idiomatic check for a bool flag)
    job_db = db if app.config['TESTING'] else create_db()
    try:
        resp = requests.get(url)
        soup = BS.BeautifulSoup(resp.text, 'html.parser')
        html = soup.prettify()
        update_job(job_db=job_db, job_id=job_id, html=html,
                   status=JOB_STATUS_COMPLETE)
        app.logger.info("CELERY TASK COMPLETE FOR: %s", url)
    except Exception as exception:
        # Persist the failure reason so the job row reflects what happened.
        update_job(job_db=job_db, job_id=job_id, html=str(exception),
                   status=JOB_STATUS_ERROR)
        app.logger.info("CELERY TASK FAILED FOR: %s", url)
def run_strategy(interval, coins, db_loc, step, balances):
    """Backtest the bot over [start, stop) stepping by `step`, returning a
    BacktestResult.  `balances` seeds the simulated account."""
    start, stop = interval
    assert start < stop
    log.debug("Backtesting for currencies: {}".format(coins))
    log.debug("Running backtest between {}->{} at {} intervals".format(
        start, stop, step))
    db = create_db(db_loc)
    sess = new_session(db)
    period = start
    account = Account(balances, period, coins=coins)
    start_value = account_value_btc(sess, account, start)
    # pass time bounds to bot object for data pre-fetching
    bot = Bot(sess, account, beginning=start, now=stop)
    low = high = start_value
    i = 0
    while period < stop - step:
        period += step
        bot.tick(period)
        i += 1
        # 0.5 * 6 * 24 == 72.0 ticks; int % float works, fires twice a day.
        if i % (0.5 * 6 * 24) == 0:  # twice per day
            value = log_value(account, period, sess)
            low = min(low, value)
            high = max(high, value)
    # Liquidate everything back to BTC so finish_value is comparable.
    close_alt_positions(sess, account, period)
    assert len(account.coins) == 1, f"unexpected positions: {account.coins}"
    finish_value = account.balance('BTC')
    low = min(low, finish_value)
    high = max(high, finish_value)
    gains, losses = account.evaluate_trades()
    results = BacktestResult(start, stop, step, start_value, finish_value,
                             account.fees, account.txns, gains, losses,
                             bot.out_of_btc, bot.hit_coin_limit, high, low)
    results.print_results()
    return results
def buy_and_hold(interval, coins, db_loc, step, balances):
    """Benchmark strategy: split BTC evenly across all coins (keeping one
    share in BTC) at `start`, then simply hold until `stop`."""
    start, stop = interval
    account = Account(balances)
    # One share per alt plus one share left in BTC.
    btc_per_coin = account.balance('BTC') / (len(coins) + 1)
    with_fees = btc_per_coin - (btc_per_coin * 0.0025)
    db = create_db(db_loc)
    sess = new_session(db)
    for coin in coins:
        price = Ticker.current_ask(sess, coin, now=start)
        if not price:
            continue  # no quote for this coin at `start`
        amount = with_fees / price
        cost = account.trade(coin, amount, price, start)
        account.update('BTC', cost, start)
    start_value = account_value_btc(sess, account, now=start)
    finish_value = account_value_btc(sess, account, stop)
    results = BacktestResult(start, stop, step, start_value, finish_value,
                             account.fees, account.txns,
                             gain_txns=[], loss_txns=[],
                             out_of_btc=0, hit_coin_limit=0,
                             high=max(start_value, finish_value),
                             low=min(start_value, finish_value))
    log.debug("\nBuy and hold\n")
    results.print_results()
    return results
from livre import create_app
from db import create_db

# Build the application, then create its database tables inside an
# application context (create_db needs app.db to be bound).
app = create_app()

with app.app_context():
    create_db(app.db)
# soma CLI mode dispatch (fragment: the MODE_ADD branch continues past
# this view).

# Every mode except `init` requires an already-initialized database.
if args.mode != MODE_INIT:
    if db.get_config('initialized') is None:
        prompt.fail('Please initialize database first (soma init)')
        exit(1)

if args.mode == MODE_INIT:
    if db.get_config('initialized') is not None:
        prompt.fail('Database is already initialized!')
        exit(1)
    else:
        try:
            # Default the master username to the invoking OS user.
            current_user = pwd.getpwuid(os.getuid()).pw_name
            soma_user = prompt.string(
                'Please provide soma master username (blank to use `%s`): ' % current_user,
                default=current_user,
                pattern=prompt.Validators.username)
            soma_path = prompt.directory(
                'Directory to create problems (blank to use `/home`): ',
                default='/home')
            db.create_db(soma_user, soma_path)
            prompt.success('Database is successfully initialized')
        except Exception as err:
            prompt.fail('Failed to create database')
            prompt.show(err)
            exit(1)
elif args.mode == MODE_ADD:
    # TODO: prevent injection
    # TODO: revert changes on fail
    check_root()
    # common config
    prob_source = prompt.string('Problem source: ',
                                pattern=prompt.Validators.no_space)
    prob_name = prompt.string('Problem name: ',
                              pattern=prompt.Validators.no_space)
    prob_type = prompt.choice('Problem type (local / remote): ',
                              ('local', 'remote'))
# File-extension -> MIME type for cached static images.
EXT_TO_MIMETYPE = {
    "png": "image/png",
    "gif": "image/gif",
    "jpg": "image/jpeg",
    "jpeg": "image/jpeg",
}


def static_cache(filepath, create_data_func, mimetype="text/plain"):
    # Serve `filepath` from public/, generating it via create_data_func()
    # if not cached on disk yet.
    # NOTE(review): existence is checked at `filepath` but the file is
    # written to "public/" + filepath -- confirm the intended cwd/layout.
    if not os.path.exists(filepath):
        with open("public/" + filepath, "wb") as f:
            f.write(create_data_func())
    return bottle.static_file(filepath, root="public/", mimetype=mimetype)


app = bottle.Bottle(autojson=True)
# Plugin injects a SQLAlchemy session (`db`) into every route callback.
db_plugin = bottle_sqlalchemy.Plugin(create_db("sqlite:///db.sqlite3"))
app.install(db_plugin)


@app.route("/")
def index(db):
    return bottle.static_file("index.html", root="public/")


@app.get("/posts")
def posts(db):
    # Keyword search over posts (LIKE %keyword%); fragment ends mid-function.
    result = []
    keyword = bottle.request.params.keyword.strip()
    if keyword != "":
        cond = "%{0}%".format(keyword)
        result = [{"id": r.id, "text": r.text,
                   "url": "{}.{}".format(r.id, r.ext)}
                  for r in db.query(Post).filter(Post.text.like(cond)).all()]
    # Tail of a message handler: log that the reply was produced.
    logger.info(f'Answer ready for {update.effective_user.name} '
                f'at {update.effective_message.date}')


def run():
    # Try to obtain a fresh SOCKS5 proxy from pubproxy.com; fall back to
    # the configured default request kwargs when none is available.
    get_request = requests.get('http://pubproxy.com/api/proxy?limit=1&'
                               'format=txt&port=8080&level=anonymous&'
                               'type=socks5&country=FI|NO|US&'
                               'https=True')
    proxy_response = get_request.text
    if proxy_response != 'No proxy':
        REQUEST_KWARGS = {'proxy_url': f'https://{proxy_response}'}
    else:
        logger.info(
            f"Using proxy by default: "
            f"{CONFIG['DEFAULT_REQUEST_KWARGS']}")
        REQUEST_KWARGS = CONFIG['DEFAULT_REQUEST_KWARGS']
    updater = Updater(CONFIG['TOKEN_ID'],
                      request_kwargs=REQUEST_KWARGS,
                      use_context=True)
    add_handlers(updater)
    updater.start_polling()


if __name__ == '__main__':
    # Make sure the database exists before the bot starts answering.
    create_db()
    run()
def init_db():
    """HTTP endpoint: (re)create the database and report success as JSON."""
    create_db()
    return jsonify(dict(error='', result='ok'))
def run_ainsert():
    """Run two async-insert workers concurrently and wait for both."""
    t1 = Thread(target=ainsert, args=(range(1, 11), 1, 0.1))
    t2 = Thread(target=ainsert, args=(range(11, 21), 2, 0.2))
    t1.start()
    t2.start()
    t1.join()
    t2.join()


def print_data():
    """Dump every row of the table to stdout."""
    conn = psycopg2.connect(database=settings.DB_NAME,
                            user=settings.DB_USER,
                            password=settings.DB_PASSWORD,
                            host=settings.DB_HOST,
                            async_=0)
    try:
        curs = conn.cursor()
        # NOTE(review): DB_NAME is interpolated as the *table* name here --
        # confirm that is intended and not user-controlled (SQL injection).
        curs.execute(f"select * from {DB_NAME}")
        for row in curs.fetchall():
            print(f'id={row[0]}, tid={row[1]}')
        curs.close()
    finally:
        # Close the connection even if the query fails (the original
        # leaked it on an exception).
        conn.close()


with create_db(DB_NAME):
    run_ainsert()
    print_data()
def __init__(self):
    # Keep an in-memory list of this address book's contact records,
    # then connect to the backend and make sure the database exists.
    self.contactos = []
    connect()
    create_db()
from db import create_db

# Script entry point: build the database schema.
if __name__ == "__main__":
    create_db()
    # Tail of exit_error(status, message): blank line ends the CGI headers,
    # then a JSON error body, then terminate the script.  (Python 2.)
    print
    print json.dumps({"error": message})
    sys.exit(0)


form = cgi.FieldStorage()
query = urlparse.parse_qs(os.getenv("QUERY_STRING") or "")
action = form.getfirst("action")
username = form.getfirst("username")
password = form.getfirst("password")

# Unauthenticated actions - create a new user, and get list of supported banks
if action == "newuser":
    try:
        if username and password:
            json_print(db.create_db(username, password));
            sys.exit(0)
        else:
            exit_error(400, "Incomplete username/password")
    except Exception, e:
        # NOTE(review): `str + exception` raises TypeError in Python 2;
        # the message should use str(e) -- confirm before relying on it.
        exit_error(500, "Couldn't create new user " + e)
elif action == "getbanks":
    json_print(config.banks)
    sys.exit(0)

# Get user's session from cookies if we can.
# If so, decrypt and load the session file, and pull out username/password
sessionfn = None
if os.getenv("HTTP_COOKIE"):
    try:
        cookies = Cookie.SimpleCookie()
def __init__(self):
    # Wire up the browser-action driver, make sure the database exists,
    # then open a session on it.
    self.driver = Action()
    create_db()
    self.session = init_db()
#!/usr/bin/python2.7 import db import os db.create_db()
    # Tail of helpmenu(): remaining usage text.  (Python 2.)
    print '\nOptional Options:\n'
    print '-f Path to the local folder (default: current folder)\n'


if __name__ == '__main__':
    flag = 0
    first_time = 'x'
    # Keep asking until the user answers y or n.
    while (first_time != 'y') and (first_time != 'n'):
        first_time = raw_input('Fetch content from Drive to local folder? [y/n]: ')
    # No args: use the cwd; `-f <path>`: use the given folder.
    if len(sys.argv) == 1:
        path = os.getcwd()
        flag = 1
    elif len(sys.argv) == 3:
        if sys.argv[1] == '-f':
            path = sys.argv[2]
            flag = 1
        else:
            helpmenu()
    else:
        helpmenu()
    if flag > 0:
        drive_service = authorize()
        # Unbuffered binary log so progress lines appear immediately.
        log_file = open('log', 'wb', 0)
        write_str = 'Monitoring folder: ' + path + '\n'
        log_file.write(write_str)
        db = create_db(path, drive_service, log_file)
        if first_time == 'y':
            print 'Downloading all files and folders from Drive to your local folder...'
            mirror(drive_service, db.drivedb, log_file)
        watch(path, drive_service, db, log_file)
def parse_excel_file(file):
    '''
    Parse a trial-balance excel workbook into the database.

    :param file: String, path to the excel file.
    :return: None (rows are written through db.session).
    '''
    excel_file = open_workbook(file, encoding_override='utf-8')

    def prepare_mapping():
        # Load the account -> lead-code mapping sheet into db.Lead rows.
        s = excel_file.sheet_by_name("Mapping")
        for x in range(1, s.nrows):
            db.session.add(
                db.Lead(account=s.cell(x, 0).value,
                        account_name=s.cell(x, 1).value,
                        lead_code=s.cell(x, 2).value,
                        name=s.cell(x, 3).value))
        db.session.commit()

    def define_variables():
        # Company name lives on the first sheet; every sheet after the
        # second is a reporting period.
        sheet = excel_file.sheet_by_index(0)
        db.tanimlar['company'] = sheet.cell(6, 2).value
        for period_sheet in excel_file.sheets()[2:]:
            db.periodss.append(period_sheet.name)

    donus = dict()  # account number -> per-sheet data (kept for parity; never returned)
    define_variables()
    db.Hesaplar = db.make_hesaplar()
    db.create_db()
    prepare_mapping()

    for i in range(2, excel_file.nsheets):
        sheet = excel_file.sheet_by_index(i)
        key = sheet.name  # period name == dynamic column on Hesaplar
        for row in range(1, sheet.nrows):
            temp = []
            first_cell = sheet.cell_value(row, 0)
            # Account numbers may come back as floats; normalise to str.
            # (was `type(...) is float`; isinstance is the correct check)
            hucre = str(int(first_cell)) if isinstance(first_cell, float) else first_cell
            if hucre not in donus:
                donus[hucre] = dict()
            for col in range(sheet.ncols):
                value = sheet.cell_value(row, col)
                # was `col is 0` -- identity comparison against an int
                # literal is implementation-defined; use ==.
                if col == 0 and isinstance(value, float):
                    temp.append(str(int(value)))
                else:
                    temp.append(value)
            gecici = db.session.query(db.Hesaplar).filter_by(number=hucre).first()
            ana_hesap = hucre[:3]  # main account = first three digits
            # Single lookup instead of the original duplicated query.
            lead = db.session.query(db.Lead).filter_by(account=ana_hesap).first()
            lead_cod = lead.lead_code if lead is not None else None
            if gecici is not None:
                # Existing account row: set this period's value on it.
                setattr(gecici, key, temp[4])
                print(hucre, ' update')
            else:
                db.session.add(
                    db.Hesaplar(number=hucre, name=temp[1], len=len(hucre),
                                ana_hesap=ana_hesap, lead_code=lead_cod,
                                **{key: temp[4]}))
                print(hucre, ' add')
    db.session.commit()
def test_create_db(self):
    """Smoke test: create_db() must run without raising inside a request
    context."""
    with flask_app.test_request_context():
        create_db()
        print('OK - test_create_db')
        # Tail of an orders serializer: flatten one DB row into the JSON
        # structure the front-end expects.
        res.append({
            'id': item['id'],
            'fio': f"{item['last_name']} {item['name']} {item['surname']}",
            'phone': item['phone'],
            'price': item['price'],
            'time': item['time'],
            'payment_type': item['payment_type'] + ',',
            'services': [
                item['sname'],
            ]
        })
    return res


@app.route('/rejected', methods=['GET'])
def rejected():
    # Static page shown when an order is declined.
    return render_template('rejected.html')


@app.route('/success', methods=['GET'])
def success():
    # Static page shown when an order succeeds.
    return render_template('success.html')


if __name__ == '__main__':
    db.create_db()
    # Fixed delay before seeding -- presumably waits for the DB container;
    # TODO confirm.
    time.sleep(5)
    db.fill_db()
    app.secret_key = 'super secret key'
    app.run(debug=True, host='0.0.0.0', port='8008')
from flask import Flask
from dotenv import find_dotenv, load_dotenv
from db import create_db
from routes.index import index_blueprint as index_route
from routes.login import login_blueprint as login_route
from routes.signup import signup_blueprint as signup_route

# Load environment variables before the app reads any config.
load_dotenv(find_dotenv('.env'))

app = Flask(__name__)
app.register_blueprint(index_route)
app.register_blueprint(login_route)
app.register_blueprint(signup_route)

if __name__ == '__main__':
    # Create the tables inside an app context, then run the dev server.
    with app.app_context():
        db = create_db()
        db.init_app(app)
        db.create_all()
    app.run('localhost', port=6000, debug=True)
        # Tail of update(): collect "SET `col`=?" fragments, then run the
        # UPDATE keyed on the primary key.  (Python 2 ORM code.)
        L.append('`%s`=?' % k)
        args.append(arg)
        pk = self.__primary_key__.name
        args.append(getattr(self, pk))
        db.update('update `%s` set %s where %s=?' % (self.__table__, ','.join(L), pk), *args)
        return self  # allow chaining

    def delete(self):
        # Fire the optional pre_delete hook, then DELETE by primary key.
        self.pre_delete and self.pre_delete()
        pk = self.__primary_key__.name
        args = (getattr(self, pk), )
        db.update('delete from `%s` where `%s`=?' % (self.__table__, pk), *args)
        return self

    def insert(self):
        # Fire the optional pre_insert hook, fill defaults for any missing
        # insertable fields, then INSERT the row.
        self.pre_insert and self.pre_insert()
        params = {}
        for k, v in self.__mappings__.iteritems():
            if v.insertable:
                if not hasattr(self, k):
                    setattr(self, k, v.default)
                params[v.name] = getattr(self, k)
        db.insert('%s' % self.__table__, **params)
        return self


if __name__ == '__main__':
    # Smoke test: connect and rebuild the `user` table.
    logging.basicConfig(level=logging.DEBUG)
    db.create_db("root", "", "test", "127.0.0.1")
    db.update('drop table if exists user')
    db.update('create table user (id int primary key, name text, email text, passwd text, last_modified real)')