def do_insert(flag_queue, task_queue, res_queue, db_info):
    """Worker loop: insert finished records into the done-DB, purge them from the todo-DB.

    Reports per-record success/failure on res_queue; runs until should_die() fires.
    """
    logger.info('Starting: do_insert()')
    insert_done_sql = db_info['sqls']['insert-done']
    delete_todo_sql = db_info['sqls']['delete-todo']
    done_conn = gen_db_conn(db_info['done-db'])
    todo_conn = gen_db_conn(db_info['todo-db'])
    while not should_die(flag_queue):
        try:
            record = task_queue.get()
            _do_insert(done_conn, insert_done_sql, record)
            # record[0] is the todo-row id; delete it once the insert succeeded.
            execute_sql(todo_conn, delete_todo_sql, (record[0],), commit=True)
            res_queue.put(True)
        except Exception:
            # Any failure: report it and rebuild both connections, which may be broken.
            res_queue.put(False)
            done_conn.close()
            done_conn = gen_db_conn(db_info['done-db'])
            todo_conn.close()
            todo_conn = gen_db_conn(db_info['todo-db'])
            logger.error('loop_insert requests.Exception: %r' % traceback.format_exc())
            time.sleep(0.5)
    done_conn.close()
    todo_conn.close()
    logger.warning('@@@ Exit: do_insert()')
def post(self, request, app_name, env_name, app_path):
    """Dispatch an environment action: 'rename' the env or 'eval' an expression in it."""
    action = request.data['action']
    if action == 'rename':
        env_path = _get_existent_env_path(app_path, env_name)
        new_env_name = request.data['name']
        check_name(new_env_name)
        new_env_path = _get_absent_env_path(app_path, new_env_name)
        stop_patsaks(get_id(request.dev_name, app_name, env_name))
        write_file(new_env_path, new_env_name)
        schema_prefix = get_id(request.dev_name, app_name) + ':'
        execute_sql('SELECT ak.rename_schema(%s, %s)',
                    (schema_prefix + env_name.lower(),
                     schema_prefix + new_env_name.lower()))
        os.remove(env_path)
        return HttpResponse()
    if action == 'eval':
        # Evaluation may take long; release the request lock first.
        request.lock.release()
        request.lock = None
        if env_name == _RELEASE_ENV_NAME:
            env_name = None
        response = send_to_ecilop(
            'EVAL ' + get_id(request.dev_name, app_name, env_name),
            request.data['expr'])
        assert response
        status, result = response[0], response[1:]
        assert status in ('E', 'F', 'S')
        if status == 'E':
            raise Error(result, status=NOT_FOUND)
        return {'ok': status == 'S', 'result': result}
    raise Error('Unknown action: "%s"' % action)
def process_msg(self, body, conn, sql):
    """Extract task fields from a message body and insert one row via `sql`.

    Duplicate rows (IntegrityError) are silently ignored.
    Fix: the original assigned `digest = params.get('digest')` and then
    unconditionally overwrote it below — the dead assignment is removed.
    """
    params = body.get('params')
    # task_uuid = params.get('external_id')
    client_id = params.get('additional_info').get('client_id')
    thunder_hash = params.get('thunder_hash')
    # URL
    url = params.get('url')
    url_loc = url.get('location')
    url_hash = url.get('hash')
    # Seed
    seed_file = params.get('seed_file', {})
    seed_hash = seed_file.get('hash', '')
    swift_path = seed_file.get('path', '')
    algorithm = params.get('digest_algorithm')
    mime_type = params.get('mime_type')
    file_name = params.get('file_name')
    file_size = params.get('file_size')
    # Prefer the URL hash; fall back to the seed-file hash.
    digest = url_hash if url_hash else seed_hash
    record = (cfg['custom-type'], client_id, thunder_hash, url_loc, digest,
              algorithm, mime_type, file_name, file_size, swift_path)
    try:
        execute_sql(conn, sql, record, commit=True)
    except mdb.IntegrityError:
        # Row already present; treat as success.
        pass
def process_msg(self, body, db_conn, sql):
    """Parse a (possibly JSON-encoded) message body, optionally push hashes to vddb,
    and insert one task row via `sql`; duplicates are ignored."""
    if isinstance(body, (str, unicode)):
        body = json.loads(body)
    params = body.get('params')
    task_uuid = params.get('external_id')
    client_id = params.get('additional_info').get('client_id')
    thunder_hash = params.get('thunder_hash')
    digest = params.get('digest')
    # URL
    url = params.get('url')
    url_loc = url.get('location')
    url_hash = url.get('hash')
    # Seed
    seed_file = params.get('seed_file', {})
    seed_hash = seed_file.get('hash', '')
    swift_path = seed_file.get('path', '')
    algorithm = params.get('digest_algorithm')
    mime_type = params.get('mime_type')
    file_name = params.get('file_name')
    file_size = params.get('file_size')
    if cfg['vddb-async']['should-insert']:
        logger.info('Insert to vddb: %s' % task_uuid)
        insert_vddb(cfg['vddb-async']['url'],
                    [thunder_hash, url_hash, seed_hash], logger)
    record = (cfg['custom-type'], client_id, thunder_hash, url_loc, digest,
              algorithm, mime_type, file_name, file_size, swift_path)
    try:
        execute_sql(db_conn, sql, record)
    except mdb.IntegrityError:
        # Already inserted; nothing to do.
        pass
def delete(self, request, app_name, env_name, app_path):
    """Remove an environment: stop its patsaks, delete its file and DB schema."""
    env_path = _get_existent_env_path(app_path, env_name)
    stop_patsaks(get_id(request.dev_name, app_name, env_name))
    os.remove(env_path)
    execute_sql('SELECT ak.drop_schema(%s)',
                (get_id(request.dev_name, app_name, env_name),))
    return HttpResponse()
def post(self, request, app_name, env_name, app_path):
    """Handle an environment action request; supports 'rename' and 'eval'."""
    action = request.data['action']
    if action == 'rename':
        env_path = _get_existent_env_path(app_path, env_name)
        new_env_name = request.data['name']
        check_name(new_env_name)
        new_env_path = _get_absent_env_path(app_path, new_env_name)
        stop_patsaks(get_id(request.dev_name, app_name, env_name))
        write_file(new_env_path, new_env_name)
        schema_prefix = get_id(request.dev_name, app_name) + ':'
        execute_sql('SELECT ak.rename_schema(%s, %s)',
                    (schema_prefix + env_name.lower(),
                     schema_prefix + new_env_name.lower()))
        os.remove(env_path)
        return HttpResponse()
    if action == 'eval':
        # Drop the request lock before the potentially slow eval round-trip.
        request.lock.release()
        request.lock = None
        if env_name == _RELEASE_ENV_NAME:
            env_name = None
        response = send_to_ecilop(
            'EVAL ' + get_id(request.dev_name, app_name, env_name),
            request.data['expr'])
        assert response
        status = response[0]
        result = response[1:]
        assert status in ('E', 'F', 'S')
        if status == 'E':
            raise Error(result, status=NOT_FOUND)
        return {'ok': status == 'S', 'result': result}
    raise Error('Unknown action: "%s"' % action)
def do_insert(flag_queue, task_queue, res_queue, db_info):
    """Consume records from task_queue, persisting each to the done-DB and
    deleting it from the todo-DB; signal the outcome on res_queue."""
    logger.info('Starting: do_insert()')
    insert_done_sql = db_info['sqls']['insert-done']
    delete_todo_sql = db_info['sqls']['delete-todo']
    done_conn = gen_db_conn(db_info['done-db'])
    todo_conn = gen_db_conn(db_info['todo-db'])
    while not should_die(flag_queue):
        try:
            record = task_queue.get()
            _do_insert(done_conn, insert_done_sql, record)
            execute_sql(todo_conn, delete_todo_sql, (record[0],), commit=True)
            res_queue.put(True)
        except Exception:
            # On failure report False and recreate both connections defensively.
            res_queue.put(False)
            done_conn.close()
            done_conn = gen_db_conn(db_info['done-db'])
            todo_conn.close()
            todo_conn = gen_db_conn(db_info['todo-db'])
            logger.error('loop_insert requests.Exception: %r' % traceback.format_exc())
            time.sleep(0.5)
    done_conn.close()
    todo_conn.close()
    logger.warning('@@@ Exit: do_insert()')
def load_osm(DATA_DIR, src_file, credentials_dict, sql_file, engine):
    """
    Load OpenStreetMap data to database

    Parameters
    ----------
    DATA_DIR : str
        Directory where raw data are stored locally
    src_file : str
        Name of .pbf file
    credentials_dict : dict
        Dictionary of credential elements (keys: dbname, host, port, user)
    sql_file : str
        Name of SQL file for updating OSM data
    engine : SQLAlchemy engine object

    Returns
    -------
    None
    """
    os.chdir(DATA_DIR)
    # Fix: build an argument list and avoid shell=True — the original
    # concatenated credentials into a shell string, which breaks on values
    # containing spaces/metacharacters and is shell-injection-prone.
    # Automatically uploads to PUBLIC schema.
    command = [
        'osm2pgsql', '--slim', '--hstore',
        '-d', credentials_dict['dbname'],
        '-H', credentials_dict['host'],
        '-P', credentials_dict['port'],
        '-U', credentials_dict['user'],
        src_file,
    ]
    print(f"Uploading file {src_file}")
    subprocess.call(command)
    print("Done")
    # Rename the files and move to RAW schema
    execute_sql(sql_file, engine, read_file=True)
def tearDown(self):
    """Test teardown: drop all DB schemas/tablespaces and wipe on-disk state."""
    execute_sql('SELECT ak.drop_all_schemas()')
    # NOTE(review): dev_name comes from the filesystem and is interpolated
    # into SQL directly — assumed trusted in the test environment.
    for dev_name in os.listdir(ROOT.devs):
        if dev_name != ANONYM_NAME:
            execute_sql('DROP TABLESPACE IF EXISTS "%s"' % dev_name)
    Popen('sudo rm -r %s %s' % (ROOT.devs, ROOT.trash), shell=True)
    shutil.rmtree(ROOT.locks)
    shutil.rmtree(ROOT.domains)
def create_cnn_sample(sql_dir, engine, min_pings_init, min_dist):
    """Create the CNN sample-trajectories table by running the parameterized SQL file."""
    # Parameters forwarded into create_sample_trajectories.sql.
    params = {
        'min_pings_init': int(min_pings_init),
        'min_dist': float(min_dist),
    }
    sql_file = sql_dir / 'create_sample_trajectories.sql'
    execute_sql(sql_file, engine, read_file=True, params=params)
    print('Created table of sample trajectories for CNN.')
def post(self, request, app_name, app_path):
    """Create a new environment: DB schema first, then the env file."""
    env_name = request.data['name']
    check_name(env_name)
    env_path = _get_absent_env_path(app_path, env_name)
    execute_sql(CREATE_SCHEMA_SQL,
                (get_id(request.dev_name, app_name, env_name),))
    write_file(env_path, env_name)
    return HttpResponse(status=CREATED)
def delete(self, request, app_name, env_name, app_path):
    """Delete an environment and its backing schema."""
    env_path = _get_existent_env_path(app_path, env_name)
    stop_patsaks(get_id(request.dev_name, app_name, env_name))
    os.remove(env_path)
    env_id = get_id(request.dev_name, app_name, env_name)
    execute_sql('SELECT ak.drop_schema(%s)', (env_id,))
    return HttpResponse()
def post(self, request, app_name, app_path):
    """Create an environment for an app and return 201 Created."""
    env_name = request.data['name']
    check_name(env_name)
    env_path = _get_absent_env_path(app_path, env_name)
    schema_id = get_id(request.dev_name, app_name, env_name)
    execute_sql(CREATE_SCHEMA_SQL, (schema_id,))
    write_file(env_path, env_name)
    return HttpResponse(status=CREATED)
def delete():
    """Soft-delete the reminder named by the 'id' query parameter."""
    sql = "UPDATE reminders SET deleted = %(deleted)s WHERE id = %(id)s"
    params = {'id': request.args.get('id'), 'deleted': 1}
    execute_sql(sql, params, True, None)
    return jsonify({"result": "Reminder Deleted Successfully"})
def uncomplete():
    """Mark the reminder named by the 'id' query parameter as NOT completed.

    Fix: the original response said "marked as completed." even though the
    update sets completed='No' — the message now matches the action.
    """
    sql = (
        "UPDATE reminders SET completed = %(completed)s WHERE id = %(id)s")
    params = {'id': request.args.get('id'), 'completed': 'No'}
    execute_sql(sql, params, True, None)
    return jsonify({"result": "Reminder marked as uncompleted."})
def _do_insert(done_conn, insert_done_sql, record):
    """Insert one record into the done-DB, then register its hashes with vddb.

    record layout used here: [0]=row id, [3]=thunder hash, [5]=digest;
    record[1:-1] are the SQL bind values.
    """
    rid = record[0]
    thunder_hash = record[3]
    digest = record[5]
    try:
        logger.info('inserting mysql %d' % rid)
        execute_sql(done_conn, insert_done_sql, record[1:-1], commit=True)
        logger.info('inserting vddb %d' % rid)
        insert_vddb(cfg['vddb-async-url'], [thunder_hash, digest], logger)
        logger.info('inserted mysql %d' % rid)
    except mdb.IntegrityError:
        # Duplicate key: row already stored, treat as success.
        pass
    except Exception:
        logger.error('_do_insert requests.Exception: %r' % traceback.format_exc())
        time.sleep(0.5)
def db_init():
    """Drop and recreate the user table, seeding the default admin account."""
    statements = [
        "DROP TABLE IF EXISTS user",
        "CREATE TABLE user (\
        id int primary key auto_increment,\
        username varchar(50) NOT NULL,\
        password varchar(50) NOT NULL,\
        sign_prepared text,\
        sign_val float\
        );",
        "INSERT INTO user (username,password) VALUES ('admin','admin');"
    ]
    for stmt in statements:
        execute_sql(stmt)
    return 'init success'
def _add_relation(self,statement):
    # Persist a (left, relation, right) triple and mirror it in the in-memory graph.
    # `statement` is a sequence of three (UID, text) pairs:
    # [0]=left concept, [1]=relation, [2]=right concept.
    luid = statement[0][0]
    ruid = statement[1][0]
    riuid = statement[2][0]
    lc = statement[0][1]
    r = statement[1][1]
    ric = statement[2][1]
    # NOTE(review): the concept/relation text is interpolated straight into SQL —
    # a value containing a quote breaks the statement (injection risk); consider
    # parameterized queries if utils.execute_sql supports them.
    sql = """
    INSERT INTO statements (left_UID,left_concept,relation_UID,relation,right_UID,right_concept)
    VALUES ({left_uid},'{left_concept}',{relation_uid},'{relation}',{right_uid},'{right_concept}')
    """.format(left_uid=luid,left_concept=lc,relation_uid=ruid,relation=r,right_uid=riuid,right_concept=ric)
    utils.execute_sql(self.db_file,sql)
    # Keep the in-memory relation graph in sync with the DB.
    _FACTORY.add_relation(luid,ruid,riuid)
def trakt_episode_cross_reference():
    # Cross-reference all episodes of one hard-coded Trakt show (id 176105) into
    # the local DB: ensure the show exists, then add every episode plus its
    # alternate ids and an episodeCrossReference row for source_id 2.
    trakt_id = 176105
    source_id = 2
    trakt_api = TraktAPI()
    if trakt_api.authenticate():
        # Look for an already-imported show via its Trakt alternate id.
        try:
            show_ids = [x[0] for x in utils.execute_sql("select", select={"media_id"}, table={"alternateIds"}, where={"alternateId": trakt_id, "mediaType": 2})]
            logger.debug("Possible Show Ids Found from Existing - %s" % show_ids)
        except Exception as e:
            logger.error('Error on line {} - {} - {}'.format(type(e).__name__, sys.exc_info()[-1].tb_lineno, e))
            show_ids = []
        if not len(show_ids):
            # Show unknown locally: fetch it from Trakt and register it.
            logger.debug("Show Not Found in Database, Grabbing Show Info")
            show = trakt_api.get_show_info(trakt_id)
            try:
                # first_aired looks like an ISO timestamp; the first 4 chars are the year.
                show_year = show['first_aired'][:4]
            except Exception as e:
                logger.error('Error on line {} - {} - {}'.format(type(e).__name__, sys.exc_info()[-1].tb_lineno, e))
                show_year = None
            show_id = utils.add_show(show['title'], show_year, show['status'])
            if show_id is not None:
                for x in show['ids']:
                    utils.add_alternate_id(show_id, utils.get_media_type("show"), utils.get_alternate_key_id(x), show['ids'][x])
            show_ids = [show_id]
        seasons = trakt_api.get_season_info(trakt_id)
        for season in seasons:
            episodes = season['episodes']
            for episode in episodes:
                season_number = episode['season']
                episode_number = episode['number']
                episode_full = trakt_api.get_episode_info(trakt_id, season_number, episode_number)
                logger.info(episode_full)
                source_media_id = episode_full['ids']['trakt']
                title = episode_full['title']
                show_id = show_ids[0]
                try:
                    # Convert Trakt's UTC air time to the configured local timezone.
                    episode_airdate = datetime.strptime(episode_full['first_aired'], "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=pytz.UTC).astimezone(pytz.timezone(config.timezone)).strftime("%Y-%m-%d %H:%M")
                except Exception as e:
                    logger.error('Error on line {} - {} - {}'.format(type(e).__name__, sys.exc_info()[-1].tb_lineno, e))
                    episode_airdate = None
                # Resolve the show's alternate id under alternateKeyId 8 — assumes
                # exactly one such row exists; raises IndexError otherwise (TODO confirm).
                alternate_show_id = utils.execute_sql("select", select={"alternateId, media_id"}, table={"alternateIds"}, where={"media_id": show_id, "mediaType": 2, "alternateKeyId": 8})[0][0]
                # NOTE(review): season is stored as Trakt season + 6 — presumably a
                # numbering offset specific to this show; confirm before reuse.
                media_id = utils.add_episode(alternate_show_id,
                                             episode_full['season'] + 6, episode_full['number'], episode_airdate, episode_full['runtime'], episode_full['title'])
                utils.execute_sql("insert", table={"episodeCrossReference"}, values={"source_id": source_id, "mediaType": 4, "showID": show_id, "season_number": season_number, "episode_number": episode_number, "title": title, "source_media_id": source_media_id, "episodeId": media_id, "airDate": episode_airdate, "runtime": episode_full['runtime']}, returnValue="id")
                logger.debug("MediaId Found: %s" % media_id)
                if media_id is not None:
                    for x in episode_full['ids']:
                        utils.add_alternate_id(media_id, utils.get_media_type("episode"), utils.get_alternate_key_id(x), episode_full['ids'][x])
def create_dev(dev_name=None):
    """Allocate a dev account from the draft pool and provision its resources.

    Returns the final dev name (an anonymous one is generated when dev_name is None).
    """
    with ROOT.locks.drafts.acquire_exclusive():
        # Advance the 'curr' draft symlink atomically via rename.
        draft_name = os.readlink(ROOT.drafts.curr)
        os.symlink(str(int(draft_name) + 1), ROOT.drafts.next)
        os.rename(ROOT.drafts.next, ROOT.drafts.curr)
    dev_name = dev_name or ANONYM_PREFIX + draft_name
    dev_path = ROOT.devs[dev_name]
    os.rename(ROOT.drafts[draft_name], dev_path)
    os.symlink('../apps', dev_path.grantors[dev_name])
    touch_file(ROOT.locks[dev_name])
    # NOTE(review): dev_name/path are interpolated into SQL — assumed to be
    # validated upstream (check_name) before reaching here.
    execute_sql('CREATE TABLESPACE "%s" LOCATION \'%s\'' %
                (dev_name.lower(), dev_path.tablespace))
    create_app(dev_name, SAMPLE_NAME)
    return dev_name
def run(self):
    """Ensure the user's row exists, then crawl all of the user's TradingView posts."""
    user_table = table_name_set['user']
    # Probe for an existing user row.
    sql = 'select user_name from {schema}.{table} where user_name = "{user_name}"'.format(
        schema=SCHEMA, table=user_table, user_name=self.user_name)
    result, msgs = execute_sql(db_info=db_info_tradingview, operation_name='select',
                               sql=sql, lock=self.lock)
    write_in_log(file_location=LOG_FILE, msgs=msgs, lock=self.lock)
    if not result:
        # First time we see this user: create the row with zero accuracy.
        sql = 'insert into {schema}.{table}(user_name, accuracy_so_far) values ("{user_name}", 0)'.format(
            schema=SCHEMA, table=user_table, user_name=self.user_name)
        result, msgs = execute_sql(db_info=db_info_tradingview, operation_name='insert',
                                  sql=sql, lock=self.lock)
        write_in_log(file_location=LOG_FILE, msgs=msgs, lock=self.lock)
    target_url = 'https://www.tradingview.com/u/{}/'.format(self.user_name)
    write_in_log(file_location=LOG_FILE,
                 msgs=['start crawling {}\'s posts\n'.format(target_url)], lock=self.lock)
    getAllPostsInMarket(target_url, crawl_status='alluser', lock=self.lock)
    write_in_log(file_location=LOG_FILE,
                 msgs=['finish crawling {}\'s posts\n'.format(target_url)], lock=self.lock)
def login(_data):
    """Verify a signature login attempt against the stored template.

    On a successful match the adapted template value is persisted and the
    session is marked logged in.  Fix: the original crashed with a TypeError
    when no user row existed (true_data falsy); now guarded, consistent with
    the sibling update() handler.
    """
    true_data = execute_sql(
        "SELECT sign_prepared, sign_val FROM user WHERE username=%(username)s",
        username=session['username'])
    result = False
    if true_data:
        result, new_prepared = DynamicProcess.match(json.loads(true_data[0]),
                                                    float(true_data[1]),
                                                    _data, limit=0.6)
        if result:
            # Persist the adapted signature value and open the session.
            execute_sql(
                'UPDATE user SET sign_val=%(sign_v)s WHERE username=%(username)s',
                sign_v=new_prepared, username=session['username'])
            session['logged_in'] = True
    return str(result)
def get_trend(start_time, end_time, crypto_type, **kwargs):
    """Return the daily close-price direction series (1 up, 0 flat, -1 down)
    for crypto_type between start_time and end_time (inclusive, padded a day)."""
    assert isinstance(start_time, datetime), "type of start_time is not datetime"
    assert isinstance(end_time, datetime), "type of end_time is not datetime"
    assert crypto_type in PRICE_TABLES.keys(
    ), "{} is not in price table keys".format(crypto_type)
    assert 'lock' in kwargs.keys(), "lock is not in kwargs keys"
    # Widen the window by a day on each side so diff() covers the endpoints.
    lo_date = (start_time - timedelta(days=1)).strftime("%Y-%m-%d")
    hi_date = (end_time + timedelta(days=1)).strftime("%Y-%m-%d")
    sql = 'select CLOSE from {}.{} where DATE between "{}" and "{}" order by DATE'.format(
        PRICE_SCHEMA, PRICE_TABLES[crypto_type], lo_date, hi_date)
    result, msg = execute_sql(db_info_price_data, 'select', sql, lock=kwargs['lock'])
    write_in_log(ANALYSIS_LOG_FILE, msg, lock=kwargs['lock'])
    assert result, "no price data from {} to {}".format(lo_date, hi_date)
    close_price_list = [row['CLOSE'] for row in result]
    print(close_price_list)
    print(diff(close_price_list))
    return [
        1 if delta > 0 else 0 if delta == 0 else -1
        for delta in diff(close_price_list)
    ]
def update():
    """Update a reminder's title, text, and date from the submitted form."""
    sql = ("UPDATE reminders SET title = %(title)s, reminder = %(reminder)s, "
           "date = %(date)s WHERE id = %(id)s")
    params = {
        'id': request.args.get('id'),
        'title': request.form['title'],
        'reminder': request.form['edit-reminder'],
        'date': request.form['date'],
    }
    execute_sql(sql, params, True, None)
    return redirect(url_for('dashboard'))
def post(self, request):
    """Register a new developer account, optionally adopting a half-anonymous one."""
    dev_name = request.data['name']
    check_name(dev_name)
    if dev_name.startswith(ANONYM_PREFIX):
        raise Error(
            'Names starting with "%s" are reserved.' % ANONYM_PREFIX,
            'Please choose another name.')
    # Name must be unique case-insensitively.
    try:
        user = User.objects.get(username__iexact=dev_name)
    except User.DoesNotExist:
        pass
    else:
        raise Error(
            'The user "%s" already exists.' % user.username,
            'Name must be be case-insensitively unique.')
    email = request.data['email']
    try:
        validate_email(email)
    except ValidationError:
        raise Error('The email "%s" is incorrect.' % email,
                    'Please correct the email.')
    if User.objects.filter(email=email):
        raise Error(
            'The email "%s" has already been taken.' % email,
            'Please choose another email.')
    user = User.objects.create_user(
        dev_name, email, request.data['password'])
    user.save()
    if request.is_half_anonymous:
        # Rename the anonymous dev's resources (dirs, tablespace link, lock, schemas).
        _stop_dev_patsaks(request.dev_name)
        old_dev_path = ROOT.devs[request.dev_name]
        dev_path = ROOT.devs[dev_name]
        os.rename(old_dev_path, dev_path)
        os.mkdir(old_dev_path)
        os.symlink(dev_path.tablespace, old_dev_path.tablespace)
        os.rename(
            dev_path.grantors[request.dev_name], dev_path.grantors[dev_name])
        os.rename(ROOT.locks[request.dev_name], ROOT.locks[dev_name])
        execute_sql(
            'SELECT ak.rename_dev(%s, %s)', (request.dev_name, dev_name.lower()))
    else:
        create_dev(dev_name)
    user.backend = AUTHENTICATION_BACKENDS[0]
    auth.login(request, user)
    return HttpResponse(status=CREATED)
def _add_concept(self,concept):
    # Insert a concept row and register the returned UID with the node factory.
    # NOTE(review): `concept` is interpolated into SQL — a value containing a
    # quote breaks the statement (injection risk).
    # NOTE(review): dict.has_key() is Python-2-only.
    sql = """
    INSERT INTO concepts (concept)
    VALUES ('{concept}')
    """.format(concept=concept)
    uid = utils.execute_sql(self.db_file,sql)
    if not self.concepts.has_key(uid):
        _FACTORY.new_node(uid,concept)
    return uid
def make_views(self):
    """Assemble a CREATE VIEW statement from the layout dataframe and execute it.

    NOTE(review): the accumulated statement is executed only once, after the
    loop — if the layout describes several views, earlier ones are overwritten
    at each is_first_column row; confirm layouts contain a single view or that
    view_sql_* chain statements.
    """
    view_statement = ""
    for row in self.df_layout.itertuples():
        if row.is_first_column:
            view_statement = self.view_sql_start(row.view_name)
        if row.column_name != self.primary_key:
            view_statement += self.view_sql_column(
                row.column_name, row.view_column_name, row.is_last_column)
        if row.is_last_column:
            view_statement += self.view_sql_end(row.table_name)
    execute_sql(self.engine, view_statement)
def _add_concept(self, concept):
    # Insert a concept row and register the returned UID with the node factory.
    # NOTE(review): `concept` is interpolated into SQL — quote characters in the
    # value break the statement (injection risk).
    # NOTE(review): dict.has_key() is Python-2-only.
    sql = """
    INSERT INTO concepts (concept)
    VALUES ('{concept}')
    """.format(concept=concept)
    uid = utils.execute_sql(self.db_file, sql)
    if not self.concepts.has_key(uid):
        _FACTORY.new_node(uid, concept)
    return uid
def do_post():
    """Create or update a blog post from the submitted form, then redirect.

    An empty form id means "create"; otherwise the existing post is updated.
    Fix: the original called LOG.debug('add new post...', id) — a lazy-format
    argument with no placeholder, which makes the logging module raise a
    formatting error; a %s placeholder is now supplied.
    """
    title = request.forms.title
    content = request.forms.content
    id = request.forms.id
    if not id:
        LOG.debug('add new post, id: %s', id)
        created_time = datetime.now()
        modified_time = created_time
        execute_sql('insert into blog values (?,?,?,?,?)',
                    (None, title, content, created_time, modified_time))
        redirect('/')
    else:
        LOG.debug('post id is: %s', id)
        modified_time = datetime.now()
        execute_sql('update blog set title=?, content=?, last_modified_time=? where id=?',
                    (title, content, modified_time, id))
        redirect('/post/%s' % id)
def update(_data):
    """Re-verify the user's signature and, on a match, persist the adapted value."""
    true_data = execute_sql(
        "SELECT sign_prepared, sign_val FROM user WHERE username=%(username)s",
        username=session['username'])
    matched = False
    if true_data:
        matched, new_prepared = DynamicProcess.match(
            json.loads(true_data[0]), float(true_data[1]), _data, limit=0.6)
        if matched:
            execute_sql(
                'UPDATE user SET sign_val=%(sign_v)s WHERE username=%(username)s',
                sign_v=new_prepared, username=session['username'])
            return 'success'
    return 'failure'
def accuracy_analysis(username, crypto_type='btc', **kwargs):
    """Compute a user's EMA prediction accuracy and upsert it into the feature table.

    Fixes: the original indexed accuracy_result[0] even when the accuracy query
    returned nothing (IndexError) — now it returns early with a log message;
    one write_in_log call was also missing the lock kwarg, now passed for
    consistency with the other calls.
    """
    assert 'lock' in kwargs.keys(), 'lock is not in kwargs keys'
    sql = 'select accuracy, user_id from {schema}.{accuracy_table} t1 join {schema}.{user_table} t2 where t1.user_id=t2.id and t2.user_name="{user_name}"'.format(
        schema=SCHEMA, accuracy_table=table_name_set['accuracy'],
        user_table=table_name_set['user'], user_name=username)
    accuracy_result, msg = execute_sql(db_info_tradingview, 'select', sql,
                                       lock=kwargs['lock'])
    write_in_log(ANALYSIS_LOG_FILE, msg, lock=kwargs['lock'])
    if not accuracy_result:
        # No accuracy rows: there is no user_id to key the feature row on.
        write_in_log(ANALYSIS_LOG_FILE,
                     ['no accuracy data for {}\n'.format(username)],
                     lock=kwargs['lock'])
        return
    # Exponentially-weighted mean over the full history; take the latest value.
    accuracy_df = DataFrame(accuracy_result)
    ema_accuracy = accuracy_df.ewm(span=len(accuracy_df.index), adjust=False).mean()
    accuracy_of_prediction = list(ema_accuracy['accuracy'])[-1]
    user_id = accuracy_result[0]['user_id']
    sql = 'select id from {}.{} where user_id={}'.format(
        FEATURE_SCHEMA, FEATURE_TABLE_SET['text'], user_id)
    accuracy_feature_result, msg = execute_sql(db_info_tradingview, 'select', sql,
                                               lock=kwargs['lock'])
    write_in_log(ANALYSIS_LOG_FILE, msg, lock=kwargs['lock'])
    if accuracy_feature_result:
        # Feature row exists: update the per-crypto accuracy column.
        sql = 'update {}.{} set {}_accuracy = {} where id={}'.format(
            FEATURE_SCHEMA, FEATURE_TABLE_SET['text'], crypto_type,
            accuracy_of_prediction, accuracy_feature_result[0]['id'])
        _, msg = execute_sql(db_info_tradingview, 'update', sql, lock=kwargs['lock'])
    else:
        sql = 'insert into {}.{}(user_id, {}_accuracy) values ({},{})'.format(
            FEATURE_SCHEMA, FEATURE_TABLE_SET['text'], crypto_type,
            user_id, accuracy_of_prediction)
        _, msg = execute_sql(db_info_tradingview, 'insert', sql, lock=kwargs['lock'])
    write_in_log(ANALYSIS_LOG_FILE, msg, lock=kwargs['lock'])
def pre_login():
    """Check the username is registered, stash it in the session, and redirect
    to the signature pad for the login step."""
    username = request.form['username']
    rows = execute_sql('SELECT username FROM user WHERE username=%(username)s',
                       username=username)
    if rows is None or len(rows) == 0:
        return 'this username hasn\'t been registered yet'
    # Strip double quotes before storing the name in the session.
    session['username'] = username.replace('\"', '')
    session['action'] = 'login'
    return redirect('/static/sigpad.html')
def _add_relation(self, statement):
    # Persist a (left, relation, right) triple and mirror it in the in-memory graph.
    # `statement` is a sequence of three (UID, text) pairs:
    # [0]=left concept, [1]=relation, [2]=right concept.
    luid = statement[0][0]
    ruid = statement[1][0]
    riuid = statement[2][0]
    lc = statement[0][1]
    r = statement[1][1]
    ric = statement[2][1]
    # NOTE(review): text values are interpolated straight into SQL — a quote in
    # the data breaks the statement (injection risk); prefer parameterized
    # queries if utils.execute_sql supports them.
    sql = """
    INSERT INTO statements (left_UID,left_concept,relation_UID,relation,right_UID,right_concept)
    VALUES ({left_uid},'{left_concept}',{relation_uid},'{relation}',{right_uid},'{right_concept}')
    """.format(left_uid=luid, left_concept=lc, relation_uid=ruid, relation=r, right_uid=riuid, right_concept=ric)
    utils.execute_sql(self.db_file, sql)
    # Keep the in-memory relation graph in sync with the DB.
    _FACTORY.add_relation(luid, ruid, riuid)
def add_reminder():
    """Insert a new reminder from the submitted form and return to the dashboard.

    NOTE(review): the id is a random int in [1000, 1999] — collisions are
    possible once many reminders exist; confirm the table constraint handles it.
    """
    sql = (
        "INSERT INTO reminders "
        "(id, title, reminder, date, completed) "
        "VALUES (%(id)s, %(title)s, %(reminder)s, %(date)s, %(completed)s)")
    params = {
        'id': random.randint(1000, 1999),
        'title': request.form['title'],
        'reminder': request.form['create-reminder'],
        'date': request.form['date'],
        'completed': 'No',
    }
    execute_sql(sql, params, True, "None")
    return redirect(url_for('dashboard'))
def pre_update():
    """Verify the account password, then redirect to the signature pad to
    record an updated signature.

    Fix: the original indexed ret[0] unconditionally, raising IndexError when
    the username did not exist; an empty result now renders the same
    wrong-password page.
    """
    username = request.form['username']
    password = request.form['password']
    username = username.replace('\"', '')
    ret = execute_sql('SELECT password FROM user WHERE username=%(username)s',
                      username=username)
    # Unknown user or wrong password: same failure page (no user enumeration).
    if not ret or ret[0] != password:
        return render_template('wrong_pass.html')
    session['action'] = 'update'
    return redirect('/static/sigpad.html')
def post(self, request):
    """Create a developer account; a half-anonymous session is upgraded in place."""
    dev_name = request.data['name']
    check_name(dev_name)
    if dev_name.startswith(ANONYM_PREFIX):
        raise Error('Names starting with "%s" are reserved.' % ANONYM_PREFIX,
                    'Please choose another name.')
    # Reject names that collide case-insensitively with an existing user.
    try:
        user = User.objects.get(username__iexact=dev_name)
    except User.DoesNotExist:
        pass
    else:
        raise Error('The user "%s" already exists.' % user.username,
                    'Name must be be case-insensitively unique.')
    email = request.data['email']
    try:
        validate_email(email)
    except ValidationError:
        raise Error('The email "%s" is incorrect.' % email,
                    'Please correct the email.')
    if User.objects.filter(email=email):
        raise Error('The email "%s" has already been taken.' % email,
                    'Please choose another email.')
    user = User.objects.create_user(dev_name, email, request.data['password'])
    user.save()
    if request.is_half_anonymous:
        # Migrate the anonymous dev's on-disk and DB resources to the new name.
        _stop_dev_patsaks(request.dev_name)
        old_dev_path = ROOT.devs[request.dev_name]
        dev_path = ROOT.devs[dev_name]
        os.rename(old_dev_path, dev_path)
        os.mkdir(old_dev_path)
        os.symlink(dev_path.tablespace, old_dev_path.tablespace)
        os.rename(dev_path.grantors[request.dev_name],
                  dev_path.grantors[dev_name])
        os.rename(ROOT.locks[request.dev_name], ROOT.locks[dev_name])
        execute_sql('SELECT ak.rename_dev(%s, %s)',
                    (request.dev_name, dev_name.lower()))
    else:
        create_dev(dev_name)
    user.backend = AUTHENTICATION_BACKENDS[0]
    auth.login(request, user)
    return HttpResponse(status=CREATED)
def detail(id):
    """Render one blog post; 404 when the id does not exist."""
    blogs = execute_sql(
        'select id, title, created_time, content from blog where id =?', (id,))
    if not len(blogs):
        raise HTTPError(404, 'Blog does not exist.')
    LOG.debug('column created time type: %s', type(blogs[0]['created_time']))
    msg = myapp._('test i18n in py')
    LOG.debug('i18n msg: %s', msg)
    myapp.set_lang(['ja'])
    return {'blog': blogs[0], 'msg': msg, '_': myapp._}
def importer(mediaType, traktId, **kwargs):
    # Import a movie or a single episode from Trakt into the local DB and
    # return the new media id (None on failure).
    # kwargs: for episodes, 'seasonNumber' and 'episodeNumber' are required.
    media_id = None
    try:
        trakt_api = TraktAPI()
        if trakt_api.authenticate():
            if int(mediaType) == int(utils.get_media_type("movie")):
                # Resolve the Trakt slug, add the movie, then record all its alternate ids.
                movie = trakt_api.get_movie_info(trakt_api.find_id('movie', 'trakt', traktId)[0]['movie']['ids']['slug'])
                media_id = utils.add_movie(movie['title'], movie['year'], movie['released'], movie['runtime'])
                for x in movie['ids']:
                    utils.add_alternate_id(media_id, mediaType, utils.get_alternate_key_id(x), movie['ids'][x])
            elif int(mediaType) == int(utils.get_media_type("episode")):
                # Find the owning show locally by its Trakt alternate id.
                try:
                    show_ids = [x[0] for x in utils.execute_sql("select", select={"mediaId"}, table={"alternateIds"}, where={"alternateId": traktId, "mediaType": 2})]
                    logger.debug("Possible Show Ids Found from Existing - %s" % show_ids)
                except Exception as e:
                    logger.error('Error on line {} - {} - {}'.format(type(e).__name__, sys.exc_info()[-1].tb_lineno, e))
                    show_ids = []
                if not len(show_ids):
                    # Show unknown locally: fetch and register it first.
                    logger.debug("Show Not Found in Database, Grabbing Show Info")
                    show = trakt_api.get_show_info(traktId)
                    try:
                        # first_aired appears to be an ISO timestamp; first 4 chars = year.
                        show_year = show['first_aired'][:4]
                    except Exception as e:
                        logger.error('Error on line {} - {} - {}'.format(type(e).__name__, sys.exc_info()[-1].tb_lineno, e))
                        show_year = None
                    show_id = utils.add_show(show['title'], show_year, show['status'])
                    if show_id is not None:
                        for x in show['ids']:
                            utils.add_alternate_id(show_id, utils.get_media_type("show"), utils.get_alternate_key_id(x), show['ids'][x])
                    show_ids = [show_id]
                episode = trakt_api.get_episode_info(traktId, kwargs['seasonNumber'], kwargs['episodeNumber'])
                logger.debug(episode)
                try:
                    # Convert Trakt's UTC air time into the configured local timezone.
                    episode_airdate = datetime.strptime(episode['first_aired'], "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=pytz.UTC).astimezone(pytz.timezone(config.timezone)).strftime("%Y-%m-%d %H:%M")
                except Exception as e:
                    logger.error('Error on line {} - {} - {}'.format(type(e).__name__, sys.exc_info()[-1].tb_lineno, e))
                    episode_airdate = None
                # Try each candidate show id until one accepts the episode.
                for show_id in show_ids:
                    try:
                        media_id = utils.add_episode(show_id, episode['season'],
                                                     episode['number'], episode_airdate, episode['runtime'], episode['title'])
                        logger.debug("MediaId Found: %s" % media_id)
                        if media_id is not None:
                            for x in episode['ids']:
                                utils.add_alternate_id(media_id, utils.get_media_type("episode"), utils.get_alternate_key_id(x), episode['ids'][x])
                        break
                    except Exception as e:
                        logger.error('Error on line {} - {} - {}'.format(type(e).__name__, sys.exc_info()[-1].tb_lineno, e))
                        pass
    except Exception as e:
        logger.error('Error on line {} - {} - {}'.format(type(e).__name__, sys.exc_info()[-1].tb_lineno, e))
    return media_id
def create_app(dev_name, app_name):
    """Provision a new app for a dev: DB schemas, on-disk layout, sample code,
    and an initial git commit."""
    app_path = ROOT.devs[dev_name].apps[app_name]
    if os.path.isdir(app_path):
        raise Error('The app "%s" already exists.' % read_file(app_path.name),
                    'App name must be case-insensitively unique.')
    release_schema_name = get_id(dev_name, app_name)
    # Two schemas in one statement: the release schema and its ':debug' twin.
    execute_sql(CREATE_SCHEMA_SQL * 2,
                (release_schema_name, release_schema_name + ':debug'))
    os.mkdir(app_path)
    write_file(app_path.name, app_name)
    shutil.copytree(SAMPLE_PATH, app_path.code)
    os.mkdir(app_path.git)
    os.mkdir(app_path.envs)
    write_file(app_path.envs[INITIAL_ENV_NAME], INITIAL_ENV_NAME)
    write_file(app_path.domains, '[]')
    git_runner = GitRunner(dev_name, app_name, 'akshell', '*****@*****.**')
    git_runner.run('init', '--quiet')
    git_runner.run('add', '.')
    git_runner.run('commit', '--quiet', '-m', 'Initial commit.')
def post(self, request):
    """Log a user in; a half-anonymous dev's temporary resources are discarded.

    Returns the account summary used to initialize the client.
    """
    user = auth.authenticate(
        username=request.data['name'].replace(' ', '-'),
        password=request.data['password'])
    if not user or not user.is_active:
        raise Error('Incorrect user name or password.')
    if request.is_half_anonymous:
        # Drop the anonymous dev's schemas, tablespace, files, and lock.
        _stop_dev_patsaks(request.dev_name)
        execute_sql('SELECT ak.drop_schemas(%s)', (request.dev_name,))
        execute_sql('DROP TABLESPACE "%s"' % request.dev_name)
        os.rename(ROOT.devs[request.dev_name], ROOT.trash[request.dev_name])
        os.remove(ROOT.locks[request.dev_name])
    auth.login(request, user)
    return {
        'username': user.username,
        'email': user.email,
        'appNames': get_app_names(user.username),
        'libNames': get_lib_names(user.username),
        'config': json.loads(read_config(user.username)),
    }
def loop_insert(flag_queue, task_queue, res_queue, db_info):
    """Producer loop: fetch todo rows in batches, hand them to the insert
    workers via task_queue, and collect per-record results from res_queue.

    Fixes: `except Queue.Empty():` caught an exception *instance*, never the
    class, so a res_queue timeout crashed the loop — now `except Queue.Empty:`.
    The dead `global loop_insert, loop_get` statement (never assigned) is
    removed.
    """
    logger.info('Starting: loop_insert() => %s' % (cfg['vddb-async-url']))
    select_todo_sql = db_info['sqls']['select-todo']
    select_limit = db_info['select-limit']
    todo_conn = gen_db_conn(db_info['todo-db'])
    while not should_die(flag_queue):
        try:
            # Drain stale results from a previous, possibly aborted batch.
            while not res_queue.empty():
                ok = res_queue.get()
            rows = execute_sql(todo_conn, select_todo_sql, (select_limit,),
                               fetch=True)
            for record in rows:
                logger.info('Deleted mysql %d' % record[0])
                task_queue.put(record)
            time.sleep(1)
            # Collect one result per dispatched record, tolerating slow workers.
            results = []
            results_ok = 0
            rows_len = len(rows)
            for i in range(rows_len):
                try:
                    ok = res_queue.get(timeout=5)
                except Queue.Empty:
                    ok = False
                if ok:
                    results_ok += 1
                results.append(ok)
            if rows_len == 0:
                time.sleep(db_info['select-interval'])
            logger.info('One turn: (%d/%d), %r' % (results_ok, rows_len, results))
        except KeyboardInterrupt:
            break
        except Exception:
            # Reconnect on any DB failure and keep looping.
            todo_conn.close()
            todo_conn = gen_db_conn(db_info['todo-db'])
            logger.error('loop_insert Exception: %r' % traceback.format_exc())
            time.sleep(0.5)
    todo_conn.close()
    logger.warning('@@@ Exit: loop_insert()')
def edit(id):
    """Render the edit form for one blog post; 404 when the id is unknown."""
    blogs = execute_sql(
        'select id, title, created_time, content from blog where id =?', (id,))
    if not len(blogs):
        raise HTTPError(404, 'Blog does not exist.')
    return {'blog': blogs[0]}
import sqlite3
from utils import execute_sql

# One-off schema setup: drop and recreate the blog table from scratch.
execute_sql('drop table if exists blog;')
execute_sql('''
create table blog (id integer primary key, title varchar not null, content text, created_time timestamp, last_modified_time timestamp);
''')
def delete(id):
    """Delete one blog post by id and return to the index page."""
    LOG.info('delete blog #%s', id)
    execute_sql('delete from blog where id =?', (id,))
    redirect('/')
def index():
    """List all blog posts for the index page."""
    blogs = execute_sql('select id, title, created_time, content from blog')
    return {'blogs': blogs}