def loadumls():
    """Load UMLS label/value records from CSV into a Redis autocomplete engine.

    Reads ``../umls/[email protected]`` with pandas and stores each row as a
    JSON document keyed by its ``value`` column, searchable by its ``label``.
    """
    engine = RedisEngine(prefix='umls')
    # read_csv already returns a DataFrame; the original wrapped it in a
    # redundant pd.DataFrame(...) call.
    df = pd.read_csv("../umls/[email protected]")
    for item in df.to_dict('records'):
        # store_json(id, search phrase, data)
        engine.store_json(item['value'], item['label'], {
            'label': item['label'],
            'value': item['value'],
        })
def load_redis_engine():
    """Build a RedisEngine from ``settings.REDISTOGO_URL``.

    Returns the connected engine on success. On an invalid URL scheme or an
    unreachable server, raises (when ``settings.DEBUG``) or returns None.
    """
    redis_url = urlparse.urlparse(settings.REDISTOGO_URL)

    # Guard clause: anything other than a redis:// URL is a config error.
    if redis_url.scheme != "redis":
        if settings.DEBUG:
            raise RedisError("Redis Server '%s' URL is not valid." % settings.REDISTOGO_URL)
        return None

    engine = RedisEngine(host=redis_url.hostname,
                         port=redis_url.port,
                         password=redis_url.password)
    try:
        # INFO doubles as a connectivity probe and a key-count report.
        info = engine.client.info()
        nb_keys = info["db0"]["keys"] if "db0" in info else 0
        print("Conn. Redis server, %s keys stored." % nb_keys)
        return engine
    except ConnectionError:
        if settings.DEBUG:
            raise ConnectionError("Redis Server is not reachable.")
        return None
class LoadRedisWithLocationPrefixes(object):
    """Load film-location phrases into a Redis autocomplete engine."""

    def __init__(self, config_name, locations_json_file, locations_geocoded_json_file):
        """Create the engine from the named config and flush any stale index.

        :param config_name: key into the ``config`` mapping holding Redis settings
        :param locations_json_file: JSON-lines file of film-location records
        :param locations_geocoded_json_file: geocoded variant (stored, not read here)
        """
        self.locations_json_file = locations_json_file
        self.locations_geocoded_json_file = locations_geocoded_json_file
        # The RedisEngine is instantiated that will perform 'autocompletion'.
        # critical: stop_words is an explicitly EMPTY set. Testing showed that
        # the default 'stop_words=None' still installs a default stop-word set
        # containing 'a', which breaks autocompletion for the bare query 'a'.
        self.engine = RedisEngine(
            prefix=config[config_name].REDIS_AUTOCOMPLETE_SORTED_SET,
            stop_words=set(),
            cache_timeout=300,
            host=config[config_name].REDIS_HOSTNAME,
            port=config[config_name].REDIS_PORT,
            db=config[config_name].REDIS_DB,
            password=config[config_name].REDIS_PASSWORD,
        )
        # Start from a clean index on every run.
        self.engine.flush()

    def load_locations_prefixes_into_redis(self):
        """Read the locations JSON-lines file and index each entry.

        Each line is a JSON object; its 'Locations' field is stripped of
        non-alphanumeric characters and stored as the searchable phrase.
        """
        basepath = os.path.dirname(__file__)
        filepath = os.path.abspath(os.path.join(basepath, "..", "data", self.locations_json_file))
        # BUG FIX: the file handle was never closed; use a context manager.
        with open(filepath, "r") as json_file:
            # Iterate over all filmlocation json entries and extract the
            # 'Locations' entry from the json object, strip the phrase of all
            # non-alphanumeric characters before inserting into Redis.
            for json_string in json_file:
                json_object = json.loads(json_string)
                location = json_object["Locations"]
                if location and location.strip() != "None":
                    location = re.sub(r"([^\s\w]|_|-)+", " ", location)
                    # Store the filmid (unique), search phrase, full metadata
                    # for filmlocation.
                    self.engine.store_json(json_object["filmid"], location, json_object)
def __init__(self, config_name, locations_json_file, locations_geocoded_json_file):
    """Set up the autocomplete engine for the given config and wipe its index.

    :param config_name: key into the ``config`` mapping holding Redis settings
    :param locations_json_file: JSON-lines file of film-location records
    :param locations_geocoded_json_file: geocoded variant (stored for later use)
    """
    self.locations_json_file = locations_json_file
    self.locations_geocoded_json_file = locations_geocoded_json_file

    cfg = config[config_name]
    # critical: pass an explicitly EMPTY stop-word set. Testing showed that
    # the default 'stop_words=None' still installs a default stop-word set
    # (containing 'a'), which prevents autocompletion on the bare query 'a'.
    self.engine = RedisEngine(
        prefix=cfg.REDIS_AUTOCOMPLETE_SORTED_SET,
        stop_words=set(),
        cache_timeout=300,
        host=cfg.REDIS_HOSTNAME,
        port=cfg.REDIS_PORT,
        db=cfg.REDIS_DB,
        password=cfg.REDIS_PASSWORD,
    )
    # Start from a clean index on every construction.
    self.engine.flush()
import os
import json
import bson
import pymongo
from pymongo import Connection
import urllib2
from redis_completion import RedisEngine
import urlparse
import redis

# Parse the hosted-Redis URL once and share the credentials between the raw
# client and the autocomplete engine.
url = urlparse.urlparse(
    'redis://:[email protected]:6777')
r = redis.Redis(host=url.hostname, port=url.port, password=url.password)
engine = RedisEngine(host=url.hostname, port=url.port, password=url.password)

connection = Connection(
    'mongodb://*****:*****@alex.mongohq.com:10013/app8222672')
jobs_collection = connection.app8222672.jobs

# Tokenise every job title into lowercase words. (The original built this
# with a manual nested append loop; a comprehension is the idiomatic form.)
jobs = [word
        for job in jobs_collection.find()
        for word in job['title'].lower().split()]
print(jobs)

# Index every word for autocompletion (map() is eager under Python 2).
map(engine.store, jobs)
#print engine.search(query.decode('cp1252'))
import urllib2
from redis_completion import RedisEngine
import urlparse
import redis

engine = RedisEngine()

# A handful of sample phrases to index for autocompletion.
titles = [
    'python programming',
    'programming c',
    'unit testing python',
    'testing software',
    'software design',
]
for title in titles:
    engine.store(title)

# Show everything matching the partial word 'test'.
print(engine.search('test'))
# -*- coding:utf-8 -*- import mmseg from pinyin import Pinyin from redis_completion import RedisEngine pinyin = Pinyin() engine = RedisEngine() def store_movie(movie): phrase = movie["title"] seg_phrase = " ".join(mmseg.seg_txt(phrase)) _pinyin_phrase = pinyin.get_pinyin(phrase) py_phrase = "".join([p[0] for p in _pinyin_phrase]).encode("utf-8") pinyin_phrase = "".join(_pinyin_phrase).encode("utf-8") phrase = "%s %s %s %s" % (phrase, seg_phrase, pinyin_phrase, py_phrase) engine.store_json(movie["id"], phrase, movie) def load_datas(): movies = [ {"id":20513051, "title": "被偷走的那五年", "director":"黄真真"} , {"id":20513052, "title": "十面埋伏", "director":"张艺谋"} , {"id":20513053, "title": "龙门镖局", "director":"王勇"} , {"id":20513054, "title": "致我们终将逝去的青春", "director":"赵薇"} , {"id":20513055, "title": "金枝欲孽2", "director":"戚其义"} ] for movie in movies: store_movie(movie) load_datas() for phrase in ["偷", "lmb", "jinzhi"]:
import urllib2
from redis_completion import RedisEngine
import csv
import redis

engine = RedisEngine()

# UPDATING CACHE
# Load product names (one per line), de-duplicate them, and index them all
# for autocompletion. BUG FIX: the file handle was previously never closed.
with open('BestBuy.txt', "r") as txtfile:
    productList = txtfile.read().split("\n")
productList = list(set(productList))

# map() is eager under Python 2, so this stores every product.
map(engine.store, productList)

Options = engine.search('Ultra HD TV Silver')
for item in Options:
    print(item)

# NOTE(review): the lines below were a stray paste of an unrelated
# interactive-interpreter session that made this file unparseable; they are
# preserved here as comments.
# $$$$$$$$$$$$$$$$$$$$$
# >>> mystring = "alfa"
# >>> myfile = open("/home/gustacaste/FileA", 'w')
# >>> myfile.write(mystring)
# >>> myfile.close()
import urllib2
from redis_completion import RedisEngine

engine = RedisEngine(prefix='stocks')


def load_data():
    """Fetch the NYSE listing and index every company for autocompletion."""
    url = 'http://media.charlesleifer.com/downloads/misc/NYSE.txt'
    contents = urllib2.urlopen(url).read()
    # Skip the header row; each remaining row is "<ticker>\t<company>".
    for row in contents.splitlines()[1:]:
        ticker, company = row.split('\t')
        # store_json(id, search phrase, data)
        engine.store_json(ticker, company, {'ticker': ticker, 'company': company})


def search(p, **kwargs):
    """Proxy partial-phrase searches to the engine's JSON search."""
    return engine.search_json(p, **kwargs)


if __name__ == '__main__':
    # Interactive demo loop: rebuild the index, then answer queries until 'q'.
    engine.flush()
    print('Loading data (may take a few seconds...)')
    load_data()
    print('Search data by typing a partial phrase, like "uni sta"')
    print('Type "q" at any time to quit')
    while True:
        cmd = raw_input('? ')
        if cmd == 'q':
            break
        results = search(cmd)
        print('Found %s matches' % len(results))
        for result in results:
            print('%s: %s' % (result['ticker'], result['company']))
# -*- coding: utf8
import urllib2
from redis_completion import RedisEngine
import urlparse
import redis

engine = RedisEngine()

titles = [
    'python programming',
    'programming c',
    'unit testing python',
    # BUG FIX: the original called .decode('utf-8') on an already-decoded
    # u'' literal; under Python 2 that implicitly encodes with ASCII first
    # and raises UnicodeEncodeError for non-ASCII text. The unicode literal
    # is already the decoded value.
    u'программирование',
]

# map() is eager under Python 2, so this stores every title.
map(engine.store, titles)
from redis_completion import RedisEngine
import pandas as pd
import json

engine = RedisEngine(prefix='umls')


def loadumls():
    """Load UMLS label/value rows from CSV into the module-level engine.

    FIX: the original re-instantiated a second RedisEngine with the same
    prefix here (shadowing the module-level one) and wrapped the read_csv
    result in a redundant pd.DataFrame(...) call.
    """
    df = pd.read_csv("../umls/[email protected]")
    for item in df.to_dict('records'):
        # store_json(id, search phrase, data)
        engine.store_json(item['value'], item['label'], {
            'label': item['label'],
            'value': item['value'],
        })


def search(p):
    """Search indexed UMLS entries by partial phrase."""
    return engine.search_json(p)
def get_engine(self):
    """Return a fresh autocomplete engine bound to test database 15."""
    test_engine = RedisEngine(prefix='testac', db=15)
    return test_engine
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'

########NEW FILE########
__FILENAME__ = stocks
import urllib2
from redis_completion import RedisEngine

engine = RedisEngine(prefix='stocks')


def load_data():
    """Download the NYSE listing and index each company name."""
    url = 'http://media.charlesleifer.com/downloads/misc/NYSE.txt'
    contents = urllib2.urlopen(url).read()
    for row in contents.splitlines()[1:]:  # skip the header line
        ticker, company = row.split('\t')
        # store_json(id, search phrase, data)
        engine.store_json(ticker, company, {
            'ticker': ticker,
            'company': company,
        })


def search(p, **kwargs):
    """Search the stored companies by partial phrase."""
    return engine.search_json(p, **kwargs)
def create_app(env='debug'):
    """Application factory: build and configure the Flask app.

    Wires up the MongoDB collections, the Redis autocomplete engine, and all
    HTML + JSON API routes. ``env`` selects debug ('debug') or production
    ('prod') mode.

    TODO: if config is None:
        config = os.path.join(app.root_path, 'production.cfg')
        app.config.from_pyfile(config)
    """
    app = Flask(__name__)
    app.config.from_object(__name__)
    # NOTE(review): a random SECRET_KEY per process invalidates all sessions
    # on every restart -- confirm this is intended.
    app.config.update(SECRET_KEY=os.urandom(20))

    if env == 'debug':
        app.debug = True
    if env == 'prod':
        app.debug = False

    if app.debug:
        from flaskext.lesscss import lesscss
        lesscss(app)

    app.static_path = '/static'

    # connect to the database
    connection = Connection(
        'mongodb://*****:*****@alex.mongohq.com:10013/app8222672')
    jobs_collection = connection.app8222672.jobs
    users_collection = connection.app8222672.users

    url = urlparse.urlparse(
        'redis://:[email protected]:6777')
    autocomplete_engine = RedisEngine(host=url.hostname,
                                      port=url.port,
                                      password=url.password)

    @app.route('/index/')
    @app.route('/home/')
    @app.route('/')
    def home():
        return render_template('home.html')

    @app.route('/settings/')
    def settings():
        return render_template('settings.html')

    @app.route('/list')
    def list():
        jobs = []
        for job in jobs_collection.find():
            job['id'] = str(job['_id'])
            jobs.append(job)
        return render_template('list.html', jobs=jobs)

    @app.route('/delete/<jobid>')
    def delete(jobid):
        response = jobs_collection.remove({'_id': bson.ObjectId(jobid)})
        # NOTE(review): pymongo's remove() normally returns an error
        # document rather than None, so the success branch may never run --
        # verify against the pymongo version in use.
        if (response == None):
            flash('Job erased')
        else:
            flash('Error occured')
        return redirect(url_for('list'))

    @app.route('/jobs/<jobid>')
    def inside(jobid):
        found_job = jobs_collection.find_one(
            {'_id': bson.ObjectId(oid=str(jobid))})
        # Expose the id under several keys the template expects; '_id' must
        # be stringified for rendering.
        found_job['id'] = str(found_job['_id'])
        found_job['objectid'] = str(found_job['_id'])
        found_job['_id'] = str(found_job['_id'])
        return render_template('inside.html', job=found_job)

    @app.route('/edit/<jobid>')
    def edit(jobid):
        return render_template('add.html', job=jobid)

    @app.errorhandler(404)
    def page_not_found(e):
        return render_template('404.html'), 404

    # ==================== REGISTRATION ==================

    @app.route('/logout')
    def logout():
        # remove the username from the session if it's there
        session.pop('logged_in', None)
        return redirect(url_for('home'))

    @app.route('/registration')
    def registration():
        return render_template('registration.html')

    def get_user_id(email):
        # Despite the name, returns the full user document (or None).
        return users_collection.find_one({'email': email})

    @app.route('/login', methods=['GET', 'POST'])
    @app.route('/register', methods=['GET', 'POST'])
    def register():
        """Registers the user (and doubles as the login handler)."""
        error = None
        if request.method == 'POST':
            logged_user = get_user_id(request.form['email'])
            if logged_user:
                # Existing account: treat the POST as a login attempt.
                if request.form['password'] == logged_user['password']:
                    flash('Logged in as ' + logged_user['email'])
                    session['logged_in'] = logged_user['email']
                    return redirect(url_for('home', session=session))
                else:
                    flash('Wrong password!')
                    return redirect(url_for('register'))
            if not request.form['email'] or \
                    '@' not in request.form['email']:
                error = 'You have to enter a valid email address'
            elif not request.form['password']:
                error = 'You have to enter a password'
            elif get_user_id(request.form['email']) is not None:
                error = 'The username is already taken'
            else:
                new_user_id = users_collection.save({
                    'email': request.form['email'],
                    'password': request.form['password'],
                    'status': 'awaiting confirm'
                })
                # Send the activation link via Mailgun.
                payload = {
                    'from': 'Excited User <*****@*****.**>',
                    'to': request.form['email'],
                    'subject': 'Quick Hunt account confirmation',
                    'text': 'http://quickhunt.herokuapp.com/activate_user/' +
                            str(new_user_id)
                }
                r = requests.post(
                    "https://api.mailgun.net/v2/app8222672.mailgun.org/messages",
                    auth=HTTPBasicAuth('api', 'key-9m9vuzkafbyjqhm9ieq71n0lu9dgf9b9'),
                    data=payload)
                flash(
                    'You were successfully registered. Confirm registration and login.'
                )
                session['logged_in'] = request.form['email']
                flash('logged in successfuly')
                return redirect(url_for('home'))
            #flash('no luck ((' + request.method + error)
        flash('error:' + str(error))
        return render_template('login.html', error=error)

    @app.route('/activate_user/<user_id>')
    def activate_user(user_id):
        """Activate user function."""
        found_user = users_collection.find_one(
            {'_id': bson.ObjectId(oid=str(user_id))})
        if not found_user:
            return abort(404)
        else:
            # BUG FIX: registration stores status 'awaiting confirm' (with a
            # space); this previously compared against 'awaiting_confirm'
            # and therefore never matched.
            if found_user['status'] == 'awaiting confirm':
                ### Setting the user status active here ###*
                # TODO(review): the status is never actually updated to
                # 'active' here, although the comment above says it should be.
                confirm_mail = {
                    'from': 'Quick Hunt <*****@*****.**>',
                    'to': found_user['email'],
                    'subject': 'Quick Hunt account confirmation',
                    'text': 'Subscription confirmed.'
                }
                r = requests.post(
                    "https://api.mailgun.net/v2/app8222672.mailgun.org/messages",
                    auth=HTTPBasicAuth('api', 'key-9m9vuzkafbyjqhm9ieq71n0lu9dgf9b9'),
                    data=confirm_mail)
                flash('user has been activated', 'info')
            elif found_user['status'] == 'active':
                flash('user already activated', 'info')
        # NOTE(review): no 'content' route is defined in this file -- verify.
        return redirect(url_for('content'))

    """ This is the API part of the equation """
    """
    @app.errorhandler(404)
    def not_found(error=None):
        message = {
            'status': 404,
            'message': 'Not Found: ' + request.url,
        }
        resp = jsonify(message)
        resp.status_code = 404
        return resp
    """

    @app.route('/api/search/', methods=['GET'])
    @app.route('/api/search/<query>', methods=['GET'])
    def search(query=None):
        # NOTE(review): 'query' is ignored -- every job is returned.
        jobs = []
        for job in jobs_collection.find():
            job['_id'] = str(job['_id'])
            jobs.append(job)
        return jsonify({'result': jobs})

    @app.route('/api/jobs/<jobid>', methods=['GET'])
    def get_job(jobid):
        found_job = jobs_collection.find_one(
            {'_id': bson.ObjectId(oid=str(jobid))})
        found_job['id'] = str(found_job['_id'])
        found_job['objectid'] = str(found_job['_id'])
        found_job['_id'] = str(found_job['_id'])
        #if userid in users:
        return jsonify(found_job)
        #else:
        #    return not_found()
        #return undef

    @app.route('/api/jobs/new', methods=['POST'])
    def create_job():
        js = json.dumps(request.data)
        json_data = json.loads(request.data)
        jobs_collection.save(json_data)
        resp = Response(js, status=200, mimetype='application/json')
        return resp

    @app.route('/api/jobs/<jobid>', methods=['PUT'])
    def update_job(jobid):
        js = json.dumps(request.data)
        print('js:' + str(js))
        json_data = json.loads(request.data)
        # Re-attach the ObjectId so save() updates rather than inserts.
        json_data['_id'] = bson.ObjectId(json_data['objectid'])
        jobs_collection.save(json_data)
        resp = Response(js, status=200, mimetype='application/json')
        return resp

    @app.route('/api/jobs/<jobid>', methods=['DELETE'])
    def delete_job(jobid):
        response = jobs_collection.remove({'_id': bson.ObjectId(jobid)})
        # BUG FIX: 'success' and 'error' were bare (undefined) names, which
        # raised NameError at request time; they are now string keys.
        if (response == None):
            return jsonify({'success': 'Success'})
        else:
            return jsonify({'error': 'Error'})

    @app.route('/api/autocomplete/', methods=['GET'])
    def autocomplete():
        js = {}
        searchword = request.args.get('q', '')
        if searchword:
            js = json.dumps({'result': autocomplete_engine.search(searchword)})
        else:
            js = {'error': 'invalid argument'}
        return Response(js, status=200, mimetype='application/json')

    @app.route('/api/favorites/add/<jobid>', methods=['GET'])
    # FIX IT (i mean get)
    def add_to_favorites(jobid):
        me = users_collection.find_one({'email': session['logged_in']})
        if 'favorites' in me:
            me['favorites'].append(jobid)
        else:
            me['favorites'] = [jobid]
        js = users_collection.save(me)
        print(js)
        resp = Response({'reply': js}, status=200, mimetype='application/json')
        return resp

    @app.route('/api/favorites/delete/<jobid>', methods=['DELETE'])
    # FIX IT (i mean get)
    def remove_from_favorites(jobid):
        me = users_collection.find_one({'email': session['logged_in']})
        if 'favorites' in me:
            favorites = me['favorites']
            # BUG FIX: 'favoretes' was a typo for 'favorites' and raised
            # NameError whenever this branch ran.
            favorites.remove(jobid)
            me['favorites'] = favorites
        js = users_collection.save(me)
        print(js)
        resp = Response({'reply': js}, status=200, mimetype='application/json')
        return resp

    return app
import urllib2
from redis_completion import RedisEngine
import urlparse
import redis

engine = RedisEngine()

# Sample phrases to index for autocompletion.
titles = [
    'python programming',
    'programming c',
    'unit testing python',
    'testing software',
    'software design',
]
for phrase in titles:
    engine.store(phrase)

# Print every match for the partial word 'test'.
print(engine.search('test'))
# App-level configuration pulled from the Flask config object.
REDIS_DB = app.config['REDIS_DB']
AWS_KEY = app.config['AWS_KEY']
AWS_SECRET_KEY = app.config['AWS_SECRET_KEY']
BUCKET_NAME = app.config['BUCKET_NAME']
LOGOS_BUCKET_NAME = app.config['LOGOS_BUCKET_NAME']

SENDGRID_USERNAME = os.environ.get('SENDGRID_USERNAME', '')
SENDGRID_PASSWORD = os.environ.get('SENDGRID_PASSWORD', '')

# Use the hosted Redis (Redis To Go) when its URL is present in the
# environment; otherwise fall back to a local server.
redis_url = os.environ.get('REDISTOGO_URL', None)
if redis_url:
    redis_url = urlparse.urlparse(redis_url)
    # BUG FIX: the connection_setup/RedisEngine pair below was duplicated
    # verbatim in the original; the second copy-pasted pair was removed.
    redisco.connection_setup(host=redis_url.hostname, port=redis_url.port,
                             db=0, password=redis_url.password)
    autocomplete_engine = RedisEngine(host=redis_url.hostname,
                                      port=redis_url.port, db=0,
                                      password=redis_url.password)
else:
    redisco.connection_setup(host='localhost', port=6379, db=0)
    autocomplete_redis_client = redis.Redis(host='localhost', port=6379, db=0)
    # NOTE(review): redis-completion's RedisEngine does not take a client
    # object as its first positional argument -- verify this call; it may
    # need host/port/db keyword arguments like the branch above.
    autocomplete_engine = RedisEngine(autocomplete_redis_client)

ALLOWED_EXTENSIONS = set(('txt', 'pdf', 'ppt', 'pptx', 'zip', 'tar', 'rar'))
ALLOWED_IMAGE_EXTENSIONS = set(('png', 'jpg'))