Exemplo n.º 1
0
def logs_new_guild(guild_name, guild_server, guild_region):
	"""Fetch all zone-8 (Hellfire Citadel) reports for a guild from the
	Warcraft Logs API, run the analysis, cache the result in Redis and
	return it.

	:param guild_name: guild name as it appears on Warcraft Logs
	:param guild_server: server/realm name
	:param guild_region: region code (e.g. "US", "EU")
	:return: dict describing the guild and its analyzed logs
	"""
	# Prefer the Heroku-style env var; fall back to the local config module.
	# (Was a bare ``except:``; narrowed so SystemExit/KeyboardInterrupt pass.)
	try:
		r = redis.from_url(os.environ.get('REDISTOGO_URL'))
	except Exception:
		import config
		r = redis.from_url(config.REDISTOGO_URL)
	guild = {
		"guild_name": guild_name,
		"guild_server": guild_server,
		"guild_region": guild_region,
		"logs": [],
	}
	start_time = 1438387200000  # ms epoch; only reports after this date
	response = requests.get("https://www.warcraftlogs.com:443/v1/reports/guild/"+guild_name+"/"+guild_server+"/"+guild_region+"?start="+str(start_time)+"&api_key=9457bbf774422ab14b5625efb2b35e36")
	response = json.loads(response.text)
	for log in response:
		if log["zone"] == 8:  # zone 8 == Hellfire Citadel
			new_log = {
				"log_id": log["id"],
				"title": log["title"],
				"start": log["start"]/1000,  # API reports milliseconds
				"date": datetime.date.fromtimestamp(log["start"]/1000),
				"owner": log["owner"],
			}
			guild["logs"].append(new_log)
	guild["last_checked"] = int(time.time())
	guild["last_checked_dt"] = datetime.datetime.fromtimestamp(guild["last_checked"])
	guild_id_string = guild_name+"_"+guild_server+"_"+guild_region
	guild = analyze_guild_logs(guild, r)
	r.hmset(guild_id_string, guild)
	return guild
Exemplo n.º 2
0
def image_upload():
    """
    The image upload route.

    Reads an ``urls`` list from the JSON request body, fetches the stored
    upload credentials from Redis, and enqueues a background upload job
    on an RQ queue.

    :return: JSON with the job id (HTTP 202), or an error status message
    """
    urls = request.json['urls']

    # get credentials from the redis server
    rd = redis.from_url(current_app.config['REDIS_URL'])
    try:
        credentials = rd.get("credentials")
    except Exception:
        credentials = None
    # BUG FIX: redis returns None (no exception) when the key is absent,
    # so the missing-token case must be checked explicitly as well.
    if credentials is None:
        response_object = {'status': "error, no access token is found, without it you can no longer upload image files."}
        return jsonify(response_object)

    # create new Task
    tsk = Task()
    tsk.initialize(urls, credentials)

    # create and add Job
    with Connection(redis.from_url(current_app.config['REDIS_URL'])):
        q = Queue()
        task = q.enqueue('project.server.main.works.long_work', tsk)
    if task:
        response_object = {'jobId': task.get_id()}
    else:
        response_object = {
            'status': "error, could start job, this may mean that there are no workers running, or the redis server is down"}

    return jsonify(response_object), 202
Exemplo n.º 3
0
def last_log_from_guild(guild_name, guild_server, guild_region, r):
	"""Return the analysis of the most recent usable log for a guild.

	Tries the newest Warcraft Logs report first; if it cannot be analyzed,
	walks the report list in reverse and returns the first one ``analyze``
	accepts. Returns ``False`` when nothing can be fetched or analyzed.

	:param guild_name: guild name as it appears on Warcraft Logs
	:param guild_server: server/realm name
	:param guild_region: region code (e.g. "US", "EU")
	:param r: Redis connection (unused; kept for interface compatibility —
		the original immediately overwrote it with a fresh, never-used
		connection, which has been removed)
	:return: analysis dict from ``analyze``, or ``False`` on failure
	"""
	guild = {}
	guild["guild_name"] = guild_name
	guild["guild_server"] = guild_server
	guild["guild_region"] = guild_region
	guild["logs"] = []
	start_time = 1435734000000  # ms epoch lower bound sent to the API
	end_time = 1437465661000    # NOTE: never sent to the API (kept as-is)
	# BUG FIX: ``report`` is initialized up front; previously a NameError
	# could escape at the return when every candidate log failed analysis.
	report = False
	try:
		response = requests.get("https://www.warcraftlogs.com:443/v1/reports/guild/"+guild_name+"/"+guild_server+"/"+guild_region+"?start="+str(start_time)+"&api_key=9457bbf774422ab14b5625efb2b35e36")
		response = json.loads(response.text)
		log_id = response[0]["id"]
		try:
			report = analyze(log_id)
		except Exception:
			# Newest log failed; walk the list in reverse and keep the
			# first log that analyzes cleanly.
			for log_dict in response[::-1]:
				try:
					log_id = log_dict["id"]
					report = analyze(log_id)
					break
				except Exception:
					pass
	except Exception:
		report = False
	return report
Exemplo n.º 4
0
    def __get_connection(self) -> redis.Redis:
        """
        Build and return a Redis connection.

        Chooses a unix-socket or TCP URL depending on configuration, then
        authenticates explicitly when a password is configured.

        :return: Redis connection instance
        :rtype: redis.Redis
        """

        if self.__redis_use_socket:
            url = 'unix://{:s}?db={:d}'.format(
                self.__redis_host,
                self.__redis_db
            )
        else:
            url = 'redis://{:s}:{:d}/{:d}'.format(
                self.__redis_host,
                self.__redis_port,
                self.__redis_db
            )
        connection = redis.from_url(url)

        auth = BlackRed.Settings.REDIS_AUTH
        if auth is not None:
            connection.execute_command('AUTH {:s}'.format(auth))
        return connection
Exemplo n.º 5
0
def from_settings(settings):
    """Build a Redis connection from a Scrapy-style settings object.

    ``REDIS_URL`` takes precedence over ``REDIS_HOST``/``REDIS_PORT``.

    :param settings: mapping exposing ``get(key, default)``
    :return: a Redis client instance
    """
    host = settings.get('REDIS_HOST', REDIS_HOST)
    port = settings.get('REDIS_PORT', REDIS_PORT)
    redis_url = settings.get('REDIS_URL', REDIS_URL)
    if redis_url:
        # BUG FIX: the connections were created but never returned,
        # making this factory useless to callers.
        return redis.from_url(redis_url)
    return redis.Redis(host=host, port=int(port))
Exemplo n.º 6
0
def setup_rq_connection():
    """Resolve a Redis connection for the RQ dashboard and push it as the
    active RQ connection.
    """
    # 'RQ_DASHBOARD_REDIS_URL' environmental variable takes priority;
    #   otherwise, we look at the Flask app's config for the redis information.
    env_url = os.environ.get('RQ_DASHBOARD_REDIS_URL')  # read once, not twice
    if env_url:
        redis_conn = from_url(env_url)
    elif current_app.config.get('REDIS_URL'):
        redis_conn = from_url(current_app.config.get('REDIS_URL'))
    else:
        redis_conn = Redis(host=current_app.config.get('REDIS_HOST', 'localhost'),
                           port=current_app.config.get('REDIS_PORT', 6379),
                           password=current_app.config.get('REDIS_PASSWORD', None),
                           db=current_app.config.get('REDIS_DB', 0))
    push_connection(redis_conn)
Exemplo n.º 7
0
def get_redis_connection(config):
    """
    Returns a redis connection from a connection config
    """
    # An explicit URL wins; "DB" must also be present alongside "URL".
    if "URL" in config:
        return redis.from_url(config["URL"], db=config["DB"])
    if "USE_REDIS_CACHE" in config.keys():

        from django.core.cache import get_cache

        cache = get_cache(config["USE_REDIS_CACHE"])

        if hasattr(cache, "client"):
            # We're using django-redis. The cache's `client` attribute
            # is a pluggable backend that return its Redis connection as
            # its `client`
            try:
                # To get Redis connection on django-redis >= 3.4.0
                # we need to use cache.client.get_client() instead of
                # cache.client.client used in older versions
                try:
                    return cache.client.get_client()
                except AttributeError:
                    return cache.client.client
            except NotImplementedError:
                # Backend does not expose its raw client; fall through to
                # the direct host/port connection below.
                pass
        else:
            # We're using django-redis-cache
            return cache._client

    return redis.Redis(host=config["HOST"], port=config["PORT"], db=config["DB"], password=config.get("PASSWORD", None))
Exemplo n.º 8
0
def __get_redis(app_ctx):
    """Constructs Redis Client object.

    :param app_ctx: application context exposing a ``config`` mapping
    :return: Redis Client object
    """
    url = app_ctx.config['REDIS_URL']
    return redis.from_url(url)
Exemplo n.º 9
0
def main():
    """Bot entry point: connect to Telegram and Redis, then poll forever."""
    global LAST_UPDATE_ID
    telegram_token = os.environ.get("TELEGRAM_TOKEN")

    logging.basicConfig(format="%(asctime)s - %(name)s - %(levelname)s - %(message)s")
    logger = logging.getLogger("Maslahat.uz")
    logger.setLevel(logging.DEBUG)

    # logger.debug("Initalizing bot ...")
    try:
        bot = telegram.Bot(telegram_token)
        # logger.debug("Connected to Telegram API")
    except telegram.error.TelegramError:
        # NOTE(review): on connection failure ``bot`` stays unbound, so the
        # bot.getUpdates() call below raises NameError — confirm intent.
        pass
        # logger.warning("Cannot connect to Telegram server!")

    redis_url = os.environ.get("REDIS_URL")
    redis_conn = redis.from_url(redis_url)
    # logger.debug("Connected to Redis")

    # logger.debug("Receiving updates ...")
    try:
        # Resume from the newest pending update, if there is one.
        LAST_UPDATE_ID = bot.getUpdates()[-1].update_id
        # logger.debug("Updates received")
    except IndexError:
        # logger.warning("No update received")
        LAST_UPDATE_ID = None

    # logger.debug("Starting heartbeat ...")
    # NOTE(review): ``stat`` is not defined in this function — presumably a
    # module-level global; verify it exists at call time.
    heart_beat(logger, stat)
    # logger.debug("Waiting for updates ...")
    # Main polling loop: process bot updates plus the periodic checks.
    while True:
        bot_worker(redis_conn, bot, logger)
        check_facebook(redis_conn, bot, logger)
        check_announcements(redis_conn, bot, logger)
Exemplo n.º 10
0
    def __init__(self, cb):
        """Wire this chatbot helper to an existing connection object *cb*.

        NOTE(review): the ``print`` statements below are Python 2 syntax —
        this fragment is Python 2 only.
        """
        # these instance variables are just for convenience
        self.user = cb.user
        self.config = cb.config
        self.ws = cb
        self.thread_pool_executor = futures.ThreadPoolExecutor(max_workers=20)

        self.triggers = []
        self.join_time = {}

        self.current_users = cb.currentusers

        self.battling = False  # self.config.get('Chatbot', 'battle')

        # Redis is optional: REDISTOGO_URL wins, otherwise the URL is built
        # from the [External] section of the config file.
        try:
            redis_uname = self.config.get('External', 'redis_uname')
            redis_pass = self.config.get('External', 'redis_pass')
            redis_server = self.config.get('External', 'redis_server')
            redis_url = os.getenv('REDISTOGO_URL', 'redis://%s:%s@%s' %
                                  (redis_uname, redis_pass, redis_server))

            self.redis = redis.from_url(redis_url)
            # self.redis = redis.from_url('redis://127.0.0.1:6379')
        except Exception as e:
            print e
            print "Redis connection failed (ignore if you're not using redis)"

        self.initialise_triggers(self.config)
        self.initialise_queue()
Exemplo n.º 11
0
def get_redis_connection(config):
    """
    Returns a redis connection from a connection config
    """
    if 'URL' in config:
        return redis.from_url(config['URL'], db=config['DB'])
    if 'USE_REDIS_CACHE' in config.keys():

        from django.core.cache import get_cache
        cache = get_cache(config['USE_REDIS_CACHE'])

        if hasattr(cache, 'client'):
            # We're using django-redis. The cache's `client` attribute
            # is a pluggable backend that return its Redis connection as
            # its `client`
            try:
                # django-redis >= 3.4.0 exposes the connection via
                # cache.client.get_client(); older versions used
                # cache.client.client. Support both (consistent with the
                # other get_redis_connection variant in this file).
                try:
                    return cache.client.get_client()
                except AttributeError:
                    return cache.client.client
            except NotImplementedError:
                pass
        else:
            # We're using django-redis-cache
            return cache._client

    return redis.Redis(host=config['HOST'],
                       port=config['PORT'], db=config['DB'],
                       password=config.get('PASSWORD', None))
Exemplo n.º 12
0
def from_settings(settings):
    """Create a RabbitMQ channel and a Redis client from a settings object.

    :param: settings object
    :return: (channel, redis_server) tuple
    """

    connection_type = settings.get('RABBITMQ_CONNECTION_TYPE', RABBITMQ_CONNECTION_TYPE)
    connection_parameters = settings.get('RABBITMQ_CONNECTION_PARAMETERS', RABBITMQ_CONNECTION_PARAMETERS)

    # Dispatch table mapping the configured type name to a pika class.
    connection_classes = {
        'blocking': pika.BlockingConnection,
        'libev': pika.LibevConnection,
        'select': pika.SelectConnection,
        'tornado': pika.TornadoConnection,
        'twisted': pika.TwistedConnection,
    }
    connection_cls = connection_classes[connection_type]
    connection = connection_cls(pika.ConnectionParameters(**connection_parameters))

    channel = connection.channel()
    channel.basic_qos(prefetch_count=1)

    url = settings.get('REDIS_URL', REDIS_URL)
    host = settings.get('REDIS_HOST', REDIS_HOST)
    port = settings.get('REDIS_PORT', REDIS_PORT)

    # REDIS_URL takes precedence over host/port specification.
    redis_server = redis.from_url(url) if url else redis.Redis(host=host, port=port)

    return channel, redis_server
Exemplo n.º 13
0
 def __init__(self, hot_key='hot', url="redis://127.0.0.1", db=None, **kwargs):
     # Connect to Redis at *url* and verify the server is reachable.
     # NOTE(review): ``except ..., err`` is Python 2 syntax — Py2-only code.
     self._conn = None
     self._conn = redis.from_url(url, db=db, **kwargs)
     try:
         self._conn.ping()
     except redis.ConnectionError, err:
         # Re-raise as this module's own ConnectionError type.
         raise ConnectionError(str(err))
Exemplo n.º 14
0
    def handle(self, *args, **options):
        """Process statistics for every cached state, county and MSA."""
        redis_conn = redis.from_url(os.getenv("REDISTOGO_URL", "redis://localhost:6379"))

        def fetch_json(key, missing_msg):
            # Load a JSON payload from redis, failing loudly when absent.
            raw = redis_conn.get(key)
            if raw is None:
                raise CommandError(missing_msg)
            return loads(raw)

        states = fetch_json("states", '"states" are not in redis cache!')
        for state_data in states:
            self.stdout.write("Processing state: %s\n" % state_data["NAME"])
            get_statistics_for_area(None, state_data["state"])

        counties = fetch_json("counties", '"counties" are not in redis cache!')
        for county_data in counties:
            self.stdout.write("Processing county: %s\n" % county_data["NAME"])
            get_statistics_for_area(None, "%s,%s" % (county_data["state"], county_data["county"]))

        for state_data in states:
            state_msas = fetch_json(
                "msa,%s" % state_data["state"],
                '"MSAs" for %s are not in redis cache!' % state_data["state"])
            for msa_data in state_msas:
                self.stdout.write("Processing msa: %s\n" % msa_data["NAME"])
                get_statistics_for_area(
                    None,
                    "%s,%s,"
                    % (msa_data["state"], msa_data["metropolitan statistical area/micropolitan statistical area"]),
                )
Exemplo n.º 15
0
 def __init__(self, url, key, db=None, includes=None, excludes=None, **kwargs):
     """Log handler that pushes records into the Redis list at *key*."""
     super(RedisHandler, self).__init__()
     self.__key = key
     self.__includes = includes
     # Default exclusions are record attributes that don't serialize well.
     if excludes:
         self.__excludes = excludes
     else:
         self.__excludes = ("args", "exc_info", "msg", "stack_info")
     self.__redis = redis.from_url(url, db, **kwargs)
     # Start from a clean list so entries from old runs don't mix in.
     self.__redis.delete(self.__key)
Exemplo n.º 16
0
def settings_update(request, setting_to_set, new_value=None):
    """Create or update a single AccountSetting for the logged-in user.

    POST requests carry the value in ``new_value``; the saved value is also
    mirrored into a per-user Redis hash as a best-effort cache.
    """
    if request.method == "POST":
        new_value = request.POST["new_value"]
    # only except post? they have to be logged in anyway though...
    try:  # this could get a get_or_create but that limits us and would just make us write code if we wanted slightly different functionality
        setting = AccountSetting.objects.get(user=request.user, setting_name=setting_to_set)
        if setting.setting_value == new_value:
            return HttpResponse(json.dumps({"msg": "no change"}), content_type='application/json', status=200)
    except Exception as e:
        # No existing row (or lookup failed) — start a fresh setting.
        print("Hoping it just didn't exit yes, just in case :: {}".format(e))
        setting = AccountSetting()
        setting.user = request.user
        setting.setting_name = setting_to_set
    print(setting_to_set, new_value)
    # Second no-change check covers the freshly-created branch above.
    if setting.setting_value == new_value:
        pass
    else:
        setting.setting_value = new_value
        setting.save()
        # now that it's saved in the DB lets save it in the cache! someday...
        try:
            r = redis.from_url(os.getenv('REDISTOGO_URL', 'redis://localhost:6379'))
            r.hset("user.settings.{}.hash".format(request.user.id), setting_to_set, new_value)
        except Exception as e:
            # Cache write is best-effort; never fail the request over it.
            print(e)
    return HttpResponse(json.dumps({"msg": "I'm not a useful return..."}), content_type='application/json', status=200)
Exemplo n.º 17
0
def main():
    """Tail the MySQL binlog and mirror row changes into the cache.

    Resumes from the binlog position persisted in Redis, saves each row
    event into the Rcache, and writes the advancing log position back to
    Redis so a restart can continue where it left off.
    """
    rclient = redis.from_url(redis_url)
    cache = rcache.Rcache(cache_url, server_id)

    # Resume point persisted by a previous run (absent on first run).
    log_file = rclient.get("log_file")
    log_pos = rclient.get("log_pos")
    log_pos = int(log_pos) if log_pos else None

    # RotateEvent must always be listened to so position tracking works.
    only_events = _trans_events(events)
    only_events.append(RotateEvent)

    stream = BinLogStreamReader(
        connection_settings=mysql_settings,
        server_id=server_id,
        blocking=blocking,
        only_events=only_events,
        only_tables=tables,
        only_schemas=schemas,
        resume_stream=True,  # for resuming
        freeze_schema=False, # do not support alter table event for faster
        log_file=log_file,
        log_pos=log_pos)
    row_count = 0

    for binlogevent in stream:
        if int(time.time()) - binlogevent.timestamp > binlog_max_latency:
            logger.warn("latency[{}] too large".format(
                int(time.time()) - binlogevent.timestamp))
        logger.debug("catch {}".format(binlogevent.__class__.__name__))
        if isinstance(binlogevent, RotateEvent):  #listen log_file changed event
            rclient.set("log_file", binlogevent.next_binlog)
            rclient.set("log_pos", binlogevent.position)
            logger.info("log_file:{}, log_position:{}".format(
                binlogevent.next_binlog, binlogevent.position))
        else:
            row_count += 1
            table = "%s.%s" % (binlogevent.schema, binlogevent.table)
            vals_lst = _get_row_values(binlogevent)
            if not binlogevent.primary_key:
                # NOTE(review): this .get() discards its result — looks like
                # a no-op; presumably the table was meant to be recorded or
                # skipped here. Confirm intent.
                tables_without_primary_key.get(table, None)
            try:
                cache.save(table, binlogevent.primary_key, vals_lst)
                logger.debug("save {} {} rows to cache".format(
                    table, len(vals_lst)))
            except rcache.SaveIgnore as err:
                logger.warning(str(err))
            except rcache.FullError as err:
                # Cache full: trigger a dump, then retry the save once.
                logger.info("cache OOM occured: {}.trigger dump command".format(
                    str(err)))
                dump_code = _trigger_dumping()
                cache.save(table, binlogevent.primary_key, vals_lst)
            if cache_max_rows and cache.size > cache_max_rows:
                logger.info("cache size:{} >= {}, trigger dumping".format(
                   cache.size, cache_max_rows))
                _trigger_dumping()
            # Persist position so a restart resumes after this event.
            rclient.set("log_pos", binlogevent.packet.log_pos)
        if row_count % 1000 == 0:
            logger.info("save {} changed rows".format(row_count))

    stream.close()
Exemplo n.º 18
0
    def __init__(self, author, text, bot, add_from_twitch=True, useOnlyText=False):
        """Compose a chat message, optionally tagged with its Twitch origin."""
        self.redis = redis.from_url(config.redisURL)

        twitch_from_prefix = "(From Twitch)"

        self.author = author

        # Bot options may explicitly disable the "(From Twitch)" prefix.
        if "options" in bot:
            bot_options = bot['options']
            if "displayFromMessages" in bot_options and bot_options['displayFromMessages'] is False:
                add_from_twitch = False

        # Assemble the final message from its optional parts.
        parts = []
        if add_from_twitch:
            parts.append(twitch_from_prefix + " ")
        if author and useOnlyText is not True:
            parts.append(author + ": ")
        parts.append(text)
        self.message = "".join(parts)

        self.bot_id = bot['_id']
Exemplo n.º 19
0
def assignTemplateForView(request):
    """
        accept post, else use get function

        On POST, save (or update) the user's template mapping for a view and
        mirror it into the per-user Redis settings hash; on GET, render an
        empty form.
    """
    if request.method == "POST":
        try:
            print(request.POST["viewName"])
            temp = UserTemplates.objects.get(user=request.user, viewName=request.POST["viewName"])
        except:
            # No existing mapping for this view — start a fresh row.
            # NOTE(review): bare except also hides unrelated errors.
            temp = UserTemplates()
        form = UserTemplatesForm(request.POST, instance=temp)
        # print(dir(form))
        try:
            form.full_clean()
            itm = form.save()
            redis_url = os.getenv('REDISTOGO_URL', 'redis://localhost:6379')  # this is for the heroku install!
            r = redis.from_url(redis_url)
            try:
                print("{0} - {1} - {2}".format("user.settings.{}.hash".format(itm.user.id), itm.viewName, itm.pathToTemplate))
                # Mirror the saved mapping into the per-user settings hash.
                x = r.hset("user.settings.{}.hash".format(itm.user.id), itm.viewName, itm.pathToTemplate)
                print(x)
            except Exception as e:
                print("post set...")
                print(e)
                print("...post e")
        except Exception as e:
            # Validation/save failure is logged but still renders the form.
            print("Clean or save failed...")
            print(e)
            print(form.errors)
    else:
        form = UserTemplatesForm(initial={"user": request.user})
    return render_to_response("user/user_template_form.html", {'form': form}, RequestContext(request))
Exemplo n.º 20
0
    def __init__(self, *args, **config):
        """Observer publishing to Redis; config keys used here:
        ``redis_url``, ``timeout``, ``restart_on_timeout``."""
        super(RedisObserver, self).__init__(*args, **config)
        # NOTE(review): the second positional argument of redis.from_url is
        # ``db``; the float timeout lands in that slot — presumably a socket
        # timeout was intended. Confirm against the redis-py version in use.
        self.redis = redis.from_url(config.get("redis_url",
                                    "redis://localhost:6379/0"),
                                    float(config.get("timeout", 5)))

        self.restart_on_timeout = config.get("restart_on_timeout", None)
Exemplo n.º 21
0
def setup_redis(args):
    """Create a Redis connection from parsed CLI args and register it with RQ."""
    if args.url is None:
        # No URL given: connect with discrete host/port/db/password args.
        conn = redis.Redis(host=args.host, port=args.port, db=args.db,
                           password=args.password)
    else:
        conn = redis.from_url(args.url, db=args.db)
    use_connection(conn)
Exemplo n.º 22
0
 def __init__(self):
     """Load every '*_small' rec-area payload from Redis into a dict."""
     self.redis_instance = redis.from_url(REDIS_URL)
     self.recarea_list = []
     # Keys look like '<id>_small'; map the id prefix to the decoded JSON.
     self.small_recarea_dict = {}
     for redis_key in self.redis_instance.keys():
         if redis_key.find('_small') != -1:
             self.small_recarea_dict[redis_key.split('_')[0]] = json.loads(self.redis_instance.get(redis_key))
Exemplo n.º 23
0
 def test_get_website_from_redis(self):
     """get_website must read from the primary cache first, then the backup."""
     # clear redis
     # BUG FIX: the connection is named redis_conn so it no longer shadows
     # the ``redis`` module (the original only worked by accident because
     # Redis.from_url is a classmethod reachable from an instance).
     redis_conn = get_redis()
     redis_conn.flushall()

     # store known website in redis
     website = Website.objects.all()[0]
     logging.debug('website from DB: %s' % website)
     website.prepare()
     website._special_attribute = 'special'

     cache_website(website, get_website_redis_key(website.token), redis_conn)

     # retrieve via normal method
     website_from_method = get_website(website.token)

     self.assertEqual(website, website_from_method)
     self.assertTrue(hasattr(website_from_method, '_special_attribute') and website_from_method._special_attribute == 'special')

     # clear redis and store website in backup...
     redis_conn.flushall()
     redis_conn = switch_redis()
     website._special_attribute = 'special backup'

     cache_website(website, get_website_redis_key(website.token), redis_conn)

     # be very clear that it's in the backup: wipe the main db too
     main_redis = redis.from_url(REDIS_URL)  # Redis(**REDIS_DBS['default'])
     main_redis.flushdb()

     # retrieve via normal method
     website_from_method = get_website(website.token)

     self.assertEqual(website, website_from_method)
     self.assertTrue(hasattr(website_from_method, '_special_attribute') and website_from_method._special_attribute == 'special backup')
Exemplo n.º 24
0
    def run(self):
        """Subscribe to the 'table' pub/sub channel and react to game
        messages until this player quits or someone wins."""
        REDIS_URL = os.environ.get('OPENREDIS_URL', 'redis://localhost:6379')
        client = redis.from_url(REDIS_URL)
        pubsub = client.pubsub()
        pubsub.subscribe(['table'])

        for message in pubsub.listen():
            # Skip the subscription confirmation message.
            if message['type'] == 'subscribe':
                continue
            data = json.loads(message['data'])
            self.output_device(data)
            print('player: {} next: {} data: {}'
                  .format(self.name, self.next_player, data))
            if data['action'] == 'quit':
                if data['player'] == self.name:
                    break
                if data['player'] == self.next_player:
                    # Our successor left; inherit their successor.
                    self.next_player = data['next']
                continue
            if data['action'] == 'won':
                if data['player'] == self.name:
                    break
                if data['player'] == self.next_player:
                    self.next_player = data['next']
                continue
            if data['action'] == 'join':
                # A new player slots in between us and our old successor.
                if data['before'] == self.next_player:
                    self.next_player = data['player']
                continue
            # Regular move: remember the table state; play when it's our turn.
            self.current_middle = data['middle']
            if data['next'] == self.name:
                self.play(data)
Exemplo n.º 25
0
def notification():
    """Poll the Garoa hackerspace status API and, when the open/closed state
    changed since the last check, notify all Pushbullet clients."""
    logging.basicConfig()
    redis_instance = redis.from_url(settings.REDIS_URL)

    garoa_response = requests.get('http://status.garoa.net.br/status')
    if garoa_response.status_code == 200:
        garoa_response_json = garoa_response.json()
        garoa_opened = garoa_response_json.get('open', False)
        # Previous state is encoded by key presence: key set -> was open.
        garoa_opened_last_check = bool(redis_instance.get('GAROA_OPEN'))

        if garoa_opened:
            garoa_status = 'aberto!'
            redis_instance.set('GAROA_OPEN', True)
        else:
            garoa_status = 'fechado.'
            redis_instance.delete('GAROA_OPEN')

        # NOTE(review): garoa_opened may be any truthy JSON value while the
        # last-check value is a bool; the != comparison assumes the API
        # returns a real boolean — confirm.
        if garoa_opened != garoa_opened_last_check:
            notification_request = {
                'title': 'Garoa {status}'.format(status=garoa_status),
                'type': 'note',
            }

            # Push the state change to every registered Pushbullet client.
            for client in PushbulletClient.objects.all():
                response = requests.post('https://api.pushbullet.com/v2/pushes', auth=(client.access_token, ''), data=notification_request)
                if response.status_code == 200:
                    logging.warn(u'Usuário {id} notificado.'.format(id=client.pk))
                else:
                    logging.warn(u'Erro na notificação do Usuário {id}.'.format(id=client.pk))
        else:
            logging.warn(u'O status do Garoa não mudou desde a última verificação.')
    else:
        logging.warn(u'Erro na consulta à API do status do Garoa.')
Exemplo n.º 26
0
    def run(self, debug=None):
        """
        Start the forwarder.

        :param debug: whether to enable debug mode
        :return:
        """

        # Input and output address lists must pair up one-to-one.
        assert len(self.config['FORWARDER_INPUT_ADDRESS_LIST']) == len(self.config['FORWARDER_OUTPUT_ADDRESS_LIST'])

        # Optional shared store backed by Redis, keyed with the configured prefixes.
        if self.config['REDIS_URL']:
            import redis
            rds = redis.from_url(self.config['REDIS_URL'])
            self.share_store = ShareStore(rds,
                                          self.config['REDIS_KEY_SHARE_PREFIX'] + self.config['REDIS_USER_KEY_PREFIX'],
                                          self.config['REDIS_KEY_SHARE_PREFIX'] + self.config['REDIS_NODES_KEY'],
                                          self.config['REDIS_USER_MAX_AGE']
                                          )

        if debug is not None:
            self.debug = debug

        # One worker process per input address.
        workers = len(self.config['FORWARDER_INPUT_ADDRESS_LIST'])

        def run_wrapper():
            logger.info('Running server, debug: %s, workers: %s',
                        self.debug, workers)

            setproctitle.setproctitle(self._make_proc_name('forwarder:master'))
            # Signals can only be installed from the main thread.
            self._handle_parent_proc_signals()
            self.proc_mgr.spawn_workers(workers, self._worker_run)

        run_wrapper()
Exemplo n.º 27
0
def particle(hydrodataset, part, model):
    """Run the forcing for a single particle and report the outcome on the
    model's Redis results channel ("COMPLETED" or "FAILED").

    :param hydrodataset: dataset passed through to BaseForcer
    :param part: particle object; its ``uid`` is included in the status
    :param model: run configuration (redis channels/url, forcing options)
    """
    from paegan.logger import logger
    from paegan.logger.redis_handler import RedisHandler
    rhandler = RedisHandler(model.redis_log_channel, model.redis_url)
    rhandler.setLevel(logging.PROGRESS)
    logger.addHandler(rhandler)

    # BUG FIX: connect before entering the try-block — the except/else
    # clauses publish through this connection, so it must exist even when
    # BaseForcer construction fails (the original could raise NameError
    # inside its own except handler).
    redis_connection = redis.from_url(model.redis_url)
    try:
        forcer = BaseForcer(hydrodataset,
                            particle=part,
                            common_variables=model.common_variables,
                            times=model.times,
                            start_time=model.start,
                            models=model._models,
                            release_location_centroid=model.reference_location.point,
                            usebathy=model._use_bathymetry,
                            useshore=model._use_shoreline,
                            usesurface=model._use_seasurface,
                            reverse_distance=model.reverse_distance,
                            bathy_path=model.bathy_path,
                            shoreline_path=model.shoreline_path,
                            shoreline_feature=model.shoreline_feature,
                            time_method=model.time_method,
                            redis_url=model.redis_url,
                            redis_results_channel=model.redis_results_channel,
                            shoreline_index_buffer=model.shoreline_index_buffer
                           )
        forcer.run()
    except Exception:
        redis_connection.publish(model.redis_results_channel, json.dumps({"status" : "FAILED", "uid" : part.uid }))
    else:
        redis_connection.publish(model.redis_results_channel, json.dumps({"status" : "COMPLETED", "uid" : part.uid }))
Exemplo n.º 28
0
def send_directions_page(recipient, page_size):
    """Send the next *page_size* direction steps to *recipient* via MMS.

    Steps are JSON blobs in a per-recipient Redis list; the sent page is
    trimmed off the front, and the last message of a page invites the user
    to reply "next" when more steps remain.

    :param recipient: phone number the steps belong to
    :param page_size: number of steps per page
    """
    redis_client = redis.from_url(os.getenv('REDIS_URL', 'redis://localhost:6379'))
    key = STEPS_KEY_TMPL.format(phone_number=recipient)

    steps = redis_client.lrange(key, 0, page_size - 1)
    # BUG FIX: nothing queued for this recipient — the original raised
    # IndexError on steps[-1].
    if not steps:
        return
    length = redis_client.llen(key)
    redis_client.ltrim(key, page_size, length - 1)
    head, tail = steps[:-1], steps[-1]
    for step in head:
        decoded = json.loads(step)
        send_message(
            recipient,
            TWILIO_SHORTCODE,
            body=decoded['text'],
            media_urls=[decoded['image']],
        )

    decoded = json.loads(tail)
    if redis_client.llen(key) > 0:
        # More pages remain: prompt for "next" and keep the key alive.
        body = '{} (Reply "next" for next page)'.format(decoded['text'])
        redis_client.expire(key, REDIS_EXPIRATION)
    else:
        body = decoded['text']

    send_message(
        recipient,
        TWILIO_SHORTCODE,
        body=body,
        media_urls=[decoded['image']],
    )
Exemplo n.º 29
0
 def test_add_command_two_lang(self):
     """Commands added under different languages are keyed per-language."""
     get_dao().add_commands(gen_test_commands(10, 'zh_TW'))
     get_dao().add_commands(gen_test_commands(20, 'en'))
     conn = redis.from_url(REDIS_URL)
     # Per-language key counts, plus the combined total.
     expected_counts = (('COMMAND::zh_TW::*', 10), ('COMMAND::en::*', 20), ('COMMAND::*', 30))
     for pattern, count in expected_counts:
         assert count == len(conn.keys(pattern))
Exemplo n.º 30
0
    def update_progress(self, finish=None):
        """ Function was rewritten from original

        Report task progress via update_state. Progress is interpolated
        between start/estimated-finish timestamps stored in a Redis hash.

        :param finish: when truthy, report 100% immediately
        """
        # Only report progress for real task invocations (request.id set).
        if self.request.id:
            if finish:
                self.update_state(None, PROGRESS, {
                    'progress_percent': 100,
                    'time_remaining': 0,
                })
            else:
                try:
                    r = redis.from_url(settings.BROKER_URL)
                    celery_task_key = 'celery_%s' % self.__class__.name
                    # Timestamps were stashed per task id when it was queued.
                    t_start, t_estimated = r.hmget(celery_task_key,
                                                   ['%s_time_start' % self.request.id,
                                                    '%s_time_estimated' % self.request.id])
                    t_start, t_estimated = int(t_start), int(t_estimated)
                    cur_time = int(time())
                    total_time = t_estimated - t_start
                    part_time = cur_time - t_start
                    if total_time:
                        # NOTE(review): can exceed 100 / go negative once the
                        # estimate is passed — presumably acceptable for display.
                        progress_percent = 100 * part_time / total_time
                        time_remaining = t_estimated - cur_time
                    else:
                        progress_percent = 100
                        time_remaining = 0

                    self.update_state(None, PROGRESS, {
                        'progress_percent': progress_percent,
                        'time_remaining': time_remaining,
                    })
                except Exception as e:
                    # Progress reporting is best-effort; never fail the task.
                    logger.debug(e)
                    logger.debug('Redis doesn\'t work 4')
Exemplo n.º 31
0
 def __init__(self):
     """Connect to the local Redis instance and start with no cached nodes."""
     self.underlying_data_nodes = {}
     self.rediscon = redis.from_url('redis://localhost:6379')
Exemplo n.º 32
0
from flask import Flask, redirect
from flask import Flask, flash, redirect, render_template, request, session, abort, url_for, make_response
import os, random
from redis import StrictRedis, from_url

# http://cristian.regolo.cc/2015/07/07/introducing-the-geo-api-in-redis.html

app = Flask(__name__)
app.secret_key = 'super secret key'

## connect to redis at startup ; connection string in case of Heroku
connection_string = 'redis://*****:*****@app.route("/drivers/<int:id>/location", methods=["PUT"])
def put_driver(id):
    """PUT handler: validate and store a driver's location.

    NOTE(review): this fragment appears truncated — no success response is
    visible after the validation guards. ``id`` also shadows the builtin.
    The ``print`` statement is Python 2 syntax.
    """
    s_lat = request.form.get('latitude')
    s_lon = request.form.get('longitude')
    print s_lat, s_lon, id, type(id)
    lat = float(s_lat)
    lon = float(s_lon)

    # NOTE(review): longitude is range-checked against ±90, but longitudes
    # span ±180 — presumably a bug; the error message matches the check,
    # so confirm the intended bounds before changing either.
    if (lat > 90 or lat < -90 or lon > 90 or lon < -90):
        return make_response(
            '{"error" : "Invalid lat/lon must be between -90 to 90"}', 422)

    # Driver ids are limited to the 1..50000 range.
    if id > 50000 or id < 1:
        return make_response('{}', 404)
Exemplo n.º 33
0
def initialize(context):
    """One-time algorithm setup: configure exposures and restrictions,
    restore the persisted day counter from Redis, attach the pipeline, and
    schedule the intraday rebalance / queue-clearing / order-check cycle.

    Args:
        context: platform-supplied algorithm context; all state is stored
            as attributes on it.
    """

    log.info('Initializing Algorithm')

    # Adjustable variables
    context.avoid_trades = []
    # NOTE(review): 'DDAIF' appears twice in this list -- harmless for
    # membership checks but probably unintended.
    context.trade_restrictions = [
        'ANDV', 'DDAIF', 'DHR', 'HL', 'FTV', 'LPX', 'DDAIF', 'NNHE', 'NVST',
        'PNM', 'SKY', 'XSAU'
    ]

    rebalance_hours = 0.0
    rebalance_minutes = 8.0
    context.long_exposure = 0.85
    context.short_exposure = -0.85
    context.num_longs = 15
    context.num_shorts = 15
    context.spyleverage = 1.25
    context.std_cutoff = 0.15

    # Fixed variables
    # Minutes after market open at which the rebalance starts.
    rebalance_start_time = rebalance_hours * 60 + rebalance_minutes
    context.combined_restrictions = context.avoid_trades + context.trade_restrictions
    context.rebalance_complete = False
    context.trade_queue = {}
    context.rolling_portfolios = []
    context.clear_queue_run = 0
    context.long_weight = context.long_exposure / context.num_longs
    context.short_weight = context.short_exposure / context.num_shorts
    context.SPY = symbol('VOO')

    # Restore the persisted day counter.  A missing key makes r.get()
    # return None and pickle.loads(None) raise TypeError; Redis outages
    # raise connection errors.  Fix: catch Exception instead of the
    # original bare `except:`, which also swallowed KeyboardInterrupt
    # and SystemExit.
    r = redis.from_url(os.environ.get("REDIS_URL"))
    try:
        loaded_state = pickle.loads(r.get('pylivetrader_redis_state'))
        loaded_ndays = loaded_state['ndays']
        log.debug('Loaded ndays = {}'.format(loaded_ndays))
    except Exception:
        loaded_ndays = 0
        log.debug('No state has been loaded: ndays = {}'.format(loaded_ndays))

    context.ndays = loaded_ndays
    context.idays = 3

    attach_pipeline(make_pipeline(), 'pipeline')

    # Scheduling functions
    schedule_function(rebalance, date_rules.every_day(),
                      time_rules.market_open(minutes=rebalance_start_time))

    # Clear the trade queue every minute for ~an hour after the rebalance.
    clear_queue_frequency = 1
    clear_queue_duration = 56
    clear_queue_start = int(rebalance_start_time) + 4
    for minutez in range(clear_queue_start,
                         clear_queue_start + clear_queue_duration,
                         clear_queue_frequency):
        schedule_function(clear_queue, date_rules.every_day(),
                          time_rules.market_open(minutes=minutez))

    # Poll open-order status every 10 minutes over the following 50 minutes.
    check_order_frequency = 10
    check_order_duration = 50
    check_order_start = int(rebalance_start_time) + 10
    for minutez in range(check_order_start,
                         check_order_start + check_order_duration,
                         check_order_frequency):
        schedule_function(check_order_status, date_rules.every_day(),
                          time_rules.market_open(minutes=minutez))

    # End-of-day bookkeeping 70 minutes after the rebalance start.
    eod_operations_start_time = int(rebalance_start_time) + 70
    schedule_function(
        eod_operations, date_rules.every_day(),
        time_rules.market_open(minutes=eod_operations_start_time))
Exemplo n.º 34
0
def get_redis():
    """Return the module-wide Redis connection, creating it on first use."""
    global REDIS_CONNECTION
    if REDIS_CONNECTION is not None:
        return REDIS_CONNECTION
    REDIS_CONNECTION = redis.from_url(get_redis_url())
    return REDIS_CONNECTION
Exemplo n.º 35
0
import redis
from rq import Worker, Connection
from config import REDIS_QUEUE, REDIS_HOST

if __name__ == '__main__':
    # Open one Redis connection and run an RQ worker on the configured queue.
    with Connection(redis.from_url(REDIS_HOST)):
        Worker(REDIS_QUEUE).work()
Exemplo n.º 36
0
    'naturaldate': humanize.naturaldate,
    'naturaltime': humanize.naturaltime,
    'naturalsize': humanize.naturalsize,
    'datetime': format_datetime,
    'isodatetime': format_isodatetime,
    'format_currency': format_currency,
    'uuid': format_uuid,
})
app.static_folder = 'static'

# Honor X-Forwarded-* headers when the app runs behind a reverse proxy.
# NOTE(review): werkzeug.contrib was removed in Werkzeug 1.0; newer
# deployments need werkzeug.middleware.proxy_fix instead -- confirm the
# pinned Werkzeug version.
if app.config['PROXY_FIX']:
    from werkzeug.contrib.fixers import ProxyFix
    app.wsgi_app = ProxyFix(app.wsgi_app,
                            num_proxies=app.config['PROXY_FIX_NUM_PROXIES'])

# Shared Redis connection, reused as the cache backend below.
redis_conn = redis.from_url(app.config['REDIS_URL'])

cache = RedisCache(host=redis_conn)  # RedisCache accepts a client via 'host'
csrf = CSRFProtect(app)
db = SQLAlchemy(app)
migrate = Migrate(app, db)

# Imported here (not at top of file) so AuthManager sees the configured app.
from wuvt.auth import AuthManager
auth_manager = AuthManager()
auth_manager.db = db
auth_manager.init_app(app)
if len(app.config['SENTRY_DSN']) > 0:
    sentry_sdk.init(app.config['SENTRY_DSN'],
                    integrations=[
                        FlaskIntegration(),
Exemplo n.º 37
0
def deploy_sandbox_shared_setup(log, verbose=True, app=None, exp_config=None):
    """Set up Git, push to Heroku, and launch the app.

    Args:
        log: progress-reporting callable (accepts a ``chevrons`` kwarg).
        verbose: when False, subprocess output is redirected to os.devnull.
        app: optional app id passed through to ``setup_experiment``.
        exp_config: optional experiment configuration for ``setup_experiment``.

    Returns:
        dict with keys ``app_name``, ``app_home``, ``dashboard_url`` and
        ``recruitment_msg``.
    """
    if verbose:
        out = None
    else:
        out = open(os.devnull, "w")

    config = get_config()
    if not config.ready:
        config.load()
    heroku.sanity_check(config)
    (heroku_app_id, tmp) = setup_experiment(
        log, debug=False, app=app, exp_config=exp_config
    )

    # Register the experiment using all configured registration services.
    if config.get("mode") == "live":
        log("Registering the experiment on configured services...")
        registration.register(heroku_app_id, snapshot=None)

    # Log in to Heroku if we aren't already.
    log("Making sure that you are logged in to Heroku.")
    heroku.log_in()
    config.set("heroku_auth_token", heroku.auth_token())
    log("", chevrons=False)

    # Change to temporary directory.
    cwd = os.getcwd()
    os.chdir(tmp)

    # Commit Heroku-specific files to tmp folder's git repo.
    git = GitClient(output=out)
    git.init()
    git.add("--all")
    git.commit('"Experiment {}"'.format(heroku_app_id))

    # Initialize the app on Heroku.
    log("Initializing app on Heroku...")
    team = config.get("heroku_team", None)
    heroku_app = HerokuApp(dallinger_uid=heroku_app_id, output=out, team=team)
    heroku_app.bootstrap()
    # PhantomJS buildpack for headless-browser use on the dynos.
    heroku_app.buildpack("https://github.com/stomita/heroku-buildpack-phantomjs")

    # Set up add-ons and AWS environment variables.
    database_size = config.get("database_size")
    redis_size = config.get("redis_size")
    addons = [
        "heroku-postgresql:{}".format(quote(database_size)),
        "heroku-redis:{}".format(quote(redis_size)),
        "papertrail",
    ]
    if config.get("sentry"):
        addons.append("sentry")

    for name in addons:
        heroku_app.addon(name)

    # Credentials and generated secrets exported to the dyno environment.
    heroku_config = {
        "aws_access_key_id": config["aws_access_key_id"],
        "aws_secret_access_key": config["aws_secret_access_key"],
        "aws_region": config["aws_region"],
        "auto_recruit": config["auto_recruit"],
        "smtp_username": config["smtp_username"],
        "smtp_password": config["smtp_password"],
        "whimsical": config["whimsical"],
        "DASHBOARD_PASSWORD": fake.password(length=20, special_chars=False),
        "DASHBOARD_USER": config.get("dashboard_user", "admin"),
        "FLASK_SECRET_KEY": codecs.encode(os.urandom(16), "hex"),
    }

    # Set up the preferred class as an environment variable, if one is set
    # This is needed before the config is parsed, but we also store it in the
    # config to make things easier for recording into bundles.
    preferred_class = config.get("EXPERIMENT_CLASS_NAME", None)
    if preferred_class:
        heroku_config["EXPERIMENT_CLASS_NAME"] = preferred_class

    heroku_app.set_multiple(**heroku_config)

    # Wait for Redis database to be ready.
    # from_url() is lazy, so the set() call is what actually probes the
    # connection; retry every 2s until the add-on is provisioned.
    # NOTE(review): no timeout -- this loops forever if Redis never comes up.
    log("Waiting for Redis...")
    ready = False
    while not ready:
        try:
            r = redis.from_url(heroku_app.redis_url)
            r.set("foo", "bar")
            ready = True
        except (ValueError, redis.exceptions.ConnectionError):
            time.sleep(2)

    log("Saving the URL of the postgres database...")
    config.extend({"database_url": heroku_app.db_url})
    config.write()
    git.add("config.txt")
    time.sleep(0.25)
    git.commit("Save URL for database")
    time.sleep(0.25)

    log("Generating dashboard links...")
    heroku_addons = heroku_app.addon_parameters()
    heroku_addons = json.dumps(heroku_addons)
    if six.PY2:
        heroku_addons = heroku_addons.decode("utf-8")
    config.extend({"infrastructure_debug_details": heroku_addons})
    config.write()
    git.add("config.txt")
    time.sleep(0.25)
    git.commit("Save URLs for heroku addon management")
    time.sleep(0.25)

    # Launch the Heroku app.
    log("Pushing code to Heroku...")
    git.push(remote="heroku", branch="HEAD:master")

    log("Scaling up the dynos...")
    default_size = config.get("dyno_type")
    for process in ["web", "worker"]:
        size = config.get("dyno_type_" + process, default_size)
        qty = config.get("num_dynos_" + process)
        heroku_app.scale_up_dyno(process, qty, size)
    if config.get("clock_on"):
        heroku_app.scale_up_dyno("clock", 1, size)

    # Give the dynos a moment to boot before hitting the launch route.
    time.sleep(8)

    # Launch the experiment.
    log("Launching the experiment on the remote server and starting recruitment...")
    launch_url = "{}/launch".format(heroku_app.url)
    log("Calling {}".format(launch_url), chevrons=False)
    launch_data = _handle_launch_data(launch_url, error=log)
    result = {
        "app_name": heroku_app.name,
        "app_home": heroku_app.url,
        "dashboard_url": "{}/dashboard/".format(heroku_app.url),
        "recruitment_msg": launch_data.get("recruitment_msg", None),
    }
    log("Experiment details:")
    log("App home: {}".format(result["app_home"]), chevrons=False)
    log("Dashboard URL: {}".format(result["dashboard_url"]), chevrons=False)
    log(
        "Dashboard user: {}".format(heroku_config.get("DASHBOARD_USER")), chevrons=False
    )
    log(
        "Dashboard password: {}".format(heroku_config.get("DASHBOARD_PASSWORD")),
        chevrons=False,
    )

    log("Recruiter info:")
    log(result["recruitment_msg"], chevrons=False)

    # Return to the branch whence we came.
    os.chdir(cwd)

    log(
        "Completed Heroku deployment of experiment ID {} using app ID {}.".format(
            config.get("id"), heroku_app_id
        )
    )
    return result
Exemplo n.º 38
0
import os, sys

from environs import Env

sys.path.append(os.path.dirname(os.path.realpath(__file__)))
import redis
from rq import Worker, Queue, Connection

env = Env()
env.read_env()

# Queues this worker listens on.
listen = ['default']

# Heroku exposes the connection string as REDISTOGO_URL; fall back to a
# local Redis instance for development.
redis_url = env.str("REDISTOGO_URL", default="redis://localhost:6379")

conn = redis.from_url(redis_url)

if __name__ == '__main__':
    with Connection(conn):
        queues = [Queue(name) for name in listen]
        worker = Worker(queues)
        worker.work(with_scheduler=True)
Exemplo n.º 39
0
# Braintree settings
# NOTE(review): payment-gateway credentials are hard-coded and committed to
# source control; move them to environment variables and rotate them.
BRAINTREE_MERCHANT_ID = 'wpb3qct435mfmfqp'  # Merchant ID
BRAINTREE_PUBLIC_KEY = 's2g7pgqyz7x7z656'  # Public key
BRAINTREE_PRIVATE_KEY = 'e662bef77c6008f8491262b9b9d30024'  # Private key

from braintree import Configuration, Environment

Configuration.configure(Environment.Sandbox, BRAINTREE_MERCHANT_ID,
                        BRAINTREE_PUBLIC_KEY, BRAINTREE_PRIVATE_KEY)

# REDIS Settings
REDIS_HOST = 'localhost'
REDIS_PORT = 6379
REDIS_DB = 1

import redis

redis_url = os.getenv('REDISTOGO_URL', 'redis://localhost:6379')
# NOTE(review): this rebinds the name 'redis' from the module to a client
# instance -- any later 'redis.<module attr>' access in this file will fail.
redis = redis.from_url(redis_url)

# Django Channels layer backed by the same Redis (REDIS_URL env or local).
CHANNEL_LAYERS = {
    "default": {
        "BACKEND": "asgi_redis.RedisChannelLayer",
        "CONFIG": {
            "hosts": [os.environ.get('REDIS_URL', 'redis://localhost:6379')],
        },
        "ROUTING": "chat.routing.channel_routing",
    },
}
Exemplo n.º 40
0
# coding=utf-8
import telebot
from emoji import emojize
import redis
from redis import StrictRedis

# NOTE(review): the Redis credentials and bot token below are hard-coded;
# move them to environment variables and rotate them -- anyone with repo
# access can hijack both the database and the bot.
r = redis.from_url(
    'redis://*****:*****@ec2-54-247-139-72.eu-west-1.compute.amazonaws.com:23429'
)

TOKEN = '204715944:AAHhnfXiWQDcpKAPiCRuv0GVOENNoDQylvw'
bot = telebot.TeleBot(TOKEN)

# Emoji shortcuts used when building button/menu labels.
heart = emojize(':heart:', use_aliases=True)
right = emojize(':right_arrow:', use_aliases=True)
left = emojize(':left_arrow:', use_aliases=True)
ledger = emojize(':ledger:', use_aliases=True)
phone = emojize(':speech_balloon:', use_aliases=True)
faqq = emojize(':page_facing_up:', use_aliases=True)
info = emojize(':information:', use_aliases=True)
house = emojize(':house:', use_aliases=True)

# Seed profile records keyed 0..2.  The original wrapped each literal in a
# redundant int() call; plain int keys are identical (redis stringifies
# keys either way).
r.set(0, "Лера\nhttps://telegra.ph/file/fb134b7947d17e4522981.png"
      "\n\nStatus: ")

r.set(1, "Софи\nhttps://telegra.ph/file/2848b75621de5868a9814.png"
      "\n\nStatus: ")

r.set(2, "Виктория\nhttps://telegra.ph/file/59880dca747f0556b3948.png"
      "\n\nStatus: ")
Exemplo n.º 41
0
 def __init__(self):
     """Create the Redis client from the URL configured in settings."""
     redis_url = settings.REDIS_URL
     self.client = redis.from_url(redis_url)
Exemplo n.º 42
0
 def __init__(self, secret, redis_uri="redis://localhost:6379"):
     """Store the signing secret and open a Redis connection.

     Raises:
         ValueError: if *secret* is None.
     """
     if secret is None:
         raise ValueError("Secret required")
     # Fix: the original printed the secret to stdout (and used py2 print
     # syntax); never log credentials.
     self.redis = redis.from_url(redis_uri)
     self.secret = secret
Exemplo n.º 43
0
import json
import os
import threading
import urllib

from dropbox import Dropbox, DropboxOAuth2Flow
from dropbox.files import DeletedMetadata, FolderMetadata, WriteMode
from flask import abort, Flask, redirect, render_template
from flask import Response, request, session, url_for
import redis

from data_cleaning_tools import clean_data

# Redis connection string from the Heroku Redis To Go add-on.
redis_url = os.environ['REDISTOGO_URL']
# NOTE(review): this prints the full URL including embedded credentials;
# consider removing before production use.
print("hello world! this is the redis url: {}".format(redis_url))
redis_client = redis.from_url(redis_url)

# App key and secret from the App console (dropbox.com/developers/apps)
APP_KEY = os.environ['APP_KEY']
APP_SECRET = os.environ['APP_SECRET']

app = Flask(__name__)
app.debug = True  # NOTE(review): should be False in production

# A random secret used by Flask to encrypt session data cookies
app.secret_key = os.environ['FLASK_SECRET_KEY']


def get_url(route):
    '''Generate a proper URL, forcing HTTPS if not running locally'''
    print("GET_URL...")
Exemplo n.º 44
0
 def __init__(self, setting, crawler):
     """Retry middleware that also refreshes cookies stored in Redis db 1."""
     RetryMiddleware.__init__(self, setting)
     # Fix: the keyword is ``decode_responses`` (plural); the original
     # ``decode_response`` is not a redis-py parameter and raises
     # TypeError when the client is constructed.
     self.rconn = redis.from_url(setting['REDIS_URL'],
                                 db=1,
                                 decode_responses=True)
     init_cookie(self.rconn, crawler.spider.name)
Exemplo n.º 45
0
def log_post(id):
    """Mark *id* as posted by storing the string 'true' under it in Redis."""
    redis.from_url(os.environ.get("REDIS_URL")).set(id, 'true')
Exemplo n.º 46
0
    app.debug = True
    app.config['DEBUG'] = True
    app.config["DEBUG_TB_INTERCEPT_REDIRECTS"] = False
    app.config["SQLALCHEMY_RECORD_QUERIES"] = True
    app.config["SECRET_KEY"] = os.getenv("SECRET_KEY")
    toolbar = DebugToolbarExtension(app)

# gzip responses
Compress(app)
app.config["COMPRESS_DEBUG"] = compress_json

# for running rq jobs
ti_queues = []

# Single shared Redis connection (db 0) used by every RQ queue below.
redis_rq_conn = redis.from_url(os.getenv("REDIS_URL",
                                         "redis://127.0.0.1:6379"),
                               db=0)

# Creates queues named ti-queue-0 and ti-queue-1.
for i in range(0, 2):  # number of queues to spin up
    ti_queues.append(Queue("ti-queue-{}".format(i), connection=redis_rq_conn))

# aws s3 connection
s3_conn = boto.connect_s3(os.getenv("AWS_ACCESS_KEY_ID"),
                          os.getenv("AWS_SECRET_ACCESS_KEY"))
requests_cache_bucket = s3_conn.get_bucket('tng-requests-cache')

# imports got here for tables that need auto-created.
# import publication
# import version
#
# db.create_all()
Exemplo n.º 47
0
# Check GitHub for a newer release of the script (best effort).
try:
    with urllib.request.urlopen("https://raw.githubusercontent.com/corbindavenport/tootbot/update-check/current-version.txt") as url:
        s = url.read()
        new_version = s.decode("utf-8").rstrip()
        current_version = 2.4  # Current version of script
        if (current_version < float(new_version)):
            print('[WARN] A new version of Tootbot (' + str(new_version) + ') is available! (you have ' + str(current_version) + ')')
            print('[WARN] Get the latest update from here: https://github.com/corbindavenport/tootbot/releases')
        else:
            print('[ OK ] You have the latest version of Tootbot (' + str(current_version) + ')')
except Exception as e:
    # Fix: Exception instead of BaseException, so Ctrl-C / SystemExit still
    # work; the redundant url.close() after the with-block was dropped
    # (the context manager already closes the response).
    print('[EROR] Error while checking for updates:', str(e))
# Connect to Redis database
try:
    r = redis.from_url(os.environ.get("REDIS_URL"))
    # Fix: from_url() is lazy and never touches the network, so the
    # original except clause could never fire; ping() forces a real
    # connection so a bad REDIS_URL is caught here, not later.
    r.ping()
except Exception as e:
    print('[EROR] Error while connecting to Redis:', str(e))
    print('[EROR] Tootbot cannot continue, now shutting down')
    exit()
# General settings -- these raise immediately if the env var is unset
# (int(None)/strtobool(None) fail), which makes them required config.
DELAY_BETWEEN_TWEETS = int(os.environ.get('DELAY_BETWEEN_POSTS', None))
POST_LIMIT = int(os.environ.get('POST_LIMIT', None))
SUBREDDIT_TO_MONITOR = os.environ.get('SUBREDDIT_TO_MONITOR', None)
NSFW_POSTS_ALLOWED = bool(distutils.util.strtobool(
    os.environ.get('NSFW_POSTS_ALLOWED', None)))
SPOILERS_ALLOWED = bool(distutils.util.strtobool(
    os.environ.get('SPOILERS_ALLOWED', None)))
SELF_POSTS_ALLOWED = bool(distutils.util.strtobool(
    os.environ.get('SELF_POSTS_ALLOWED', None)))
if os.environ.get('HASHTAGS', None) == 'false':
Exemplo n.º 48
0
def duplicate_check(id):
    """Return True when *id* already has an entry in Redis, else False."""
    conn = redis.from_url(os.environ.get("REDIS_URL"))
    return bool(conn.get(id))
Exemplo n.º 49
0
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'posthog.settings')

app = Celery('posthog')

# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
#   should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')

# Load task modules from all registered Django app configs.
app.autodiscover_tasks()

# Connect to our Redis instance to store the heartbeat
# (explicitly db 0, regardless of any db in the URL path).
redis_instance = redis.from_url(settings.REDIS_URL, db=0)


@app.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
    """Register the recurring Celery beat tasks once the app is configured."""
    # Heartbeat every 10sec to make sure the worker is alive
    sender.add_periodic_task(10.0,
                             redis_heartbeat.s(),
                             name='10 sec heartbeat')
    # Repartition the events table on the given weekdays.
    sender.add_periodic_task(
        crontab(day_of_week='mon,fri'),  # check twice a week
        update_event_partitions.s(),
    )
    # Recompute cohorts every 15 minutes.
    sender.add_periodic_task(15 * 60, calculate_cohort.s(), name='debug')

Exemplo n.º 50
0
import arrow
import logging
import os
import requests
import redis
import sys

# Console logger for this script, emitting to stdout at DEBUG level.
logger = logging.getLogger('osm')
logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler(sys.stdout)
ch.setLevel(logging.DEBUG)
logger.addHandler(ch)
redis_url = os.environ.get("REDIS_URL")
# Fall back to a plain dict when no Redis is configured.
# NOTE(review): a dict only mimics part of the client API -- any later
# r.get()/r.set() call would fail on the dict fallback; verify usage.
r = redis.from_url(redis_url) if redis_url else dict()

new_user_json_url = 'https://s3.amazonaws.com/data.openstreetmap.us/users/newest.json'
slack_url = os.environ.get('SLACK_WEBHOOK_URL')


def send_to_slack(message):
    """Log *message* and POST it to the configured Slack webhook.

    Returns the requests.Response from the webhook call.
    """
    logger.info("Telling Slack: %s", message)
    response = requests.post(slack_url, json={'text': message})
    return response


def interesting_change(feature):
    props = feature.get('properties')
    contains = props.get('inside')

    if not contains:
        return False
 def __init__(self, *args, **kwargs):
     # Let the parent class finish its own initialization first.
     super().__init__(*args, **kwargs)
     # Shared HTTP session so outgoing requests reuse connections.
     self.session = requests.Session()
     # Redis connection built from the project settings.
     self.redis = redis.from_url(settings.REDIS_URL)
     # Buffer of objects accumulated for a later bulk save.
     self.to_save = []
Exemplo n.º 52
0
2) Some classes instantiated (including DG auth, Google Cloud Storage) 
create complex local context and require asyncio event
scheduling that cannot be pickled and therefore cannot be queued.
Via these wrappers, the context is created only in the worker process.
"""
import json
import os

import redis

from grabbers import base, dg, landsat, planet_grabber

# Heroku provides the env variable REDIS_URL for Heroku redis;
# the default redis://redis_db:6379 points to the local docker redis
redis_url = os.getenv('REDIS_URL', 'redis://redis_db:6379')
# decode_responses=True so values come back as str rather than bytes.
connection = redis.from_url(redis_url, decode_responses=True)

# Maps the provider name received in a job to its grabber class.
PROVIDER_CLASSES = {
    'digital_globe': dg.DGImageGrabber,
    'landsat': landsat.LandsatThumbnails,
    'planet': planet_grabber.PlanetGrabber
}


def pull(db_key, provider, bbox, **specs):
    """Pull an image for *bbox* from *provider* and store the result
    (JSON-encoded) in Redis under *db_key*."""
    grabber_cls = PROVIDER_CLASSES[provider]
    grabber = grabber_cls(**specs)
    records = base.loop(grabber.pull)(bbox)
    reformatted = _format_exceptions(*records)
    connection.set(db_key, json.dumps(reformatted))
Exemplo n.º 53
0
#!/usr/bin/env python

import os
import json
import redis
from flask import Flask
from flask import request
from linkextractor import extract_links

# Flask application; routes are registered below.
app = Flask(__name__)
redis_conn = redis.from_url(os.getenv("REDIS_URL", "redis://*****:*****@app.route("/")
def index():
    """Root endpoint: return a one-line usage hint."""
    usage = "Usage: http://<hostname>[:<prt>]/api/<url>"
    return usage


@app.route("/api/<path:url>")
def api(url):
    qs = request.query_string.decode("utf-8")
    if qs != "":
        url += "?" + qs

    jsonlinks = redis_conn.get(url)
    if not jsonlinks:
        links = extract_links(url)
        jsonlinks = json.dumps(links, indent=2)
        redis_conn.set(url, jsonlinks)

    response = app.response_class(status=200,
Exemplo n.º 54
0
from rq import Connection, Queue
import redis
import requests
from flask import Flask, request, url_for, abort, \
     render_template, send_from_directory, Response, redirect

from store import store_number

app = Flask(__name__, template_folder='templates')

# Development-only defaults; override via environment for production.
app.config.update(dict(
    DEBUG=True,
    ENVIRONMENT="development",
))

conn = redis.from_url('redis://*****:*****@app.route('/trait')
def trait():
    """Render the trait entry page."""
    template_name = 'trait.html'
    return render_template(template_name)


@app.route('/insert', methods=["GET", "POST"])
def insert():
    data = dict(request.get_json())['data']
    if "value" not in data:
        return json.dumps({
            'success': False,
            'err': 'value is missing'
Exemplo n.º 55
0
def end_rate_limit(page):
    r = redis.from_url(os.environ.get("REDIS_URL"))

    r.set(redis_key(page, 'started'), pickle.dumps(None))
    r.set(redis_key(page, 'finished'), pickle.dumps(datetime.utcnow()))
Exemplo n.º 56
0
import os
import logging
import coloredlogs
from configparser import ConfigParser
from flask import Flask, escape, request, jsonify
import redis as redis_

from cloudapp import cloudapp
logger = logging.getLogger(__name__)
coloredlogs.install(level='INFO')
# Runtime configuration: Redis URL and cloudapp credentials from config.cfg.
config = ConfigParser()
config.read('config.cfg')
app = Flask(__name__)
# 'redis' here is the client instance; the module was imported as redis_.
redis = redis_.from_url(config.get('database', 'redis'))
cloud = cloudapp(config.get('cloudapp', 'acc'),
                 config.get('cloudapp', 'passwd'))


def get_auth(auth_key):
    result = list(redis.scan_iter('*'))
    collect = []
    collect_auth = []
    for data in result:
        collect.append(data.decode())
        # data.decode()
    for d in collect:
        collect_auth.append(redis.get(d).decode())

    logger.info(collect_auth)

    if auth_key in collect_auth:
Exemplo n.º 57
0
import os
from flask import Flask, render_template, request, redirect, jsonify
import shortener
import redis
from scout_apm.flask import ScoutApm

app = Flask(__name__)

# Application performance monitoring.
ScoutApm(app)

# Redis-backed storage; REDISCLOUD_URL is provided by the Heroku add-on.
db = redis.from_url(os.environ['REDISCLOUD_URL'])
# db=redis.Redis(host='localhost', port=6379, password='')


@app.route('/')
def main():
    """Serve the landing page."""
    landing_page = 'index.html'
    return render_template(landing_page)


@app.route('/checkcustom/<url>')
def checkcustom(url):
    """Return "1" if *url* is already taken as a custom alias, else "0".

    The response is a plain string so the front-end can test it directly.
    """
    # hexists returns a boolean; the leftover debug print of its type
    # was removed and the if/else collapsed.
    return "1" if db.hexists("custom", url) else "0"


@app.route('/shorten/', methods=['POST', 'GET'])
def shorten():
Exemplo n.º 58
0
 def __init__(self, bot):
     """Keep a reference to the bot and open a Redis connection from the
     REDIS_URL environment variable."""
     self.bot = bot
     redis_url = os.environ['REDIS_URL']
     self.r = redis.from_url(redis_url)
Exemplo n.º 59
0
from flask import Flask, render_template, request, jsonify, url_for, redirect
from flask_caching import Cache
import search
import os
import redis

app = Flask(__name__)

# cache = Cache(app, config={"CACHE_TYPE": "simple"})
# Redis client used as the cache backend (REDISCLOUD_URL from Heroku).
cache = redis.from_url(os.environ["REDISCLOUD_URL"])


@app.route("/", methods=["POST", "GET"])
def index():
    # check if Lobby Name form is submitted via POST
    if request.method == "POST":
        lobby_name = request.form["Lobby Name"]
        return redirect(url_for("show_lobby_name", lobby_name=lobby_name))
    # otherwise return regular index.html
    return render_template("index.html")


# Opens a new lobby page for the given lobby name.
@app.route("/lobby/<lobby_name>")
def show_lobby_name(lobby_name):
    """Render the lobby page for *lobby_name*."""
    context = {"name": lobby_name}
    return render_template("lobby.html", **context)


@app.route("/test")
def show_test():
    return render_template("test.html")
def run_worker():
    """Start an RQ worker bound to the app's Redis and configured queues."""
    connection = redis.from_url(app.config['REDIS_URL'])
    with Connection(connection):
        Worker(app.config['QUEUES']).work()