Example #1
0
 def filter_queues(queue_names):
     """Return the subset of *queue_names* whose Queue is present in qs."""
     kept = []
     for name in queue_names:
         if Queue(name) in qs:
             kept.append(name)
     return kept
Example #2
0
import base64
import hashlib
import hmac
import os

import redis
import requests
from flask import Flask
from flask import request
from rq import Queue

from worker import conn

app = Flask(__name__)
channelSecret = os.environ['LINE_CHANNEL_SECRET']
channelAccessToken = os.environ['LINE_CHANNEL_ACCESS_TOKEN']

redis_url = os.getenv('REDISTOGO_URL', 'redis://localhost:6379')

conn = redis.from_url(redis_url)
defaultQueue = Queue('default', connection=conn)


def verifySignature(signature, requestBody):
    """Validate a LINE webhook signature against the raw request body.

    Computes HMAC-SHA256 of *requestBody* with the channel secret and
    compares the base64-encoded digest to *signature*.

    NOTE(review): ``base64.b64encode`` returns bytes in Python 3, so
    *signature* must also be bytes for a match — confirm the caller
    passes the header value un-decoded.
    """
    digest = hmac.new(channelSecret, requestBody, hashlib.sha256).digest()
    # Constant-time comparison: plain ``==`` on secrets leaks timing info.
    return hmac.compare_digest(signature, base64.b64encode(digest))


def replyText(token, text):
    headers = {
        'Content-Type': 'application/json',
        'Authorization': 'Bearer ' + channelAccessToken
    }
    data = {'replyToken': token, 'messages': [{'type': 'text', 'text': text}]}
    requests.post('https://api.line.me/v2/bot/message/reply',
                  headers=headers,
Example #3
0
 def queue_up(self):
     """Enqueue this object's function exactly once: only when a func is
     set and no job has been submitted yet."""
     if self.func is not None and self.job is None:
         # NOTE(review): ``connection=Redis`` passes the Redis *class*, not
         # an instance — rq normally expects a connection object
         # (``Redis()``); confirm against the worker setup before changing.
         que = Queue(connection=Redis)
         # NOTE(review): self.args is passed as one positional argument; if
         # it is an argument tuple, ``*self.args`` may have been intended.
         self.job = que.enqueue(self.func, self.args)
         self.queued_blocks += 1
Example #4
0
def _get_queue(name="default"):
    """Return the RQ queue called *name* on the shared Redis connection."""
    queue = Queue(name, connection=redis_conn)
    return queue
Example #5
0
from flask import Flask, render_template, request, redirect, url_for, flash
from werkzeug.utils import secure_filename
from pathlib import Path
import os
import redis
from rq import Queue
from task import background_task
import subprocess
import yaml
import datetime

app = Flask(__name__)
app.secret_key = b'KURAMA\n\xec]/'

# Shared Redis connection and the default RQ work queue.
r = redis.Redis()
q = Queue(connection=r)

HOME = str(Path.home())
# Per-user staging directory for uploaded app bundles.
APP_UPLOAD_FOLDER = os.path.join(HOME, '.kappc')
SERVICES_REQUIRED = []

if not os.path.exists(APP_UPLOAD_FOLDER):
    try:
        # os.makedirs instead of shelling out to ``mkdir`` via shell=True:
        # portable, and immune to whitespace/shell metacharacters in HOME.
        os.makedirs(APP_UPLOAD_FOLDER)
        print(f'{APP_UPLOAD_FOLDER} App Upload Folder Created')
    except OSError as e:
        print(e)

app.config['APP_UPLOAD_FOLDER'] = APP_UPLOAD_FOLDER
Example #6
0
 def test_enqueue_sets_status(self):
     """Enqueueing a job sets its status to "queued"."""
     queue = Queue()
     enqueued_job = queue.enqueue(say_hello)
     self.assertEqual(enqueued_job.status, Status.QUEUED)
Example #7
0
# Import tasks
from tasks import slowTask, runFinalTask

# Get environment stuff (Should use container environmetn variables)
from environment import env

# Initialized flask app
app = Flask(__name__)

## It should be able to get the dashboard integrated to the flask app, but I can't figure out how :(
# app.config.from_object(rq_dashboard.default_settings)
# app.register_blueprint(rq_dashboard.blueprint, redis_url='redis://' + env['redis']['address'] + ':6379', url_prefix='/rq')

# connect to redis and create queue
# Redis host comes from the container environment config; jobs go to the
# dedicated 'rq-server' queue.
redis_conn = Redis(host=env['redis']['address'], port=6379, db=0)
q = Queue('rq-server', connection=redis_conn)


# Just say hi
@app.route('/hello')
def hello():
    """Test endpoint"""
    return {'hello': 'world'}


# Run a single task
@app.route('/single', methods=['POST'])
def singleTask():
    content = request.get_json()
    job = q.enqueue(slowTask, content['time'])
    time.sleep(content['time'] + 1)
Example #8
0
 def test_create_default_queue(self):
     """Instantiating the default queue."""
     q = Queue()
     # assertEqual: assertEquals is a deprecated alias (removed in 3.12).
     self.assertEqual(q.name, 'default')
Example #9
0
 def test_create_queue(self):
     """Creating queues."""
     q = Queue('my-queue')
     # assertEqual: assertEquals is a deprecated alias (removed in 3.12).
     self.assertEqual(q.name, 'my-queue')
Example #10
0
from flask_restplus import Resource, Namespace, reqparse, fields

from rq import Queue
from rq.job import Job
from rq.registry import StartedJobRegistry, FinishedJobRegistry
from redisQworker import conn

from model import train, predict
import config
import datetime

id_time_format = "%Y-%m-%d %H:%M:%S"
train_job_id = "train_job"
train_new_job_id = "new_train_job"
log = logging.getLogger(__name__)
q_train = Queue(connection=conn, name='train')
registr_train = StartedJobRegistry('train', connection=conn)

started_registry = StartedJobRegistry('train', connection=conn)
finished_registry = FinishedJobRegistry('train', connection=conn)


def init_time_id(id_time_format=id_time_format):
    """Return the current UTC time formatted with *id_time_format*.

    Used to stamp job ids so they sort (and parse back) chronologically.
    """
    # datetime.utcnow() is deprecated (3.12+); an aware UTC now() produces
    # an identical string for the codes in id_time_format.
    return datetime.datetime.now(datetime.timezone.utc).strftime(id_time_format)


def sort_id_time(list_id_time, id_time_format=id_time_format, reverse=False):
    """Sort time-stamp ids chronologically (body appears truncated here)."""
    # Parse each id back into a datetime so ordering is chronological
    # rather than lexicographic.
    list_datetime = [
        datetime.datetime.strptime(x, id_time_format) for x in list_id_time
    ]
    list_datetime = sorted(list_datetime, reverse=reverse)
Example #11
0
import time
import atexit
import hashlib
import multiprocessing

from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.interval import IntervalTrigger
from redis import Redis
from rq import Queue, Worker, Connection

redis = Redis()
listen = ['default']
q = Queue(connection=redis)


scheduler = BackgroundScheduler()

from models import db, operation_remote, operation_local



def enqueue_sync():
    """Fire-and-forget: push a db_sync job onto the default queue.

    Called by the APScheduler interval trigger below.
    """
    # The returned Job object was previously bound to an unused local.
    q.enqueue(db_sync)

# Enqueue a db_sync every five seconds; replace_existing makes the
# registration idempotent across process restarts.
scheduler.add_job(
    func=enqueue_sync,
    trigger=IntervalTrigger(seconds=5),
    id='db_sync',
    name='Sync database every five seconds',
    replace_existing=True)
Example #12
0
    access_token_method='POST',
    request_token_params={
        'response_type': 'code',
        'scope': 'https://www.googleapis.com/auth/userinfo.email'
    },
    access_token_params={'grant_type': 'authorization_code'})

# Create the database connection
db = MongoKit(app)

# Create the Redis connection
import redis
from rq import Queue

redis_connection = redis.from_url(app.config.get("REDIS_URL"))
# Default queue with a 10-minute per-job timeout.
queue = Queue('default', connection=redis_connection, default_timeout=600)

from rq_scheduler import Scheduler

# Scheduler feeds delayed/periodic jobs into the same 'default' queue.
scheduler = Scheduler(queue_name="default", connection=redis_connection)

# Setup RQ Dashboard
from rq_dashboard import RQDashboard

RQDashboard(app)

# Create the Flask-Login manager
login_manager = LoginManager()
from bson.objectid import ObjectId

Example #13
0
 def test_create_worker(self):
     """Worker creation."""
     fooq, barq = Queue('foo'), Queue('bar')
     w = Worker([fooq, barq])
     # assertEqual: assertEquals is a deprecated alias (removed in 3.12).
     self.assertEqual(w.queues, [fooq, barq])
Example #14
0
from rq.job import Job
from bs4 import BeautifulSoup
from selenium import webdriver
from flask_sqlalchemy import SQLAlchemy
from elasticsearch import Elasticsearch
from sqlalchemy.ext.declarative import declarative_base
from flask import Flask, flash, redirect, render_template, request, session, url_for, send_from_directory, send_file, Response, make_response
# import my python scripts for extensions
from ext_sandbox import EXT_Sandbox, sandbox_run
from ext_analyze import EXT_Analyze, static_run
from ext_yara import EXT_yara, yara_run, retrohunt_run

app = Flask(__name__)
es = Elasticsearch()
r = redis.Redis()
# Default queue; 30-minute job timeout for long sandbox/analysis runs.
q = Queue(connection=r, default_timeout=1800)

app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///crxhunt.db'
# SECURITY: hard-coded placeholder secret key — override from an environment
# variable before any real deployment.
app.secret_key = "changethiskey1337"

db = SQLAlchemy(app)
Base = declarative_base()
# Expose .query on declarative models, mirroring Flask-SQLAlchemy models.
Base.query = db.session.query_property()

Build_Ver = "Alpha"


class User(db.Model):
    """ Create user table"""
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(80), unique=True)
Example #15
0
 def test_init(self):
     """Scheduler accepts a mix of Queue objects and plain queue names."""
     queue_foo = Queue('foo', connection=self.testconn)
     sched = RQScheduler([queue_foo, 'bar'], connection=self.testconn)
     self.assertEqual(sched._queue_names, {'foo', 'bar'})
     self.assertEqual(sched.status, RQScheduler.Status.STOPPED)
Example #16
0
    cache = os.path.join("/tmp", "pkg_cache")
    try:
        shutil.rmtree(repo)
        shutil.rmtree(cache)
        shutil.rmtree('/opt/antergos-packages')
    except Exception:
        pass
    db.set('idle', "True")
    db.set('building', 'Idle')
    db.set('container', '')
    db.set('building_num', '')
    db.set('building_start', '')


# Within rq's Connection context the queue and worker implicitly bind to
# the shared ``db`` Redis handle.
with Connection(db):
    queue = Queue('build_queue')
    w = Worker([queue], exc_handler=handle_worker_exception)


def stream_template(template_name, **context):
    """Render *template_name* as a streamed template with buffering disabled,
    so output is flushed to the client as it is produced."""
    app.update_template_context(context)
    template = app.jinja_env.get_template(template_name)
    stream = template.stream(context)
    stream.disable_buffering()
    return stream


def get_log_stream(bnum=None):
    #doc = docker.Client(base_url='unix://var/run/docker.sock', version='1.12', timeout=10)
    is_idle = db.get('idle')
Example #17
0
from .config import (
    DATABASE_URI,
    ELASTICSEARCH_URI,
    LOGGING_CONFIG,
    REDIS_URI,
    SECRET_KEY,
)
from .middleware import SerializationMiddleware, SQLAlchemySessionManager

# Logging
logging.config.dictConfig(LOGGING_CONFIG)

# Redis
redis_conn = redis.StrictRedis.from_url(REDIS_URI)
# Default RQ work queue on the shared Redis connection.
q: Queue = Queue(connection=redis_conn)

# Elasticsearch
# Optional: left as None when no URI is configured.
es = Elasticsearch(ELASTICSEARCH_URI) if ELASTICSEARCH_URI else None
# TODO might have to load a different configuration for production, look into

# SQLAlchemy
db: SQLAlchemy = SQLAlchemy(DATABASE_URI)
from app import models  # noqa

# Authentication
from .utilities import user_loader  # noqa

# JWT auth on every route except the Swagger UI.
auth_backend = JWTAuthBackend(user_loader, secret_key=SECRET_KEY)
auth_middleware = FalconAuthMiddleware(auth_backend,
                                       exempt_routes=["/swagger"])
Example #18
0
def async_queue(mock_redis, mocker):
    """Fixture: a "jobs" queue backed by the mocked Redis, patched into the
    queuing module so code under test receives it from _get_queue."""
    jobs_queue = Queue("jobs", connection=mock_redis)
    mocker.patch.object(queuing, "_get_queue", return_value=jobs_queue)
    return jobs_queue
Example #19
0
 def _get_job(self):
     """Pop and return the next job from this task's dedicated queue."""
     task_queue = Queue(self.task_name, connection=self.redis_conn)
     return task_queue.dequeue()
Example #20
0
 def __init__(self):
     # Local Redis on the default port, db 0.
     self.r = redis.Redis(host='127.0.0.1', port=6379, db=0)
     # Default RQ queue bound to that connection.
     self.q = Queue(connection=self.r)
Example #21
0
import http
import util
import gevent
import redis
import aliyun
import sys
reload(sys)
sys.setdefaultencoding('utf8')

api_list_config = initConfig()
# Redis store for persisted results (host/db from config).
r = redis.StrictRedis(config.get("redis", "store_host"),
                      db=config.get("redis", "store_db"))
# rq-scheduler instance; its queue name comes from the "Queue" config key.
scheduler = Scheduler(connection=Redis(config.get("redis", "host")),
                      queue_name=config.get("redis", "Queue"))

# Work queue for log jobs; name comes from the "Log" config key.
q = Queue(connection=Redis(config.get("redis", "host")),
          name=config.get("redis", "Log"))

logger = logging.getLogger('worker')
LOG_FILENAME = config.get("log", "LOG_PATH") + 'get_api.log'
# HACK/SECURITY: the config value is executed as code. Prefer
# logger.setLevel(getattr(logging, level_name)) — as written, a malicious or
# mistyped config line runs arbitrary code.
exec('logger.setLevel(%s)' % config.get('log', 'log_level'))
# NOTE(review): ``format`` shadows the builtin of the same name.
format = "%(asctime)s %(filename)s %(lineno)d %(levelname)s %(message)s"
# Rotate the log at midnight, keeping 30 days of files.
handler = logging.handlers.TimedRotatingFileHandler(LOG_FILENAME, "midnight",
                                                    1, 30)
handler.setFormatter(logging.Formatter(format))
handler.suffix = "%Y%m%d"
logger.addHandler(handler)


def handle(cdn, ts, domains):

    for l in api_list_config.items(cdn):
Example #22
0
def get_queue():
    """Return the cached module-level work queue, creating it on first use."""
    global __queue
    if __queue:
        return __queue
    __queue = Queue(name=QUEUE_NAME, connection=get_connection())
    return __queue
Example #23
0
 def __init__(self):
     # Work queue on the Redis host/port taken from settings.
     self.q = Queue(connection=Redis(host=settings['RQ_HOST'],
                                     port=settings['RQ_PORT']))
     self.table_name = 'content'
     # Presumably the DynamoDB community-media identifier — verify against
     # the settings module.
     self.media = settings['DYNAMODB_COMID']
Example #24
0
from datetime import datetime, timedelta
from redis import Redis
from rq import Queue
from django.contrib.auth.models import User, Group
from rest_framework import serializers
from rq_scheduler import Scheduler
from snippets.models import Snippet, CourseList, CoursePage, CourseUsers,\
    ReMessage, UserProfile, GradeList
from snippets.tasks import test_job, send_mail_reg, send_mail_note, send_mail_note1
from tutorial.settings import BASE_URL
from utils.token_generator import token_generator, create_email_confirm_url

# Work queue and scheduler each open their own default local Redis connection.
queue = Queue(connection = Redis())
# NOTE(review): "sheduler" is misspelled; renaming would break any importers,
# so it is left as-is.
sheduler = Scheduler(connection=Redis())


class SnippetSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked serializer exposing a minimal view of a Snippet."""
    # highlight = serializers.HyperlinkedIdentityField(view_name='snippet-highlight', format='html')
    class Meta:
        model = Snippet
        # fields = ('id', 'title', 'code', 'linenos', 'language', 'style', 'owner')
        fields = ('url', 'id', 'title', 'owner')


class CreateSnippetSerializer(serializers.ModelSerializer):
    owner = serializers.ReadOnlyField(source='owner.username')
    # highlight = serializers.HyperlinkedIdentityField(view_name='snippet-highlight', format='html')

    class Meta:
        model = Snippet
        fields = (
Example #25
0
from flask import Flask, request
from parse_hook import *
app = Flask(__name__)
from redis import Redis
from rq import Queue

q = Queue('normal', connection=Redis('192.168.99.100', 6379))


@app.route('/')
def hello_world():
    """Root endpoint: trivial health check."""
    greeting = 'Hello World!'
    return greeting


@app.route('/githook', methods=['GET', 'POST'])
def githook():
    """Receive a git push webhook and queue its payload for processing."""
    print "githook received...",
    data = request.get_json()
    # 'after' is the post-push commit hash in the hook payload.
    commit = data.get("after")
    print "data decoded...",
    print commit,
    # Hand the full payload to the background worker on the 'normal' queue.
    result = q.enqueue(handle_push, data)
    print "job added to queue\n"
    return 'Hook received and added to work queue'


if __name__ == '__main__':
    app.run(host='0.0.0.0')
Example #26
0
import os
from redis import Redis
from rq import Queue
from worker import handle_job
# SECURITY: the connection password is hard-coded (redacted here); load it
# from an environment variable instead of keeping it in source control.
q = Queue(connection=Redis(
    host='redis-15749.c16.us-east-1-3.ec2.cloud.redislabs.com',
    port=15749,
    password='******'))

items = """
Ads.mopub.com
cm.ushareit.com
soma.smaato.net
ads.rubiconproject.com
m.addthis.com
s7.addthis.com
Adnxs.com
me-cdn.effectivemeasure.net
adsrvr.org
app.adjust.com 
app-measurement.com
Scorecardresearch.com
casalemedia.com
askfm.adspirit.de
alwatanvoice.com
shobiddak.com
yasour.org
www.xnxx.com
www.xvideos.com
m.xhamster.com
shahid.mbc.net
from flask import Flask, redirect, flash, request, url_for
import os
from rq import Queue
from rq.job import Job
from ffmp import RunFFmpeg
from worker import conn
import uuid
app = Flask(__name__)
q = Queue(connection=conn)


@app.route("/")
def index():
    """Landing endpoint."""
    message = "hello world"
    return message


@app.route('/upload')
def file_upload():
    """Serve a minimal HTML form posting a file plus a time value to /uploads."""
    return '''
    <!doctype html>
    <title>Upload new File</title>
    <h1>Upload new File</h1>
    <form method=post enctype=multipart/form-data action=/uploads>
      <input type=file name=file>

       <input type="text" name="time" id="url-box" placeholder="Enter time..." style="max-width: 300px;">

      <input type=submit value=Upload>
    </form>
    '''
Example #28
0
import json
from StringIO import StringIO

from pybossa.forms.admin_view_forms import *
from pybossa.news import NOTIFY_ADMIN
from pybossa.model.task_run import TaskRun
from pybossa.mongo import task_run_mongo
from bson import json_util
from pybossa.repositories import TaskRepository
from pybossa.core import db
from pybossa.core import task_repo
import json

blueprint = Blueprint('admin', __name__)

# Admin/dashboard jobs run on the dedicated 'super' queue, connected through
# the Redis Sentinel master.
DASHBOARD_QUEUE = Queue('super', connection=sentinel.master)


def format_error(msg, status_code):
    """Wrap *msg* in a JSON error body carrying the given HTTP status."""
    body = json.dumps(dict(error=msg, status_code=status_code))
    return Response(body,
                    status=status_code,
                    mimetype='application/json')


@blueprint.route('/')
@login_required
@admin_required
def index():
    """List admin actions."""
Example #29
0
 def _get_queue(redis_connect, queue_name):
     """Build and return the RQ queue *queue_name* on *redis_connect*."""
     logger.info("Getting Redis queue...")
     queue = Queue(queue_name, connection=redis_connect)
     return queue
if __name__ == '__main__':
    start_time = datetime.now()
    print("Start:", start_time)

    folders = glob.glob(os.path.join(LOCAL_PATH_PREFIX, 'BLOND-50/*/*'),
                        recursive=True)
    folders += glob.glob(os.path.join(LOCAL_PATH_PREFIX, 'BLOND-250/*/*'),
                         recursive=True)
    folders = [os.path.relpath(d, LOCAL_PATH_PREFIX) for d in folders]

    total_jobs = len(folders)
    done_jobs = 0

    print("Enqueueing {} folders...".format(total_jobs))
    q = Queue(connection=Redis())
    for folder in folders:
        q.enqueue_call(compute_one_second_data_summary,
                       args=(folder, WORKER_PATH_PREFIX, RESULTS),
                       timeout=2**31 - 1)

    results_q = Queue(connection=Redis(), name='results')

    print("Processing...")
    with progressbar.ProgressBar(max_value=total_jobs,
                                 redirect_stdout=False,
                                 redirect_stderr=False) as bar:
        while True:
            done_jobs += update_results(results_q)
            bar.update(done_jobs)
            if total_jobs == done_jobs: