Example #1
def add_to_list(key, value):
    setup_rq_connection()
    keytype = get_current_connection().type(key)
    # redis-py returns the key type as bytes unless decode_responses=True
    if keytype == b'none':
        set_value(key, [value])
    elif keytype == b'list':
        get_current_connection().rpush(key, value)
Example #2
def get_value(key):
    setup_rq_connection()
    keytype = get_current_connection().type(key)
    if keytype == b'list':  # redis-py returns the type name as bytes
        return get_current_connection().lrange(key, 0, -1)
    else:
        return get_current_connection().get(key)
Example #3
def set_value(key, value):
    setup_rq_connection()
    if value is None:
        delete_key(key)
    elif isinstance(value, (int, float, bool, str)):
        get_current_connection().set(key, value)
    elif isinstance(value, (list, tuple)):
        delete_key(key)
        for entry in value:
            get_current_connection().rpush(key, entry)
    else:
        raise ValueError("Type '%s' not supported!" % type(value))
Example #4
def process(id_, data_dir, clean_in=10):
    """
    Job to remove the exif data from an uploaded image.
    
    The exif data is saved as a json file.
    
    If the image had an exif thumbnail, it is saved as a separate file.
    """
    path = os.path.join(data_dir, "{}.jpg".format(id_))
    exif = ExifImage(path)
    
    exif.thumb()
    exif.dump()
    exif.clean()
    
    job = get_current_job()
    
    # schedule the cleanup task
    now = datetime.datetime.now()
    scheduler = Scheduler(queue_name=job.origin, connection=get_current_connection())
    scheduler.enqueue_in(datetime.timedelta(minutes=clean_in), cleanup, id_, data_dir)
    
    removed_by = now + datetime.timedelta(minutes=clean_in)
    
    print("Added at: {}".format(now.isoformat()))
    print("Removed by: {}".format(removed_by.isoformat()))
    
    return {
        'thumb': exif.thumb_name,
        'json': exif.json_name,
        'removed_around': removed_by.isoformat()
    }
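Example #4 is a job function rather than library code: it calls get_current_job(), so it only runs inside an rq worker, and it relies on rq-scheduler for the delayed cleanup. A minimal sketch of enqueueing it, assuming a local Redis server; the tasks module name is hypothetical:

from redis import Redis
from rq import Queue

from tasks import process  # hypothetical module holding the job above

q = Queue('default', connection=Redis())
# id_ and data_dir must point at an uploaded image at <data_dir>/<id_>.jpg
job = q.enqueue(process, 'abc123', '/var/uploads', clean_in=10)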
Example #5
def compliance_check(job_id, dataset, checker):
    try:
        redis = get_current_connection()
        cs = CheckSuite()
        if dataset.startswith('http'):
            dataset = check_redirect(dataset)
        ds = cs.load_dataset(dataset)
        score_groups = cs.run(ds, [], checker)

        rpair = score_groups[checker]
        groups, errors = rpair

        aggregates = cs.build_structure(checker, groups, dataset)
        aggregates = cs.serialize(aggregates)
        aggregates['all_priorities'] = sorted(aggregates['all_priorities'], key=lambda x: x['weight'], reverse=True)
        # We use b64 to keep the filenames safe but it's helpful to the user to see
        # the filename they uploaded
        if not aggregates['source_name'].startswith('http'):
            decoded = base64.b64decode(aggregates['source_name'].split('/')[-1])
            if isinstance(decoded, str):
                aggregates['source_name'] = decoded
            else:
                aggregates['source_name'] = decoded.decode('utf-8')
        aggregates['ncdump'] = ncdump(dataset)
        buf = json.dumps(aggregates)

        redis.set('processing:job:%s' % job_id, buf, 3600)
        return True
    except Exception as e:
        redis.set('processing:job:%s' % job_id,
                  json.dumps({"error": type(e).__name__, "message": str(e)}),
                  3600)
        return False
Example #6
def setup_rq_connection():
    redis_conn = get_current_connection()
    if redis_conn is None:
        opts = OPTIONS.get('connection')
        logger.debug('Establishing Redis connection to DB %(db)s at %(host)s:%(port)s' % opts)
        redis_conn = Redis(**opts)
        push_connection(redis_conn)
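Most snippets on this page lean on rq's connection stack (an rq 1.x API, deprecated and removed in later releases): push_connection makes a client the implicit default, and get_current_connection returns the top of the stack, or None when nothing has been pushed, which is exactly the case setup_rq_connection handles. A minimal sketch of that contract:

from redis import Redis
from rq import get_current_connection, pop_connection, push_connection

conn = Redis(host='localhost', port=6379, db=0)
push_connection(conn)
assert get_current_connection() is conn   # top of the stack
pop_connection()
assert get_current_connection() is None   # nothing pushed, nothing current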
Example #7
def __init__(self, queue_name='default', interval=60, connection=None):
    if connection is None:
        connection = get_current_connection()
    self.connection = connection
    self.queue_name = queue_name
    self._interval = interval
    self.log = Logger('scheduler')
Example #8
def list_workers_api():
    workers_list = Worker.all()
    rq_workers = []
    for worker in workers_list:
        host_ip_using_name = "N/A"
        try:
            host_ip_using_name = socket.gethostbyname(worker.hostname)
        except socket.gaierror:
            pass

        rq_workers.append({
            'worker_name': worker.name,
            'listening_on': ', '.join(queue.name for queue in worker.queues),
            'status': ("suspended" if is_suspended(get_current_connection())
                       else worker.get_state()),
            'host_ip': host_ip_using_name,
            'current_job_id': worker.get_current_job_id(),
            'failed_jobs': worker.failed_job_count,
        })
    return {
        'data': rq_workers,
    }
Example #9
def __init__(self, queue_name='default', interval=60, connection=None):
    if connection is None:
        connection = get_current_connection()
    self.connection = connection
    self.queue_name = queue_name
    self._interval = interval
    self.log = Logger('scheduler')
Example #10
def setup_rq_connection():
    redis_conn = get_current_connection()
    if redis_conn is None:
        opts = constants.REDIS_SETTINGS.get('connection')
        logger.debug('Establishing Redis connection to DB %(db)s at %(host)s:%(port)s' % opts)
        redis_conn = Redis(**opts)
        push_connection(redis_conn)
Example #11
    def assert_current_connection(self, expected_host, expected_port, expected_db):
        connection = connections.get_current_connection()
        connection_kwargs = connection.connection_pool.connection_kwargs

        self.assertEqual(connection_kwargs["host"], expected_host)
        self.assertEqual(connection_kwargs["port"], expected_port)
        self.assertEqual(connection_kwargs["db"], expected_db)
Example #12
def compliance_check(job_id, dataset, checker, path=None):
    '''
    Runs the Check Suite for the specified checker and stores the result in
    redis under the given job_id.

    :param str job_id: ID for the rq job
    :param dataset: Dataset handle
    :param str checker: Check Suite ID for a checker
    :param str path: Full path to dataset directory (OPeNDAP only)
    '''
    try:
        redis = get_current_connection()
        cs = CheckSuite()
        if dataset.startswith('http'):
            dataset = check_redirect(dataset)
        ds = cs.load_dataset(dataset)
        score_groups = cs.run(ds, [], checker)

        rpair = score_groups[checker]
        groups, errors = rpair

        aggregates = cs.build_structure(checker, groups, dataset)
        aggregates = cs.serialize(aggregates)
        aggregates['all_priorities'] = sorted(aggregates['all_priorities'],
                                              key=lambda x: x['weight'],
                                              reverse=True)
        # We use b64 to keep the filenames safe but it's helpful to the user to see
        # the filename they uploaded
        if not aggregates['source_name'].startswith('http'):
            decoded = base64.b64decode(
                aggregates['source_name'].split('/')[-1])
            if isinstance(decoded, str):
                aggregates['source_name'] = decoded
            else:
                aggregates['source_name'] = decoded.decode('utf-8')
        aggregates['ncdump'] = ncdump(dataset)
        buf = json.dumps(aggregates)

        # Write the report to a text file for download
        if path is None:
            # Must be a local file, get the path from the dataset
            path = os.path.dirname(dataset)
        fname = 'compliance_{}.txt'.format(job_id)
        output_filename = os.path.join(path, fname)
        with io.open(output_filename, 'w', encoding='utf-8') as f:
            with stdout_redirector(f):
                stdout_output(cs, score_groups, aggregates['source_name'])

        redis.set('processing:job:%s' % job_id, buf, 3600)

        return True

    except Exception as e:
        logger.exception("Failed to process job")
        error_message = {"error": type(e).__name__, "message": str(e)}
        redis.set('processing:job:%s' % job_id, json.dumps(error_message),
                  3600)
        return False
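Example #12's docstring spells out the contract: the report lands in redis under the job id and expires after an hour. A minimal sketch of enqueueing it and reading the result back, assuming a local Redis; the dataset URL and the 'cf' checker name are placeholders:

import uuid

from redis import Redis
from rq import Queue

conn = Redis()
q = Queue(connection=conn)
job_id = str(uuid.uuid4())  # any unique string works as the key suffix

q.enqueue(compliance_check, job_id, 'https://example.com/data.nc', 'cf')

# poll until the worker finishes; the key expires after 3600 seconds
report = conn.get('processing:job:%s' % job_id)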
Example #13
    def setup_redis_with_arguments(self, arguments):
        parser = rqworker.setup_parser()
        args = parser.parse_args(arguments)

        setup_default_arguments(args, {})

        self.current_connection = connections.get_current_connection()

        setup_redis(args)
Example #14
def _get_connection():
    setup_redis(type('lambdaobject', (object,), {
        'host': settings.REDIS_HOST,
        'port': settings.REDIS_PORT,
        'db': settings.REDIS_DB,
        'url': settings.REDIS_URL,
        'password': settings.REDIS_PASSWORD,
    })())

    return get_current_connection()
Example #15
def resume_workers_api():
    if request.method == 'POST':
        try:
            resume(connection=get_current_connection())
        except ActionFailed:
            raise RQMonitorException('Unable to resume worker/s',
                                     status_code=500)

        return {'message': 'Successfully resumed all workers'}
    raise RQMonitorException('Invalid HTTP Request type', status_code=400)
Example #16
def _get_connection():
    setup_redis(type('lambdaobject', (object,), {
        'host': settings.REDIS_HOST,
        'port': settings.REDIS_PORT,
        'db': settings.REDIS_DB,
        'url': settings.REDIS_URL,
        'password': settings.REDIS_PASSWORD,
        'socket': settings.REDIS_SOCKET,
    })())

    return get_current_connection()
Example #17
def log_duration(timestamp_id, **kwargs):
    '''
    A poor man's timer.
    There has to be a better way to time job batches...
    '''
    conn = get_current_connection()
    start_time = conn.get(timestamp_id)

    duration = time.time() - float(start_time)
    td = datetime.timedelta(seconds=round(duration))

    time.sleep(.5)  # just to make it show up at the bottom

    logger.info(yellow('Completed in {}'.format(td)))
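This timer pairs with record_start_time in Example #23 below: one job stores time.time() under timestamp_id, the other reads it back and logs the elapsed time. A minimal sketch of chaining the two around a batch with rq's depends_on; heavy_task and the key name are hypothetical:

from redis import Redis
from rq import Queue

q = Queue(connection=Redis())
ts_id = 'batch:start-ts'  # hypothetical key for the stored start time

start = q.enqueue(record_start_time, ts_id)
work = q.enqueue(heavy_task, depends_on=start)   # hypothetical batch job
q.enqueue(log_duration, ts_id, depends_on=work)  # logs elapsed time at the end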
Example #18
    def __init__(self, queues, number_of_processes=1, connection=None):
        if connection is None:
            connection = get_current_connection()

        self.connection = connection
        if isinstance(queues, Queue):
            queues = [queues]

        self.log = logger

        self.queues = queues
        self.validate_queues()

        self.number_of_processes = number_of_processes

        self.failed_queue = get_failed_queue(connection=self.connection)

        self._stopped = False
Example #19
def compliance_check(job_id, dataset, checker):
    try:
        redis = get_current_connection()
        cs = CheckSuite()
        if dataset.startswith('http'):
            dataset = check_redirect(dataset)
        ds = cs.load_dataset(dataset)
        score_groups = cs.run(ds, checker)

        rpair = score_groups[checker]
        groups, errors = rpair

        aggregates = cs.build_structure(checker, groups, dataset)
        aggregates = cs.serialize(aggregates)
        aggregates['all_priorities'] = sorted(aggregates['all_priorities'],
                                              key=lambda x: x['weight'],
                                              reverse=True)
        # We use b64 to keep the filenames safe but it's helpful to the user to see
        # the filename they uploaded
        if not aggregates['source_name'].startswith('http'):
            decoded = base64.b64decode(
                aggregates['source_name'].split('/')[-1])
            if isinstance(decoded, str):
                aggregates['source_name'] = decoded
            else:
                aggregates['source_name'] = decoded.decode('utf-8')
        aggregates['ncdump'] = ncdump(dataset)
        buf = json.dumps(aggregates)

        redis.set('processing:job:%s' % job_id, buf, 3600)
        return True
    except Exception as e:
        redis.set(
            'processing:job:%s' % job_id,
            json.dumps({
                "error": type(e).__name__,
                "message": str(e)
            }), 3600)
        return False
Example #20
def list_workers_api():
    workers_list = Worker.all()
    rq_workers = []
    for worker in workers_list:
        rq_workers.append({
            'worker_name': worker.name,
            'listening_on': ', '.join(queue.name for queue in worker.queues),
            'status': ("suspended" if is_suspended(get_current_connection())
                       else worker.get_state()),
            'current_job_id': worker.get_current_job_id(),
            'success_jobs': worker.successful_job_count,
            'failed_jobs': worker.failed_job_count,
        })

    return {
        'data': rq_workers,
    }
Example #21
def suspend_workers_api():
    if request.method == 'POST':
        suspend(connection=get_current_connection())
        return {'message': 'Successfully suspended all workers'}
Example #22
def get_workers_dashboard():
    return render_template(
        'rqmonitor/workers.html',
        is_suspended=is_suspended(connection=get_current_connection()))
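Examples #15, #21, #22, and #24 wrap rq's suspension helpers from rq.suspension. A minimal sketch of the underlying calls against a bare connection, assuming a local Redis:

from redis import Redis
from rq.suspension import is_suspended, resume, suspend

conn = Redis()
suspend(conn)               # workers finish their current job, then stop dequeueing
assert is_suspended(conn)
resume(conn)                # workers start dequeueing again
assert not is_suspended(conn)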
Example #23
def record_start_time(timestamp_id, **kwargs):
    conn = get_current_connection()
    conn.set(timestamp_id, time.time())
Example #24
def resume_workers_api():
    if request.method == 'POST':
        resume(connection=get_current_connection())
        return {'message': 'Successfully resumed all workers'}
Example #25
def delete_key(key):
    get_current_connection().delete(key)
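Examples #1-#3 and this delete_key helper come from the same module: a small façade that stores scalars as plain redis strings and sequences as redis lists. A minimal usage sketch, assuming the four helpers are importable together and a connection has been configured; note that values come back as bytes under redis-py's defaults:

set_value('greeting', 'hello')         # stored as a plain redis string
set_value('todo', ['wash', 'cook'])    # stored as a redis list
add_to_list('todo', 'sleep')           # appends to the existing list
print(get_value('todo'))               # [b'wash', b'cook', b'sleep']
delete_key('todo')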