Example #1
File: livejournal.py Project: nott/kkb
def publish(subject, text):
    '''
    Publish cumulative post to Livejournal.
    '''
    r = ResQ()
    for target in settings.PUBLISHING_LJ_TARGETS.iterkeys():
        r.enqueue(LivejournalPost, target, subject, text)
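The loop above only reads the keys of `settings.PUBLISHING_LJ_TARGETS`. A minimal sketch of a settings value that would satisfy it; the target name and credential layout are assumptions, not taken from the project:

# Hypothetical: keys name the target journal/community, values hold whatever
# credentials LivejournalPost.perform() needs for that target.
PUBLISHING_LJ_TARGETS = {
    'some_lj_community': {'user': 'bot_account', 'password': 'placeholder'},
}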
Example #2
File: __init__.py Project: nott/kkb
def publish(text, url=None):
    '''
    Publish status to social networks.
    '''
    r = ResQ()
    r.enqueue(TwitterStatus, text, url)
    r.enqueue(VkontakteStatus, text, url)
Example #3
File: eventq.py Project: eiroh/barista
def perform(eventid):
    print eventid
    db = sqlitedb()
    eventrec = db.getactiveevent()
    for event in eventrec:
        print event['eventid']
        r = ResQ(server="%s:%s" % (resqserver, resqport))
        r.enqueue(callQ, event['eventid'])
Example #4
File: crond.py Project: boivie/sci
    def trigger(self, timer):
        intent_json = self.db.hget('timer:%s' % timer, 'intent')
        if not intent_json:
            logging.warning("Triggering timer %s, but found no intent" % timer)
            return

        from async.send_intent import SendIntent
        r = ResQ()
        r.enqueue(SendIntent, intent_json)
Example #5
def do_start_build(job_name):
    input = request.json

    job = Job.load(job_name, input.get('job_ref'))
    build = Build.create(job,
                         parameters=input.get('parameters', {}),
                         description=input.get('description', ''))
    session_id = '%s-0' % build.uuid
    set_session_queued(g.db, session_id)
    r = ResQ()
    r.enqueue(DispatchSession, session_id)
    return jsonify(**build.as_dict())
Example #6
def dispatch():
    input = request.json
    session_no = create_session(g.db,
                                input['build_id'],
                                parent=input['parent'],
                                labels=input['labels'],
                                run_info=input['run_info'],
                                state=SESSION_STATE_TO_BACKEND)
    session_id = '%s-%s' % (input['build_id'], session_no)
    ri = input['run_info'] or {}
    args = ", ".join(ri.get('args', []))
    title = "%s(%s)" % (ri.get('step_name', 'main'), args)
    item = RunAsync(session_no, title)
    add_slog(g.db, input['parent'], item)
    r = ResQ()
    r.enqueue(DispatchSession, session_id)
    return jsonify(session_id=session_id)
Example #7
def check_in_available(agent_id):
    session_id = request.json['session_id']
    build_id, num = session_id.split('-')
    with g.db.pipeline() as pipe:
        set_session_done(pipe, session_id, request.json['result'],
                         request.json['output'], request.json['log_file'])
        if int(num) == 0:
            Build.set_done(build_id, request.json['result'], pipe=pipe)

        add_slog(pipe, session_id, SessionDone(request.json['result']))

        pipe.hmset(jdb.KEY_AGENT % agent_id,
                   dict(state=jdb.AGENT_STATE_AVAIL, seen=get_ts()))
        pipe.execute()

    r = ResQ()
    r.enqueue(AgentAvailable, agent_id)
    return jsonify()
Example #8
def details():
    r = ResQ()
    with closing(get_connection()) as connection:
        total = 0
        with closing(connection.cursor()) as cursor:
            query = '''
            SELECT COUNT(id) AS count
            FROM records
            WHERE details IS NULL
            '''
            cursor.execute(query)
            total = cursor.fetchone()['count']
        with closing(connection.cursor('cursor')) as cursor:
            query = '''
            SELECT *
            FROM records
            WHERE details IS NULL
            '''
            cursor.execute(query)
            for record in tqdm(cursor, total=total):
                r.enqueue(Record, record['id'])
Example #9
def submit_feedback(request):
    if request.method != 'POST':
        raise Http404
    if 'lang' in request.GET and request.GET['lang'] in ALLOWED_LANGS:
        translation.activate(request.GET['lang'])

    try:
        form = FeedbackForm(request.POST or None)
        if form.is_valid():
            r = ResQ()
            r.enqueue(FeedbackTask, form.cleaned_data['name'],
                      form.cleaned_data['email'], form.cleaned_data['text'])
            return HttpResponse(status=200)
    
        # form is invalid
        return HttpResponse(
            loader.render_to_string('feedback/feedback_form_partial.html',
                                    {'feedback_form': form}),
            status=409)
    except Exception:
        logger.exception('')
        raise
Example #10
def add_repository_to_queue(user, repo_owner, repository_name, repo_object=None):
    res = ResQ()
    repo = None
    token = user.get_profile().extra_data['access_token']

    if not repo_object:

        gh = Github(login=user.email, token=token)

        repo = gh.repos.get(repo_owner, repository_name)
    else:
        repo = repo_object

    queue_data = {'email': user.email, 'token': token}
    queue_data['repo'] = {
        'name': repo.name,
        'url': repo.url,
        'git_url': repo.git_url,
        'html_url': repo.html_url,
        'language': repo.language
    }

    db_repo, created = Repository.objects.get_or_create(
            git_url=repo.git_url, defaults=queue_data['repo'])

    query_filter = {'user': user, 'repository': db_repo}
    query_filter.update({'defaults': {
            'user': user,
            "repository": db_repo
        }
    })

    as_contributor, created = Contributor.objects.get_or_create(**query_filter)

    # TODO: add a timestamp field to the Repository model so we can tell whether the
    # repo needs processing again; only enqueue if it was not just created and the
    # timestamp is older than some threshold (see the sketch after this example).

    res.enqueue(RepositoryWorker, queue_data)
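The TODO in the comment above suggests gating the enqueue on a freshness check. A minimal sketch of that guard, assuming a hypothetical `processed_at` DateTimeField were added to the `Repository` model (it is not part of the code above):

from datetime import timedelta

from django.utils import timezone

REPROCESS_INTERVAL = timedelta(hours=6)  # assumed threshold, tune as needed

def should_enqueue(db_repo, created):
    # A repository that was just created always needs an initial pass.
    if created or db_repo.processed_at is None:
        return True
    # Otherwise re-enqueue only if the last pass is older than the threshold.
    return timezone.now() - db_repo.processed_at > REPROCESS_INTERVAL

# ...and at the end of add_repository_to_queue():
#     if should_enqueue(db_repo, created):
#         res.enqueue(RepositoryWorker, queue_data)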
Example #11
def do_register():
    agent_id = request.json['id']

    info = {
        "ip": request.remote_addr,
        'nick': request.json.get('nick', ''),
        "port": request.json["port"],
        "state": jdb.AGENT_STATE_AVAIL,
        "seen": get_ts(),
        "labels": ",".join(request.json["labels"])
    }

    with g.db.pipeline() as pipe:
        pipe.hmset(jdb.KEY_AGENT % agent_id, info)
        pipe.sadd(jdb.KEY_ALL, agent_id)

        for label in request.json["labels"]:
            pipe.sadd(jdb.KEY_LABEL % label, agent_id)
        pipe.execute()

    r = ResQ()
    r.enqueue(AgentAvailable, agent_id)
    return jsonify()
Example #12
File: vkontakte.py Project: nott/kkb
    def perform(text, url):
        '''
        Delayed task.
        '''
        if not settings.PUBLISHING_VKONTAKTE_USER:
            return
        if url is not None:
            text = '%s %s' % (text, url)

        method = 'wall.post'
        owner_id, token, extra_params = settings.PUBLISHING_VKONTAKTE_USER
        params = {'owner_id': owner_id,
                  'access_token': token,
                  'message': text}
        params.update(extra_params)
        resp = vk(method, params)
        post_id = resp.get("response", {}).get("post_id")
        if not post_id:
            raise ValueError('Post ID not provided.\nMethod %s\nParams %r'\
                                 '\n Response %r' % (method, params, resp))

        r = ResQ()
        for user, data in settings.PUBLISHING_VKONTAKTE_REPOST_USERS.iteritems():
            r.enqueue(VkontakteLike, user, owner_id, post_id)
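For reference, `perform` above unpacks `settings.PUBLISHING_VKONTAKTE_USER` as a three-item tuple and iterates `PUBLISHING_VKONTAKTE_REPOST_USERS` by key. A minimal sketch of settings values that would satisfy this code; the id, token, and extra params are placeholders, not values from the project:

# Hypothetical settings matching what perform() expects.
PUBLISHING_VKONTAKTE_USER = (
    -12345678,                      # owner_id (negative for a community wall)
    'vk-access-token-placeholder',  # access_token
    {'from_group': 1},              # extra params merged into the wall.post call
)

# Only the keys are used by the repost loop; the values are ignored there.
PUBLISHING_VKONTAKTE_REPOST_USERS = {
    'repost_account': {'token': 'another-placeholder'},
}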
Example #13
def queue_job(input_coordinate=None,
              hg18_option='off',
              transcript_type='NA',
              refseq_correspondence='no',
              sample_id='yes',
              analysis_type=None,
              analysis_programs=None):
    if analysis_type is None:
        print 'Choose an analysis type.'
        return
    input_basename = input_coordinate + '_' + hg18_option + '_' + transcript_type + '_' + refseq_correspondence + '_' + sample_id + '.txt'
    job_name = input_basename[:-4] + '_' + analysis_type + '_' + analysis_programs + '_20121231_010101'
    job_dir = os.path.join(diagnostic_tests_dir, job_name)
    if not os.path.exists(job_dir):
        os.mkdir(job_dir)
    input_filename = os.path.join(job_dir, input_basename)
    shutil.copy(os.path.join(diagnostic_tests_dir, input_basename), input_filename)
    job_id = job_name
    mutation_filename = input_filename
    mutation_filename_fix = mutation_filename + '.fix'
    mutation_filename_error = mutation_filename +'.error'
    email = '*****@*****.**'
    classifier = '_Other'
    upload_filename = input_basename
    user_upload_dir = job_dir
    chosen_db_str = analysis_programs.strip('_')
    chosen_dbs = chosen_db_str.split('_')
    tsv_report = 'on'
    gene_annot = 'on'
    hg18 = hg18_option
    functional_annot = 'on'
    f = open(input_filename)
    no_input_line = 0
    for line in f:
        if len(line) > 0:
            if not line[0] in ['#', '>', '!']:
                no_input_line += 1
    f.close()
    if no_input_line >= 5000:
        queue_name = queue_name_prefix + '_LARGE'
    else:
        queue_name = queue_name_prefix + '_SMALL'
    mupit_input = 'on'
    resubmit = 'no'
    wf = open(os.path.join(job_dir, 'job_info.txt'), 'w')
    wf.write(str(no_input_line)+'\n')
    wf.write(job_id+'\n')
    wf.write(chosen_db_str+'\n')
    wf.write('call_queuer_path\n')
    wf.write('python_path\n')
    wf.write('queuer_path\n')
    wf.write(email+'\n')
    wf.write(job_id+'\n')
    wf.write(classifier+'\n')
    wf.write(mutation_filename+'\n')
    wf.write(upload_filename+'\n')
    wf.write(user_upload_dir+'\n')
    wf.write(chosen_db_str+'\n')
    wf.write(tsv_report+'\n')
    wf.write(functional_annot+'\n')
    wf.write(hg18+'\n')
    wf.write(analysis_type+'\n')
    wf.write(str(no_input_line)+'\n')
    wf.write('error_output_path\n')
    wf.write(mupit_input+'\n')
    wf.close()
    r = ResQ()
    argstr = 'dummy'
    argstr += ' ' + 'yes'
    argstr += ' ' + user_upload_dir
    argstr += ' ' + resubmit
    argstr += ' ' + job_id
    argstr += ' -e ' + email
    argstr += ' -i ' + job_id
    argstr += ' -c ' + classifier
    argstr += ' -m ' + mutation_filename
    argstr += ' -u ' + upload_filename
    argstr += ' -D ' + user_upload_dir
    argstr += ' -d ' + chosen_db_str
    argstr += ' -t ' + tsv_report
    argstr += ' -f ' + functional_annot
    argstr += ' -r ' + hg18
    argstr += ' -y ' + analysis_type
    argstr += ' -n ' + str(no_input_line)
    argstr += ' -M ' + mupit_input
    argstr += ' -R ' + resubmit
    argstr += ' -Q ' + queue_name
    argstr += ' -T ' + 'yes' # -T option for 'test'
    r.enqueue(queue_name, masteranalyzer.MasterAnalyzer, argstr)
    global job_ids
    job_ids.append(job_id)
Example #14
class Task(object):
    queue = "test-queue"

    def __init__(self, payload):
        self.payload = payload

    @staticmethod
    def perform(json_dir):
        print "do something %s\n" % (json_dir)


if __name__ == '__main__':
    r = ResQ()
    print("Enqueue message\n")
    r.enqueue(Task, "{key:id-1, file:'/tmp/toto.jpg'}")
    #print("get message from basic\n")
    #job = Job.reserve('test-queue', r)
    job = None

    if job:
        print("job payload %s\n" % (job._payload))
    else:
        print("No jobs \n")
    
    
Example #15
File: facebooklink.py Project: nott/kkb
def publish(text, link, image):
    r = ResQ()
    r.enqueue(FacebookLink, text, link, image, True)
    r.enqueue(FacebookLink, text, link, image, False)